diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 0000000000..c9b72628dd
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,40 @@
+---
+name: Feature Request
+about: Use this template for requesting new features
+title:
+labels: feature
+assignees:
+
+---
+
+
+
+
+
+
+**Description**
+
+
+
+**Requirements**
+
+
+**Acceptance Criteria (Definition of Done)**
+
+
+**(Optional): Suggest A Solution**
+
diff --git a/.github/ISSUE_TEMPLATE/fix_file.md b/.github/ISSUE_TEMPLATE/fix_file.md
new file mode 100644
index 0000000000..1e05f0c9df
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/fix_file.md
@@ -0,0 +1,24 @@
+---
+name: Fix File Update
+about: Use this template for adding, updating, or removing fix files from global dataset
+title:
+labels: Fix Files
+assignees:
+ - KateFriedman-NOAA
+ - WalterKolczynski-NOAA
+
+---
+
+**Description**
+
+
+
+
+
+
+**Tasks**
+
+- [ ] Discuss needs with global-workflow developer assigned to request.
+- [ ] Add/update/remove fix file(s) in fix sets on supported platforms (global-workflow assignee task).
+- [ ] Update "Fix File Management" spreadsheet (https://docs.google.com/spreadsheets/d/1BeIvcz6TO3If4YCqkUK-oz_kGS9q2wTjwLS-BBemSEY/edit?usp=sharing).
+- [ ] Make related workflow/component updates.
diff --git a/.github/ISSUE_TEMPLATE/production_update.md b/.github/ISSUE_TEMPLATE/production_update.md
new file mode 100644
index 0000000000..fd517d3d0a
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/production_update.md
@@ -0,0 +1,31 @@
+---
+name: Production Update
+about: Use this template for operational production updates
+title:
+labels: production update
+assignees:
+ - KateFriedman-NOAA
+
+---
+
+**Description**
+
+
+
+
+**Workflow Changes**
+
+
+
+**Tasks**
+- [ ] Create release branch
+- [ ] Make workflow changes for upgrade in release branch (add additional checklist items as needed)
+- [ ] Create release notes
+- [ ] Cut hand-off tag for CDF
+- [ ] Submit CDF to NCO
+- [ ] Implementation into operations complete
+- [ ] Merge release branch into operational branch
+- [ ] Cut version tag from operational branch
+- [ ] Release new version tag
+- [ ] Announce to users
+- [ ] Update Read-The-Docs operations status version in develop
diff --git a/.github/scripts/build_docs.sh b/.github/scripts/build_docs.sh
new file mode 100755
index 0000000000..7fb6701da2
--- /dev/null
+++ b/.github/scripts/build_docs.sh
@@ -0,0 +1,31 @@
+#! /bin/bash
+
+set -eux
+
+# path to docs directory relative to top level of repository
+# $GITHUB_WORKSPACE is set if the actions/checkout@v3 action is run first
+
+cwd=$(pwd)
+DOCS_DIR="${GITHUB_WORKSPACE}/docs"
+
+# run Make to build the documentation and return to previous directory
+cd "${DOCS_DIR}"
+make clean html
+cd "${cwd}"
+
+# copy HTML output into directory to create an artifact
+mkdir -p artifact/documentation
+cp -R "${DOCS_DIR}/build/html/." artifact/documentation
+
+# if the warnings.log file is not empty, copy it into the
+# artifact and documentation directories so it will be
+# available in the artifacts
+warning_file="${DOCS_DIR}/build/warnings.log"
+if [[ -s ${warning_file} ]]; then
+ cp -r "${DOCS_DIR}/build/warnings.log" artifact/doc_warnings.log
+ cp artifact/doc_warnings.log artifact/documentation
+ echo "Warnings were encountered while building documentation."
+ echo "========== Begin warnings ==========" + cat artifact/doc_warnings.log + echo "=========== End warnings ===========" +fi diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml new file mode 100644 index 0000000000..ae083a3c0b --- /dev/null +++ b/.github/workflows/docs.yaml @@ -0,0 +1,51 @@ +name: Build and Deploy Documentation +on: + push: + branches: + - develop + - feature/* + - main/* + - bugfix/* + - release/* + paths: + - docs/** + pull_request: + types: [opened, reopened, synchronize] + +jobs: + documentation: + runs-on: ubuntu-latest + name: Build and deploy documentation + + steps: + - name: Setup Python + uses: actions/setup-python@v4 + with: + python-version: "3.9" + + - name: Install (upgrade) python dependencies + run: | + pip install --upgrade pip sphinx sphinx-gallery sphinx_rtd_theme sphinxcontrib-bibtex + + - name: Checkout + uses: actions/checkout@v3 + + - name: Build documentation + run: | + ./.github/scripts/build_docs.sh + + - name: Upload documentation (on success) + uses: actions/upload-artifact@v3 + if: always() + with: + name: documentation + path: artifact/documentation + + - name: Upload warnings (on failure) + uses: actions/upload-artifact@v3 + if: failure() + with: + name: documentation_warnings.log + path: artifact/doc_warnings.log + if-no-files-found: ignore + diff --git a/.github/workflows/linters.yaml b/.github/workflows/linters.yaml new file mode 100644 index 0000000000..488b6a1407 --- /dev/null +++ b/.github/workflows/linters.yaml @@ -0,0 +1,64 @@ +# +name: shellnorms +on: + pull_request: + +permissions: + contents: read + +defaults: + run: + shell: bash -o pipefail {0} + +jobs: + lint-shell: + runs-on: ubuntu-latest + + permissions: + security-events: write + + steps: + - name: Checkout code + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - id: ShellCheck + name: Lint shell scripts + uses: redhat-plumbers-in-action/differential-shellcheck@v4 + with: + token: ${{ secrets.GITHUB_TOKEN }} + + - if: ${{ always() }} + name: Upload artifact with ShellCheck defects in SARIF format + uses: actions/upload-artifact@v3 + with: + name: Differential ShellCheck SARIF + path: ${{ steps.ShellCheck.outputs.sarif }} + + # lint-python: + # runs-on: ubuntu-latest + + # permissions: + # security-events: write + + # steps: + # - name: Checkout code + # uses: actions/checkout@v3 + + # - id: VCS_Diff_Lint + # name: Lint python scripts + # uses: fedora-copr/vcs-diff-lint-action@v1 + + # - if: ${{ always() }} + # name: Upload artifact with detected defects in SARIF format + # uses: actions/upload-artifact@v3 + # with: + # name: VCS Diff Lint SARIF + # path: ${{ steps.VCS_Diff_Lint.outputs.sarif }} + + # - if: ${{ failure() }} + # name: Upload SARIF to GitHub using github/codeql-action/upload-sarif + # uses: github/codeql-action/upload-sarif@v2 + # with: + # sarif_file: ${{ steps.VCS_Diff_Lint.outputs.sarif }} diff --git a/.github/workflows/pynorms.yaml b/.github/workflows/pynorms.yaml new file mode 100644 index 0000000000..7f823f8318 --- /dev/null +++ b/.github/workflows/pynorms.yaml @@ -0,0 +1,24 @@ +name: pynorms +on: [push, pull_request] + +jobs: + check_norms: + runs-on: ubuntu-latest + name: Check Python coding norms with pycodestyle + + steps: + + - name: Install dependencies + run: | + pip install --upgrade pip + pip install pycodestyle + + - name: Checkout + uses: actions/checkout@v3 + with: + path: global-workflow + + - name: Run pycodestyle + run: | + cd $GITHUB_WORKSPACE/global-workflow + pycodestyle -v --config ./.pycodestyle 
--exclude='.git,.github' ./ diff --git a/.github/workflows/pytests.yaml b/.github/workflows/pytests.yaml new file mode 100644 index 0000000000..f15a776c0f --- /dev/null +++ b/.github/workflows/pytests.yaml @@ -0,0 +1,36 @@ +name: pytests +on: [push, pull_request] + +jobs: + run_pytests: + runs-on: ubuntu-latest + name: Install pygw and run tests with pytests + strategy: + max-parallel: 1 + matrix: + python: ["3.7", "3.8", "3.9", "3.10"] + + steps: + - name: Setup Python + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python }} + + - name: Install (upgrade) python dependencies + run: | + pip install --upgrade pip + + - name: Checkout + uses: actions/checkout@v3 + with: + path: global-workflow + + - name: Install pygw + run: | + cd $GITHUB_WORKSPACE/global-workflow/ush/python/pygw + pip install .[dev] + + - name: Run pytests + run: | + cd $GITHUB_WORKSPACE/global-workflow/ush/python/pygw + pytest -v src/tests diff --git a/.gitignore b/.gitignore index 766469c97a..d09198e36d 100644 --- a/.gitignore +++ b/.gitignore @@ -5,7 +5,16 @@ __pycache__ *.[aox] *.mod *.sw[a-p] +._* +.DS_Store +#nohup.out - some users do not want this to be a part of .gitignore. TODO: review against best practices +.idea/ +.vscode/ +nohup.out +# Ignore editor generated backup files +#------------------------------------- +*~ # Ignore folders #------------------- exec/ @@ -15,8 +24,23 @@ install*/ # Ignore fix directory symlinks #------------------------------ fix/0readme -fix/fix_* -fix/gdas/ +fix/aer +fix/am +fix/chem +fix/cice +fix/cpl +fix/datm +fix/gdas +fix/gldas +fix/gsi +fix/lut +fix/mom6 +fix/orog +fix/reg2grb2 +fix/sfc_climo +fix/ugwd +fix/verif +fix/wave fix/wafs # Ignore parm file symlinks @@ -25,7 +49,7 @@ parm/config/config.base parm/gldas parm/mon parm/post/AEROSOL_LUTS.dat -parm/post/nam_micro_lookup.dat +parm/post/nam_micro_lookup.dat parm/post/optics_luts_DUST.dat parm/post/gtg.config.gfs parm/post/gtg_imprintings.txt @@ -73,109 +97,27 @@ parm/wafs #-------------------------------------------- sorc/*log sorc/logs -sorc/ufs_model.fd -sorc/gfs_post.fd -sorc/gfs_wafs.fd -sorc/gldas.fd -sorc/gsi.fd -sorc/ufs_utils.fd -sorc/verif-global.fd - -# Ignore sorc symlinks -#--------------------- -sorc/calc_analysis.fd -sorc/calc_increment_ens.fd -sorc/calc_increment_ens_ncio.fd -sorc/emcsfc_ice_blend.fd -sorc/emcsfc_snow2mdl.fd -sorc/fregrid.fd -sorc/gdas2gldas.fd -sorc/getsfcensmeanp.fd -sorc/getsigensmeanp_smooth.fd -sorc/getsigensstatp.fd -sorc/gfs_ncep_post.fd -sorc/gldas2gdas.fd -sorc/gldas_forcing.fd -sorc/gldas_model.fd -sorc/gldas_post.fd -sorc/gldas_rst.fd -sorc/global_chgres.fd -sorc/global_cycle.fd -sorc/global_enkf.fd -sorc/global_gsi.fd -sorc/interp_inc.fd -sorc/make_hgrid.fd -sorc/make_solo_mosaic.fd -sorc/ncdiag_cat.fd -sorc/nst_tf_chg.fd -sorc/oznmon_horiz.fd -sorc/oznmon_time.fd -sorc/radmon_angle.fd -sorc/radmon_bcoef.fd -sorc/radmon_bcor.fd -sorc/radmon_time.fd -sorc/recentersigp.fd -sorc/upp.fd -sorc/wafs_awc_wafavn.fd -sorc/wafs_blending.fd -sorc/wafs_blending_0p25.fd -sorc/wafs_cnvgrib2.fd -sorc/wafs_gcip.fd -sorc/wafs_grib2_0p25.fd -sorc/wafs_makewafs.fd -sorc/wafs_setmissing.fd +sorc/*.cd +sorc/*.fd # Ignore scripts from externals #------------------------------ # jobs symlinks -jobs/JGDAS_ATMOS_ANALYSIS_DIAG -jobs/JGDAS_ATMOS_CHGRES_FORENKF -jobs/JGDAS_ATMOS_GLDAS -jobs/JGDAS_ATMOS_VERFOZN -jobs/JGDAS_ATMOS_VERFRAD -jobs/JGDAS_ATMOS_VMINMON -jobs/JGDAS_ENKF_DIAG -jobs/JGDAS_ENKF_ECEN -jobs/JGDAS_ENKF_FCST -jobs/JGDAS_ENKF_POST -jobs/JGDAS_ENKF_SELECT_OBS 
-jobs/JGDAS_ENKF_SFC -jobs/JGDAS_ENKF_UPDATE -jobs/JGFS_ATMOS_VMINMON jobs/JGFS_ATMOS_WAFS jobs/JGFS_ATMOS_WAFS_BLENDING jobs/JGFS_ATMOS_WAFS_BLENDING_0P25 jobs/JGFS_ATMOS_WAFS_GCIP jobs/JGFS_ATMOS_WAFS_GRIB2 jobs/JGFS_ATMOS_WAFS_GRIB2_0P25 -jobs/JGLOBAL_ATMOS_ANALYSIS -jobs/JGLOBAL_ATMOS_ANALYSIS_CALC # scripts symlinks scripts/exemcsfc_global_sfc_prep.sh -scripts/exgdas_atmos_chgres_forenkf.sh -scripts/exgdas_atmos_gldas.sh -scripts/exgdas_atmos_verfozn.sh -scripts/exgdas_atmos_verfrad.sh -scripts/exgdas_atmos_vminmon.sh -scripts/exgdas_enkf_ecen.sh -scripts/exgdas_enkf_fcst.sh -scripts/exgdas_enkf_post.sh -scripts/exgdas_enkf_select_obs.sh -scripts/exgdas_enkf_sfc.sh -scripts/exgdas_enkf_update.sh -scripts/exgfs_atmos_vminmon.sh scripts/exgfs_atmos_wafs_blending.sh scripts/exgfs_atmos_wafs_blending_0p25.sh scripts/exgfs_atmos_wafs_gcip.sh scripts/exgfs_atmos_wafs_grib.sh scripts/exgfs_atmos_wafs_grib2.sh scripts/exgfs_atmos_wafs_grib2_0p25.sh -scripts/exglobal_atmos_analysis.sh -scripts/exglobal_atmos_analysis_calc.sh -scripts/exglobal_diag.sh # ush symlinks -ush/calcanl_gfs.py -ush/calcinc_gfs.py ush/chgres_cube.sh ush/emcsfc_ice_blend.sh ush/emcsfc_snow.sh @@ -184,29 +126,20 @@ ush/fv3gfs_driver_grid.sh ush/fv3gfs_filter_topo.sh ush/fv3gfs_make_grid.sh ush/fv3gfs_make_orog.sh -ush/getncdimlen -ush/gldas_archive.sh -ush/gldas_forcing.sh -ush/gldas_get_data.sh -ush/gldas_liscrd.sh -ush/gldas_post.sh -ush/gldas_process_data.sh ush/global_chgres.sh ush/global_chgres_driver.sh ush/global_cycle.sh ush/global_cycle_driver.sh -ush/gsi_utils.py -ush/minmon_xtrct_costs.pl -ush/minmon_xtrct_gnorms.pl -ush/minmon_xtrct_reduct.pl +ush/jediinc2fv3.py ush/mkwfsgbl.sh -ush/ozn_xtrct.sh -ush/radmon_ck_stdout.sh -ush/radmon_err_rpt.sh -ush/radmon_verf_angle.sh -ush/radmon_verf_bcoef.sh -ush/radmon_verf_bcor.sh -ush/radmon_verf_time.sh +ush/ufsda ush/wafs_blending.sh ush/wafs_grib2.regrid.sh ush/wafs_intdsk.sh +ush/finddate.sh +ush/make_NTC_file.pl +ush/make_ntc_bull.pl +ush/make_tif.sh +ush/month_name.sh +ush/imsfv3_scf2ioda.py +ush/letkf_create_ens.py diff --git a/.pycodestyle b/.pycodestyle new file mode 100644 index 0000000000..8bd18fa9d7 --- /dev/null +++ b/.pycodestyle @@ -0,0 +1,6 @@ +[pycodestyle] +count = False +ignore = E402,W504 +max-line-length = 160 +statistics = True +exclude = Experimental diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 0000000000..fa854552e5 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,21 @@ +# .readthedocs.yaml +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Set the version of Python and other tools you might need +build: + os: ubuntu-22.04 + tools: + python: "3.11" + +# Build documentation in the docs/ directory with Sphinx +sphinx: + configuration: docs/source/conf.py + +python: + install: + - requirements: docs/requirements.txt + system_packages: true diff --git a/.shellcheckrc b/.shellcheckrc new file mode 100644 index 0000000000..6d540ba17f --- /dev/null +++ b/.shellcheckrc @@ -0,0 +1,16 @@ +# Global settings for Shellcheck (https://github.com/koalaman/shellcheck) +enable=all + +external-sources=false + +# Disable variable referenced but not assigned +disable=SC2154 + +# Disable following non-constant source +disable=SC1090 + +# Disable non-existent binary +disable=SC1091 + +# Disable -p -m only applies to deepest directory +disable=SC2174 diff --git a/Externals.cfg b/Externals.cfg index 6ba64d0299..1fde0c5033 100644 --- 
a/Externals.cfg +++ b/Externals.cfg @@ -1,59 +1,65 @@ # External sub-modules of global-workflow [UFS] -hash = 889254a63e38b5318cefb6d07a75ea99837f7bf4 +tag = 2247060 local_path = sorc/ufs_model.fd repo_url = https://github.com/ufs-community/ufs-weather-model.git protocol = git required = True -[GSI] -hash = 9c1fc15d42573b398037319bbf8d5143ad126fb6 -local_path = sorc/gsi.fd -repo_url = https://github.com/NOAA-EMC/GSI.git -protocol = git -required = True - -[GLDAS] -tag = gldas_gfsv16_release.v1.15.0 -local_path = sorc/gldas.fd -repo_url = https://github.com/NOAA-EMC/GLDAS.git +[gfs-utils] +hash = 8965258 +local_path = sorc/gfs_utils.fd +repo_url = https://github.com/NOAA-EMC/gfs-utils protocol = git required = True -[UPP] -#No externals setting = .gitmodules will be invoked for CMakeModules and comupp/src/lib/crtm2 submodules -hash = ff42e0227d6100285d4179a2572b700fd5a959cb -local_path = sorc/gfs_post.fd -repo_url = https://github.com/NOAA-EMC/UPP.git -protocol = git -required = True - -[UFS_UTILS] -tag = ufs_utils_1_8_0 +[UFS-Utils] +hash = 72a0471 local_path = sorc/ufs_utils.fd repo_url = https://github.com/ufs-community/UFS_UTILS.git protocol = git required = True [EMC_verif-global] -tag = verif_global_v2.5.2 +tag = c267780 local_path = sorc/verif-global.fd repo_url = https://github.com/NOAA-EMC/EMC_verif-global.git protocol = git required = True -[EMC_gfs_wafs] -hash = c2a29a67d9432b4d6fba99eac7797b81d05202b6 -local_path = sorc/gfs_wafs.fd -repo_url = https://github.com/NOAA-EMC/EMC_gfs_wafs.git +[GSI-EnKF] +hash = 113e307 +local_path = sorc/gsi_enkf.fd +repo_url = https://github.com/NOAA-EMC/GSI.git protocol = git required = False -[aeroconv] -hash = 24f6ddc -local_path = sorc/aeroconv.fd -repo_url = https://github.com/NCAR/aeroconv.git +[GSI-Utils] +hash = 322cc7b +local_path = sorc/gsi_utils.fd +repo_url = https://github.com/NOAA-EMC/GSI-utils.git +protocol = git +required = False + +[GSI-Monitor] +hash = 45783e3 +local_path = sorc/gsi_monitor.fd +repo_url = https://github.com/NOAA-EMC/GSI-monitor.git +protocol = git +required = False + +[GDASApp] +hash = 81675c9 +local_path = sorc/gdas.cd +repo_url = https://github.com/NOAA-EMC/GDASApp.git +protocol = git +required = False + +[EMC-gfs_wafs] +hash = 014a0b8 +local_path = sorc/gfs_wafs.fd +repo_url = https://github.com/NOAA-EMC/EMC_gfs_wafs.git protocol = git required = False diff --git a/FV3GFSwfm/test_hera/config.aero b/FV3GFSwfm/test_hera/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. 
Choose from: GBBEPx, QFED, NONE (default)
+AERO_EMIS_FIRE=QFED
+
+# Aerosol convective scavenging factors (list of string array elements)
+# Element syntax: '<tracer name>:<factor>'. Use '*' as the tracer name to set the default factor for all aerosol tracers
+# Scavenging factors are set to 0 (no scavenging) if unset
+aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'"
+#
+# Number of diagnostic aerosol tracers (default: 0)
+aero_diag_tracers=2
diff --git a/FV3GFSwfm/test_hera/config.aeroanl b/FV3GFSwfm/test_hera/config.aeroanl
new file mode 100644
index 0000000000..27ef3aca7d
--- /dev/null
+++ b/FV3GFSwfm/test_hera/config.aeroanl
@@ -0,0 +1,24 @@
+#!/bin/bash -x
+
+########## config.aeroanl ##########
+# configuration common to all aero analysis tasks
+
+echo "BEGIN: config.aeroanl"
+
+export CASE_ANL=${CASE}
+export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/
+export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml
+export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml
+export STATICB_TYPE='identity'
+export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml
+export FV3JEDI_FIX=${HOMEgfs}/fix/gdas
+export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/
+export BERROR_DATE="20160630.000000"
+
+export io_layout_x=1
+export io_layout_y=1
+
+export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x
+export crtm_VERSION="2.3.0"
+
+echo "END: config.aeroanl"
diff --git a/FV3GFSwfm/test_hera/config.aeroanlfinal b/FV3GFSwfm/test_hera/config.aeroanlfinal
new file mode 100644
index 0000000000..230ec5205a
--- /dev/null
+++ b/FV3GFSwfm/test_hera/config.aeroanlfinal
@@ -0,0 +1,10 @@
+#!/bin/bash -x
+
+########## config.aeroanlfinal ##########
+# Post Aero Analysis specific
+
+echo "BEGIN: config.aeroanlfinal"
+
+# Get task specific resources
+. $EXPDIR/config.resources aeroanlfinal
+echo "END: config.aeroanlfinal"
diff --git a/FV3GFSwfm/test_hera/config.aeroanlinit b/FV3GFSwfm/test_hera/config.aeroanlinit
new file mode 100644
index 0000000000..72175b8d0c
--- /dev/null
+++ b/FV3GFSwfm/test_hera/config.aeroanlinit
@@ -0,0 +1,10 @@
+#!/bin/bash -x
+
+########## config.aeroanlinit ##########
+# Pre Aero Analysis specific
+
+echo "BEGIN: config.aeroanlinit"
+
+# Get task specific resources
+. $EXPDIR/config.resources aeroanlinit
+echo "END: config.aeroanlinit"
diff --git a/FV3GFSwfm/test_hera/config.aeroanlrun b/FV3GFSwfm/test_hera/config.aeroanlrun
new file mode 100644
index 0000000000..da13df2831
--- /dev/null
+++ b/FV3GFSwfm/test_hera/config.aeroanlrun
@@ -0,0 +1,11 @@
+#!/bin/bash -x
+
+########## config.aeroanlrun ##########
+# Aerosol Analysis specific
+
+echo "BEGIN: config.aeroanlrun"
+
+# Get task specific resources
+. $EXPDIR/config.resources aeroanlrun
+
+echo "END: config.aeroanlrun"
diff --git a/FV3GFSwfm/test_hera/config.aerosol_init b/FV3GFSwfm/test_hera/config.aerosol_init
new file mode 100644
index 0000000000..0e586e0231
--- /dev/null
+++ b/FV3GFSwfm/test_hera/config.aerosol_init
@@ -0,0 +1,10 @@
+#!
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/test_hera/config.anal b/FV3GFSwfm/test_hera/config.anal new file mode 100644 index 0000000000..e3a17f9c6a --- /dev/null +++ b/FV3GFSwfm/test_hera/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=45,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${PDY}${cyc}" -ge "2020052612" && "${PDY}${cyc}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${PDY}${cyc}" -ge "2020082412" && "${PDY}${cyc}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ "${PDY}${cyc}" -ge 
"2020091612" && "${PDY}${cyc}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${PDY}${cyc}" -ge "2021031712" && "${PDY}${cyc}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${PDY}${cyc}" -ge "2020011600" && "${PDY}${cyc}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${PDY}${cyc}" -ge "2020022012" && "${PDY}${cyc}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${PDY}${cyc}" -ge "2021052118" && "${PDY}${cyc}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${PDY}${cyc}" -ge "2021092206" && "${PDY}${cyc}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/test_hera/config.analcalc b/FV3GFSwfm/test_hera/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/test_hera/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/test_hera/config.analdiag b/FV3GFSwfm/test_hera/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/test_hera/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/test_hera/config.arch b/FV3GFSwfm/test_hera/config.arch new file mode 100644 index 0000000000..6a0f6306a8 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.arch @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. "${EXPDIR}/config.resources" arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +echo "END: config.arch" diff --git a/FV3GFSwfm/test_hera/config.atmanl b/FV3GFSwfm/test_hera/config.atmanl new file mode 100644 index 0000000000..c045704fa2 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.atmanl @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/test_hera/config.atmanlfinal b/FV3GFSwfm/test_hera/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/test_hera/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/test_hera/config.atmanlinit b/FV3GFSwfm/test_hera/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/test_hera/config.atmanlrun b/FV3GFSwfm/test_hera/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/test_hera/config.atmensanl b/FV3GFSwfm/test_hera/config.atmensanl new file mode 100644 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.atmensanl @@ -0,0 +1,22 @@ +#! 
/usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/test_hera/config.atmensanlfinal b/FV3GFSwfm/test_hera/config.atmensanlfinal new file mode 100644 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/test_hera/config.atmensanlinit b/FV3GFSwfm/test_hera/config.atmensanlinit new file mode 100644 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/test_hera/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/test_hera/config.atmensanlrun b/FV3GFSwfm/test_hera/config.atmensanlrun new file mode 100644 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/test_hera/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/test_hera/config.awips b/FV3GFSwfm/test_hera/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/test_hera/config.base b/FV3GFSwfm/test_hera/config.base new file mode 100644 index 0000000000..4ed2250ddf --- /dev/null +++ b/FV3GFSwfm/test_hera/config.base @@ -0,0 +1,383 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="HERA" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="gsd-fv3" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="hera" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT="fim" + +# Directories relative to installation areas: +export HOMEgfs=/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/scratch1/NCEPDEV/global/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/scratch1/NCEPDEV/global/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/scratch1/NCEPDEV/global/glopara/dump" + +# USER specific paths +export HOMEDIR="/scratch1/BMC/gsd-fv3-dev/NCEPDEV/global/${USER}" +export STMP="${ROTDIR}/.." +export PTMP="${ROTDIR}/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/scratch1/NCEPDEV/global/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022110900 +export EDATE=2022110900 +export EXP_WARM_START=".false." 
+export assim_freq=6 +export PSLOT="test_hera" +export EXPDIR="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/${PSLOT}" +export ROTDIR="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='mx025' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#JKHexport FHMAX_GFS_00=${FHMAX_GFS_00:-24} +#JKHexport FHMAX_GFS_06=${FHMAX_GFS_06:-24} +#JKHexport FHMAX_GFS_12=${FHMAX_GFS_12:-24} +#JKHexport FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ### JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="NO" +export DO_JEDIATMENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then
+ export IAU_OFFSET=0
+ export IAU_FHROT=0
+fi
+
+# Turn on nsst in anal and/or fcst steps, and turn off rtgsst
+export DONST="YES"
+if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi
+
+# The switch to apply SST elevation correction or not
+export nst_anl=.true.
+
+# Make the nsstbufr file on the fly or use the GDA version
+export MAKE_NSSTBUFR="NO"
+
+# Make the aircraft prepbufr file on the fly or use the GDA version
+export MAKE_ACFTBUFR="NO"
+
+# Analysis increments to zero in CALCINCEXEC
+export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'"
+
+# Write analysis files for early cycle EnKF
+export DO_CALC_INCREMENT_ENKF_GFS="YES"
+
+# Stratospheric increments to zero
+export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'"
+export INCVARS_EFOLD="5"
+
+# Switch to generate netcdf or binary diagnostic files. If not specified,
+# scripts default to binary diagnostic files. Set diagnostic file
+# variables here since used in both DA and vrfy jobs
+export netcdf_diag=".true."
+export binary_diag=".false."
+
+# Verification options
+export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp
+export DO_FIT2OBS="NO" # Run fit to observations package
+
+# Archiving options
+export HPSSARCH="YES" # save data to HPSS archive
+export LOCALARCH="NO" # save data to local archive
+if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then
+ echo "Both HPSS and local archiving selected. Please choose one or the other."
+ exit 2
+fi
+export ARCH_CYC=00 # Archive data at this cycle for warm_start capability
+export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability
+export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability
+
+export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh.
+
+echo "END: config.base"
diff --git a/FV3GFSwfm/test_hera/config.com b/FV3GFSwfm/test_hera/config.com
new file mode 100644
index 0000000000..6a824012c6
--- /dev/null
+++ b/FV3GFSwfm/test_hera/config.com
@@ -0,0 +1,93 @@
+# shellcheck shell=bash
+# Ignore shellcheck warnings about variables not being expanded; this is what we want
+# shellcheck disable=SC2016
+echo "BEGIN: config.com"
+
+# These are just templates. All templates must use single quotations so variable
+# expansion does not occur when this file is sourced. Substitution happens later
+# during runtime. It is recommended to use the helper function `generate_com()`,
+# which is defined in `ush/preamble.sh`, to do this substitution.
+#
+# Syntax for generate_com():
+# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]
+#
+# options:
+# -r: Make variable read-only (same as `declare -r`)
+# -x: Mark variable for declare -rx (same as `declare -x`)
+# var1, var2, etc: Variable names whose values will be generated from a template
+# and declared
+# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL")
+#
+# Examples:
+# # Current cycle and RUN
+# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS
+#
+# # Previous cycle and gdas
+# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \
+# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL
+#
+# # Current cycle and COM for first member
+# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY
+#
+
+#
+# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()`
+# must correspondingly be updated to match.
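# Aside: a minimal illustrative sketch of the deferred substitution described above.
# The helper name expand_com and its body below are assumptions for illustration only;
# the real generate_com() implementation lives in ush/preamble.sh and is not part of this diff.
expand_com() {
  local var tmpl
  for var in "$@"; do
    tmpl="${var}_TMPL"                      # e.g. COM_ATMOS_ANALYSIS -> COM_ATMOS_ANALYSIS_TMPL
    eval "declare -gx ${var}=\"${!tmpl}\""  # expand ${ROTDIR}/${RUN}.${YMD}/${HH}/... at call time
  done
}
# Example call, mirroring the usage shown above:
#   YMD=${PDY} HH=${cyc} expand_com COM_ATMOS_ANALYSIS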
+# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_LAND_ANALYSIS_TMPL=${COM_BASE}'/analysis/land' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/test_hera/config.coupled_ic b/FV3GFSwfm/test_hera/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.coupled_ic @@ -0,0 +1,43 @@ +#! 
/usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/test_hera/config.defaults.s2sw b/FV3GFSwfm/test_hera/config.defaults.s2sw new file mode 100644 index 0000000000..1b0becefec --- /dev/null +++ b/FV3GFSwfm/test_hera/config.defaults.s2sw @@ -0,0 +1,26 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." + +# config.wave +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/test_hera/config.earc b/FV3GFSwfm/test_hera/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/test_hera/config.ecen b/FV3GFSwfm/test_hera/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/test_hera/config.echgres b/FV3GFSwfm/test_hera/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/test_hera/config.echgres @@ -0,0 +1,11 @@ +#! 
/usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/test_hera/config.ediag b/FV3GFSwfm/test_hera/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/test_hera/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/test_hera/config.efcs b/FV3GFSwfm/test_hera/config.efcs new file mode 100644 index 0000000000..95c2cb58de --- /dev/null +++ b/FV3GFSwfm/test_hera/config.efcs @@ -0,0 +1,97 @@ +#! /usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE_ENS}" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. 
+ +# ocean model resolution +case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/test_hera/config.eobs b/FV3GFSwfm/test_hera/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/test_hera/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. $EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/test_hera/config.epos b/FV3GFSwfm/test_hera/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/test_hera/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/test_hera/config.esfc b/FV3GFSwfm/test_hera/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/test_hera/config.eupd b/FV3GFSwfm/test_hera/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." 
# Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." # Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/test_hera/config.fcst b/FV3GFSwfm/test_hera/config.fcst new file mode 100644 index 0000000000..e0943c33ce --- /dev/null +++ b/FV3GFSwfm/test_hera/config.fcst @@ -0,0 +1,431 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export FCSTEXEC="ufs_model_haiqin.x" +else + export FCSTEXEC="ufs_model.x" +fi + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
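# ----------------------------------------------------------------------
# Editor's sketch (illustration only): example of the argument string built
# near the top of this file and passed to config.ufs for a fully coupled
# case. The resolutions and wave-grid names below are hypothetical; the
# point is that ${waveGRD// /;} rewrites the space-separated grid list as a
# ';'-separated list before it is handed to config.ufs.
waveGRD_example="gnh_10m aoc_9km gsh_15m"
string_example="--fv3 C768 --mom6 025 --cice6 025 --ww3 ${waveGRD_example// /;}"
echo "${string_example}"   # --fv3 C768 --mom6 025 --cice6 025 --ww3 gnh_10m;aoc_9km;gsh_15m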
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".false." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +export progsigma=".true." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export imfdeepcnv=5 + export imfshalcnv=5 + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + else + export imfshalcnv=2 + fi +fi + +#Convection schemes ### JKH - affects field table name +tbp="" +if [ "$progsigma" = ".true." 
]; then tbp="_progsigma" ; fi + + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + #JKH ??? export dt_inner=40. ### JKH - 10dec + #JKH export sedi_semi=??? + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + #JKHif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_thompson" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export ltaerosol=".true." + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke${tbp}" + else + export ltaerosol=".false." + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + #JKHexport FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
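  # --------------------------------------------------------------------
  # Editor's worked example (illustration only): how the restart list loop
  # above resolves for hypothetical values restart_interval_gfs=12,
  # IAU_OFFSET=6 and FHMAX_GFS=48. The first hour is 12 + 6/2 = 15 and the
  # step is 12, so restart_interval becomes "15 27 39" (51 exceeds FHMAX_GFS).
  # The *_example names keep this sketch from touching the real settings.
  ri_example=12; iau_off_example=6; fhmax_example=48
  xfh_example=$((ri_example + iau_off_example / 2))
  rst_example=""
  while (( xfh_example <= fhmax_example )); do
    rst_example="${rst_example} ${xfh_example}"
    xfh_example=$((xfh_example + ri_example))
  done
  echo "example restart_interval:${rst_example}"   # example restart_interval: 15 27 39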
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/test_hera/config.fit2obs b/FV3GFSwfm/test_hera/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/test_hera/config.gempak b/FV3GFSwfm/test_hera/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/test_hera/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/test_hera/config.ice b/FV3GFSwfm/test_hera/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/test_hera/config.landanl b/FV3GFSwfm/test_hera/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/test_hera/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/test_hera/config.landanlfinal b/FV3GFSwfm/test_hera/config.landanlfinal new file mode 100644 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/test_hera/config.landanlfinal @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/test_hera/config.landanlinit b/FV3GFSwfm/test_hera/config.landanlinit new file mode 100644 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/test_hera/config.landanlrun b/FV3GFSwfm/test_hera/config.landanlrun new file mode 100644 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/test_hera/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/test_hera/config.metp b/FV3GFSwfm/test_hera/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
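# ----------------------------------------------------------------------
# Editor's example (illustration only): how the model_file_format template
# above is expected to resolve, assuming standard METplus string templating.
# With CDUMP=gfs, an init time of 2021032500 and a 24-hour lead,
#   pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2
# should resolve to pgbf24.gfs.2021032500.grib2. The shell equivalent below
# uses hypothetical *_example values only.
lead_example=24; init_example="2021032500"; cdump_example="gfs"
printf -v metp_file_example "pgbf%02d.%s.%s.grib2" "${lead_example}" "${cdump_example}" "${init_example}"
echo "${metp_file_example}"   # pgbf24.gfs.2021032500.grib2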
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/test_hera/config.nsst b/FV3GFSwfm/test_hera/config.nsst new file mode 100644 index 0000000000..235c91f08b --- /dev/null +++ b/FV3GFSwfm/test_hera/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "${PDY}${cyc}" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/test_hera/config.ocn b/FV3GFSwfm/test_hera/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable is determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is time interval for applying increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/test_hera/config.ocnanal b/FV3GFSwfm/test_hera/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/test_hera/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size reolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/test_hera/config.ocnanalbmat b/FV3GFSwfm/test_hera/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/test_hera/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn 
Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/test_hera/config.ocnanalchkpt b/FV3GFSwfm/test_hera/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/test_hera/config.ocnanalpost b/FV3GFSwfm/test_hera/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/test_hera/config.ocnanalprep b/FV3GFSwfm/test_hera/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/test_hera/config.ocnanalrun b/FV3GFSwfm/test_hera/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/test_hera/config.ocnanalvrfy b/FV3GFSwfm/test_hera/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/test_hera/config.ocnpost b/FV3GFSwfm/test_hera/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/test_hera/config.post b/FV3GFSwfm/test_hera/config.post new file mode 100644 index 0000000000..3fca0a7b65 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.post @@ -0,0 +1,44 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_dwn=16 +elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_dwn=24 +elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_dwn=16 +else + export npe_dwn=24 +fi + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/test_hera/config.postsnd b/FV3GFSwfm/test_hera/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/test_hera/config.prep b/FV3GFSwfm/test_hera/config.prep new file mode 100644 index 0000000000..b05b82a43e --- /dev/null +++ b/FV3GFSwfm/test_hera/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. $EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. 
Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "${PDY}${cyc}" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/test_hera/config.preplandobs b/FV3GFSwfm/test_hera/config.preplandobs new file mode 100644 index 0000000000..d69b0f7f59 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.preplandobs @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.preplandobs ########## +# Land Obs Prep specific + +echo "BEGIN: config.preplandobs" + +# Get task specific resources +. "${EXPDIR}/config.resources" preplandobs + +export FIMS_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/fims.nml.j2" +export IMS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/prep_ims.yaml" +export CALCFIMSEXE="${HOMEgfs}/exec/calcfIMS.exe" +export IMS2IODACONV="${HOMEgfs}/ush/imsfv3_scf2ioda.py" + +echo "END: config.preplandobs" diff --git a/FV3GFSwfm/test_hera/config.resources b/FV3GFSwfm/test_hera/config.resources new file mode 100644 index 0000000000..38efea7882 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.resources @@ -0,0 +1,972 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "coupled_ic aerosol_init" + echo "prep preplandobs" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_node_max=24 + elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ "${machine}" = "HERA" ]]; then + export npe_node_max=40 +elif [[ "${machine}" = "S4" ]]; then + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_node_max=32 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ "${machine}" = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ "${step}" = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "preplandobs" ]]; then + export wtime_preplandobs="00:05:00" + npe_preplandobs=1 + export npe_preplandobs + export nth_preplandobs=1 + npe_node_preplandobs=1 + export npe_node_preplandobs + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ "${step}" = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ "${step}" = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ "${step}" = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export 
memory_wavepostsbs_gfs="10GB" + +elif [[ "${step}" = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ "${step}" = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ "${step}" = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ "${step}" = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ 
"${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif [[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / 
${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ "${step}" = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ "${CASE}" = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ "${machine}" = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ "${machine}" = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ "${step}" = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ "${step}" = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export layout_x=${layout_x_gfs} + export 
layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case ${CASE} in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + #JKHdeclare -x "wtime_${step}_gfs"="06:00:00" + declare -x "wtime_${step}_gfs"="04:00:00" ## JKH - make walltime smaller + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + 
;; + esac + + unset _CDUMP _CDUMP_LIST + unset NTASKS_TOT + +elif [[ "${step}" = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ "${machine}" == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requiremtn + npe_ocnpost=2 + fi + +elif [[ "${step}" = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="00:20:00" ## JKH - make walltime smaller + #JKH export wtime_post_gfs="01:00:00" + #JKH export npe_post=126 + export npe_post=${npe_node_max} ## JKH - change to use 1 node for post + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ "${step}" = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ "${step}" = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ "${step}" = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ "${step}" = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ "${step}" = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ "${step}" = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ "${step}" = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ "${machine}" == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ "${machine}" == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ "${step}" = "echgres" ]]; 
then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif [[ "${step}" = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ "${step}" = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ "${step}" = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ "${step}" = "arch" || "${step}" = "earc" || "${step}" = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ "${step}" = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ "${step}" = "eobs" || "${step}" = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eobs=200 + elif [[ "${CASE}" = "C384" ]]; then + export npe_eobs=100 + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ "${machine}" = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ "${step}" = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ "${step}" = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if 
[[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ "${CASE}" = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ "${machine}" = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ "${step}" = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ "${CASE}" = "C384" || "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ "${step}" = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ "${step}" = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ "${step}" = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ "${step}" = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/test_hera/config.resources.nco.static b/FV3GFSwfm/test_hera/config.resources.nco.static new file mode 100644 index 0000000000..d98e985b95 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.resources.nco.static @@ -0,0 +1,344 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + +elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + 
export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/test_hera/config.sfcanl b/FV3GFSwfm/test_hera/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/test_hera/config.ufs b/FV3GFSwfm/test_hera/config.ufs new file mode 100644 index 0000000000..5d3a16b329 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.ufs @@ -0,0 +1,373 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" + echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" 
+ exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + 
export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" 
+ exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "glo_025") + ntasks_ww3=262 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/test_hera/config.vrfy b/FV3GFSwfm/test_hera/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = 
"YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/test_hera/config.wafs b/FV3GFSwfm/test_hera/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. $EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/test_hera/config.wafsblending b/FV3GFSwfm/test_hera/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/test_hera/config.wafsblending0p25 b/FV3GFSwfm/test_hera/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/test_hera/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/test_hera/config.wafsgcip b/FV3GFSwfm/test_hera/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/test_hera/config.wafsgrib2 b/FV3GFSwfm/test_hera/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. 
$EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/test_hera/config.wafsgrib20p25 b/FV3GFSwfm/test_hera/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/test_hera/config.wave b/FV3GFSwfm/test_hera/config.wave new file mode 100644 index 0000000000..ba7b7ad259 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.wave @@ -0,0 +1,193 @@ +#! /usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +export waveGRD=${waveGRD:-'mx025'} + +#grid dependent variable defaults +export waveGRDN='1' # grid number for ww3_multi +export waveGRDG='10' # grid group for ww3_multi +export USE_WAV_RMP='NO' # YES/NO rmp grid remapping pre-processed coefficients +export waveMULTIGRID='.false.' # .true./.false. for multi or shel +export MESH_WAV="mesh.${waveGRD}.nc" # Mesh grid for wave model for CMEPS +export waveesmfGRD=' ' # input grid for multigrid + +#Grid dependent variables for various grids +case "${waveGRD}" in + "gnh_10m;aoc_9km;gsh_15m") + #GFSv16 settings: + export waveGRDN='1 2 3' + export waveGRDG='10 20 30' + export USE_WAV_RMP='YES' + export waveMULTIGRID='.true.' 
+ export IOSRV='3' + export MESH_WAV=' ' + export waveesmfGRD='glox_10m' + export waveuoutpGRD='points' + export waveinterpGRD='glo_15mxt at_10m ep_10m wc_10m glo_30m' + export wavepostGRD='gnh_10m aoc_9km gsh_15m' + ;; + "gwes_30m") + #Grid used for P8 + export waveinterpGRD=' ' + export wavepostGRD='gwes_30m' + ;; + "mx025") + #Grid used for HR1 (tripolar 1/4 deg) + export waveinterpGRD='reg025' + export wavepostGRD=' ' + ;; + "glo_025") + #GEFSv13 regular lat/lon 1/4 deg grid + export waveinterpGRD=' ' + export wavepostGRD='glo_025' + ;; + *) + echo "No grid specific wave config values" + ;; +esac + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-${waveGRD}} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-${waveGRD}} # Native computational grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if 
ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_shel/multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'1'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/test_hera/config.waveawipsbulls b/FV3GFSwfm/test_hera/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/test_hera/config.waveawipsgridded b/FV3GFSwfm/test_hera/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/test_hera/config.wavegempak b/FV3GFSwfm/test_hera/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/test_hera/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/test_hera/config.waveinit b/FV3GFSwfm/test_hera/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. 
$EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/test_hera/config.wavepostbndpnt b/FV3GFSwfm/test_hera/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/test_hera/config.wavepostbndpntbll b/FV3GFSwfm/test_hera/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/test_hera/config.wavepostpnt b/FV3GFSwfm/test_hera/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/test_hera/config.wavepostsbs b/FV3GFSwfm/test_hera/config.wavepostsbs new file mode 100644 index 0000000000..f9f8c81d44 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.wavepostsbs @@ -0,0 +1,28 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +if [[ -z ${waveinterpGRD} ]]; then + export DOGRI_WAV='YES' # Create interpolated grids +else + export DOGRI_WAV='NO' # Do not create interpolated grids +fi +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/test_hera/config.waveprep b/FV3GFSwfm/test_hera/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/test_hera/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. 
$EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/test_hera/runcmds b/FV3GFSwfm/test_hera/runcmds new file mode 100644 index 0000000000..d799d2a725 --- /dev/null +++ b/FV3GFSwfm/test_hera/runcmds @@ -0,0 +1,7 @@ + + +rocotorun -w test_hera_12x12.xml -d test_hera_12x12.db +rocotostat -w test_hera_12x12.xml -d test_hera_12x12.db + + +rocotorun -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/test_hera/test_hera_12x12.xml -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/test_hera/test_hera_12x12.db diff --git a/FV3GFSwfm/test_hera/test_hera.crontab b/FV3GFSwfm/test_hera/test_hera.crontab new file mode 100644 index 0000000000..c2db803ce9 --- /dev/null +++ b/FV3GFSwfm/test_hera/test_hera.crontab @@ -0,0 +1,5 @@ + +#################### test_hera #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/test_hera/test_hera.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/test_hera/test_hera.xml +################################################################# diff --git a/FV3GFSwfm/test_hera/test_hera_12x12.xml b/FV3GFSwfm/test_hera/test_hera_12x12.xml new file mode 100644 index 0000000000..2feb840b06 --- /dev/null +++ b/FV3GFSwfm/test_hera/test_hera_12x12.xml @@ -0,0 +1,118 @@ + + + + + + + + + + + +]> + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/test_hera/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3 + batch + hera + 02:10:00 + + 101:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa + EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/test_hera + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/RUNDIRS/test_hera + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3 + batch + hera + 01:00:00 + 4:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa + 
EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/test_hera + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/RUNDIRS/test_hera + FHRGRP#grp# + FHRLST#lst# + ROTDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/test_hera + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + diff --git a/FV3GFSwfm/test_jet/config.aero b/FV3GFSwfm/test_jet/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/test_jet/config.aeroanl b/FV3GFSwfm/test_jet/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/test_jet/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/test_jet/config.aeroanlfinal b/FV3GFSwfm/test_jet/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/test_jet/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. 
$EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/test_jet/config.aeroanlinit b/FV3GFSwfm/test_jet/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/test_jet/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/test_jet/config.aeroanlrun b/FV3GFSwfm/test_jet/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/test_jet/config.aerosol_init b/FV3GFSwfm/test_jet/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.aerosol_init @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/test_jet/config.anal b/FV3GFSwfm/test_jet/config.anal new file mode 100644 index 0000000000..e3a17f9c6a --- /dev/null +++ b/FV3GFSwfm/test_jet/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=45,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. 
in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${PDY}${cyc}" -ge "2020052612" && "${PDY}${cyc}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${PDY}${cyc}" -ge "2020082412" && "${PDY}${cyc}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ "${PDY}${cyc}" -ge "2020091612" && "${PDY}${cyc}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${PDY}${cyc}" -ge "2021031712" && "${PDY}${cyc}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. 
+ # Assimilate COSMIC-2 GPS
+ # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then
+ # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312
+ # fi
+
+ # Turn off assimilation of OMPS during period of bad data
+ if [[ "${PDY}${cyc}" -ge "2020011600" && "${PDY}${cyc}" -lt "2020011806" ]]; then
+ export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600
+ fi
+
+
+ # Set satinfo for start of GFS v16 parallels
+ if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then
+ export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900
+ fi
+
+ # Turn on assimilation of Metop-C AMSUA and MHS
+ if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020022012" ]]; then
+ export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706
+ fi
+
+ # Turn off assimilation of Metop-A MHS
+ if [[ "${PDY}${cyc}" -ge "2020022012" && "${PDY}${cyc}" -lt "2021052118" ]]; then
+ export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012
+ fi
+
+ # Turn off assimilation of S-NPP CrIS
+ if [[ "${PDY}${cyc}" -ge "2021052118" && "${PDY}${cyc}" -lt "2021092206" ]]; then
+ export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118
+ fi
+
+ # Turn off assimilation of MetOp-A IASI
+ if [[ "${PDY}${cyc}" -ge "2021092206" && "${PDY}${cyc}" -lt "2021102612" ]]; then
+ export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206
+ fi
+
+ # NOTE:
+ # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is
+ # identical to ../global_satinfo.txt. Thus, the logic below is not
+ # needed at this time
+ #
+ # Turn off assimilation of all Metop-A MHS
+ # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then
+ # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312
+ # fi
+fi
+
+echo "END: config.anal"
diff --git a/FV3GFSwfm/test_jet/config.analcalc b/FV3GFSwfm/test_jet/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/test_jet/config.analcalc @@ -0,0 +1,15 @@
+#! /usr/bin/env bash
+
+########## config.analcalc ##########
+# GFS post-anal specific (non-diag)
+
+echo "BEGIN: config.analcalc"
+
+# Get task specific resources
+. $EXPDIR/config.resources analcalc
+
+if [[ "$CDUMP" == "gfs" ]]; then
+ export nth_echgres=$nth_echgres_gfs
+fi
+
+echo "END: config.analcalc"
diff --git a/FV3GFSwfm/test_jet/config.analdiag b/FV3GFSwfm/test_jet/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/test_jet/config.analdiag @@ -0,0 +1,11 @@
+#! /usr/bin/env bash
+
+########## config.analdiag ##########
+# GFS post-anal specific (diag)
+
+echo "BEGIN: config.analdiag"
+
+# Get task specific resources
+. $EXPDIR/config.resources analdiag
+
+echo "END: config.analdiag"
diff --git a/FV3GFSwfm/test_jet/config.arch b/FV3GFSwfm/test_jet/config.arch new file mode 100644 index 0000000000..6a0f6306a8 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.arch @@ -0,0 +1,24 @@
+#! /usr/bin/env bash
+
+########## config.arch ##########
+# Archive specific
+
+echo "BEGIN: config.arch"
+
+# Get task specific resources
+. 
"${EXPDIR}/config.resources" arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +echo "END: config.arch" diff --git a/FV3GFSwfm/test_jet/config.atmanl b/FV3GFSwfm/test_jet/config.atmanl new file mode 100644 index 0000000000..c045704fa2 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.atmanl @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/test_jet/config.atmanlfinal b/FV3GFSwfm/test_jet/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/test_jet/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/test_jet/config.atmanlinit b/FV3GFSwfm/test_jet/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/test_jet/config.atmanlrun b/FV3GFSwfm/test_jet/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/test_jet/config.atmensanl b/FV3GFSwfm/test_jet/config.atmensanl new file mode 100644 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.atmensanl @@ -0,0 +1,22 @@ +#! 
/usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/test_jet/config.atmensanlfinal b/FV3GFSwfm/test_jet/config.atmensanlfinal new file mode 100644 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/test_jet/config.atmensanlinit b/FV3GFSwfm/test_jet/config.atmensanlinit new file mode 100644 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/test_jet/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/test_jet/config.atmensanlrun b/FV3GFSwfm/test_jet/config.atmensanlrun new file mode 100644 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/test_jet/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/test_jet/config.awips b/FV3GFSwfm/test_jet/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/test_jet/config.base b/FV3GFSwfm/test_jet/config.base new file mode 100644 index 0000000000..9eed77549e --- /dev/null +++ b/FV3GFSwfm/test_jet/config.base @@ -0,0 +1,380 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="hfv3gfs" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +#JKHexport PARTITION_BATCH="kjet" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT="fim" + +# Directories relative to installation areas: +export HOMEgfs=/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR=" /lfs1/BMC/gsd-fv3-test/$USER/scratch/emc_gw" +export STMP=" /lfs1/BMC/gsd-fv3-test/$USER/scratch/stmp" +export PTMP=" /lfs1/BMC/gsd-fv3-test/$USER/scratch/ptmp" +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022110900 +export EDATE=2022110900 +export EXP_WARM_START=".false." 
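+# SDATE/EDATE above are the first and last cycles of the experiment (YYYYMMDDHH);
+# EXP_WARM_START=".false." cold-starts the first cycle from external initial conditions.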
+export assim_freq=6 +export PSLOT="test" +export EXPDIR="/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/${PSLOT}" +export ROTDIR="/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/NCEPDEV/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='mx025' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="NO" +export DO_JEDIATMENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# Turn on NSST in the anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# scripts default to binary diagnostic files. Set diagnostic file +# variables here since they are used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/test_jet/config.com b/FV3GFSwfm/test_jet/config.com new file mode 100644 index 0000000000..6a824012c6 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.com @@ -0,0 +1,93 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotes so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# defined in `ush/preamble.sh`, to do this substitution. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `declare -r`) +# -x: Mark variable for export (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. 
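+# Worked example (illustrative values only; the paths and dates below are assumptions,
+# not settings from this experiment):
+#   with ROTDIR=/path/to/comrot, RUN=gfs, PDY=20221109, cyc=00,
+#     MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS
+#   resolves COM_ATMOS_ANALYSIS_TMPL (defined below) to
+#     /path/to/comrot/gfs.20221109/00/mem001/analysis/atmos
+#   and declares COM_ATMOS_ANALYSIS as a read-only, exported variable.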
+# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_LAND_ANALYSIS_TMPL=${COM_BASE}'/analysis/land' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/test_jet/config.coupled_ic b/FV3GFSwfm/test_jet/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.coupled_ic @@ -0,0 +1,43 @@ +#! 
/usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/test_jet/config.defaults.s2sw b/FV3GFSwfm/test_jet/config.defaults.s2sw new file mode 100644 index 0000000000..1b0becefec --- /dev/null +++ b/FV3GFSwfm/test_jet/config.defaults.s2sw @@ -0,0 +1,26 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." + +# config.wave +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/test_jet/config.earc b/FV3GFSwfm/test_jet/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/test_jet/config.ecen b/FV3GFSwfm/test_jet/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/test_jet/config.echgres b/FV3GFSwfm/test_jet/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/test_jet/config.echgres @@ -0,0 +1,11 @@ +#! 
/usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/test_jet/config.ediag b/FV3GFSwfm/test_jet/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/test_jet/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/test_jet/config.efcs b/FV3GFSwfm/test_jet/config.efcs new file mode 100644 index 0000000000..95c2cb58de --- /dev/null +++ b/FV3GFSwfm/test_jet/config.efcs @@ -0,0 +1,97 @@ +#! /usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE_ENS}" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. 
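+# (i.e. wave coupling stays off for the ensemble member forecasts regardless of the
+# DO_WAVE settings above; config.fcst controls cplwav for the deterministic forecast)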
+ +# ocean model resolution +case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/test_jet/config.eobs b/FV3GFSwfm/test_jet/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/test_jet/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. $EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/test_jet/config.epos b/FV3GFSwfm/test_jet/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/test_jet/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/test_jet/config.esfc b/FV3GFSwfm/test_jet/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/test_jet/config.eupd b/FV3GFSwfm/test_jet/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." 
# EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." # Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/test_jet/config.fcst b/FV3GFSwfm/test_jet/config.fcst new file mode 100644 index 0000000000..e0943c33ce --- /dev/null +++ b/FV3GFSwfm/test_jet/config.fcst @@ -0,0 +1,431 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export FCSTEXEC="ufs_model_haiqin.x" +else + export FCSTEXEC="ufs_model.x" +fi + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
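+# (for reference: with LEVS=128, the launch_level expression used below,
+#  $(echo "$LEVS/2.35" | bc), evaluates to 54)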
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".false." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +export progsigma=".true." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export imfdeepcnv=5 + export imfshalcnv=5 + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + else + export imfshalcnv=2 + fi +fi + +#Convection schemes ### JKH - affects field table name +tbp="" +if [ "$progsigma" = ".true." 
]; then tbp="_progsigma" ; fi + + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + #JKH ??? export dt_inner=40. ### JKH - 10dec + #JKH export sedi_semi=??? + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + #JKHif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_thompson" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export ltaerosol=".true." + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke${tbp}" + else + export ltaerosol=".false." + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + #JKHexport FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
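+  # Worked example for the restart_interval loop above (assumed values, not this
+  # configuration's defaults): with restart_interval_gfs=12, IAU_OFFSET=6 and
+  # FHMAX_GFS=48, xfh starts at 12+(6/2)=15 and the loop produces the list "15 27 39".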
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/test_jet/config.fcst_gsl b/FV3GFSwfm/test_jet/config.fcst_gsl new file mode 100644 index 0000000000..8e2340070c --- /dev/null +++ b/FV3GFSwfm/test_jet/config.fcst_gsl @@ -0,0 +1,455 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi + +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. 
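+# (do_RRTMGP=.false. above keeps the default RRTMG radiation scheme; the doGP_* flags
+#  only take effect when RRTMGP is enabled)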
+ +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. ### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + export sedi_semi=.false. ## JKH - 14sep + export decfl=8 ## JKH - 14sep + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export ltaerosol=".false." + export sedi_semi=.true. ## JKH - 14sep + export decfl=10 ## JKH - 14sep + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. 
]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." 
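+  # With the IAU setting above, restart_interval="3 6" requests restart files at
+  # forecast hours 3 and 6 (per the restart_interval conventions described above).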
+ +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + if [[ "$CCPP_SUITE" = "FV3_RAP_cires_ugwp" ]]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_gsl_ruc" + elif [[ "$CCPP_SUITE" = "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + fi + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will override these values for ensemble forecasts +# if these variables are redefined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/test_jet/config.fit2obs b/FV3GFSwfm/test_jet/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/test_jet/config.gempak b/FV3GFSwfm/test_jet/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/test_jet/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. 
$EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/test_jet/config.ice b/FV3GFSwfm/test_jet/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/test_jet/config.landanl b/FV3GFSwfm/test_jet/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/test_jet/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/test_jet/config.landanlfinal b/FV3GFSwfm/test_jet/config.landanlfinal new file mode 100644 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/test_jet/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/test_jet/config.landanlinit b/FV3GFSwfm/test_jet/config.landanlinit new file mode 100644 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/test_jet/config.landanlrun b/FV3GFSwfm/test_jet/config.landanlrun new file mode 100644 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/test_jet/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/test_jet/config.metp b/FV3GFSwfm/test_jet/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. +export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export 
precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/test_jet/config.nsst b/FV3GFSwfm/test_jet/config.nsst new file mode 100644 index 0000000000..235c91f08b --- /dev/null +++ b/FV3GFSwfm/test_jet/config.nsst @@ -0,0 +1,33 @@ +#! /usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "${PDY}${cyc}" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/test_jet/config.ocn b/FV3GFSwfm/test_jet/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.ocn @@ -0,0 +1,23 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/test_jet/config.ocnanal b/FV3GFSwfm/test_jet/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/test_jet/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/test_jet/config.ocnanalbmat b/FV3GFSwfm/test_jet/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/test_jet/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/test_jet/config.ocnanalchkpt b/FV3GFSwfm/test_jet/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/test_jet/config.ocnanalpost b/FV3GFSwfm/test_jet/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/test_jet/config.ocnanalprep b/FV3GFSwfm/test_jet/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/test_jet/config.ocnanalrun b/FV3GFSwfm/test_jet/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/test_jet/config.ocnanalvrfy b/FV3GFSwfm/test_jet/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/test_jet/config.ocnpost b/FV3GFSwfm/test_jet/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/test_jet/config.post b/FV3GFSwfm/test_jet/config.post new file mode 100644 index 0000000000..3fca0a7b65 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.post @@ -0,0 +1,44 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_dwn=16 +elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_dwn=24 +elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_dwn=16 +else + export npe_dwn=24 +fi + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/test_jet/config.postsnd b/FV3GFSwfm/test_jet/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.postsnd @@ -0,0 +1,15 @@ +#! 
/usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/test_jet/config.prep b/FV3GFSwfm/test_jet/config.prep new file mode 100644 index 0000000000..b05b82a43e --- /dev/null +++ b/FV3GFSwfm/test_jet/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. $EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "${PDY}${cyc}" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/test_jet/config.preplandobs b/FV3GFSwfm/test_jet/config.preplandobs new file mode 100644 index 0000000000..d69b0f7f59 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.preplandobs @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.preplandobs ########## +# Land Obs Prep specific + +echo "BEGIN: config.preplandobs" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" preplandobs + +export FIMS_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/fims.nml.j2" +export IMS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/prep_ims.yaml" +export CALCFIMSEXE="${HOMEgfs}/exec/calcfIMS.exe" +export IMS2IODACONV="${HOMEgfs}/ush/imsfv3_scf2ioda.py" + +echo "END: config.preplandobs" diff --git a/FV3GFSwfm/test_jet/config.resources b/FV3GFSwfm/test_jet/config.resources new file mode 100644 index 0000000000..38efea7882 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.resources @@ -0,0 +1,972 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" + echo "argument can be any one of the following:" + echo "coupled_ic aerosol_init" + echo "prep preplandobs" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_node_max=24 + elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ "${machine}" = "HERA" ]]; then + export npe_node_max=40 +elif [[ "${machine}" = "S4" ]]; then + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_node_max=32 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ "${machine}" = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ "${step}" = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "preplandobs" ]]; then + export wtime_preplandobs="00:05:00" + npe_preplandobs=1 + export npe_preplandobs + export nth_preplandobs=1 + npe_node_preplandobs=1 + export npe_node_preplandobs + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ "${step}" = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ "${step}" = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + 
export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ "${step}" = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ "${step}" = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ "${step}" = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ "${step}" = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ "${step}" = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / 
${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif [[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" 
+ exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ "${step}" = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ "${CASE}" = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ "${machine}" = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export 
nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ "${machine}" = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ "${step}" = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ "${step}" = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. 
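+    # For illustration only (hypothetical counts): with ntasks_fv3=96, nthreads_fv3=4 and
+    # ntasks_quilt=8, FV3PETS=384, QUILTPETS=32 and ATMPETS=416; MEDPETS would then default
+    # to ATMPETS and be capped at 300 by the check below.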
+ # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case ${CASE} in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + #JKHdeclare -x "wtime_${step}_gfs"="06:00:00" + declare -x "wtime_${step}_gfs"="04:00:00" ## JKH - make walltime smaller + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + unset NTASKS_TOT + +elif [[ "${step}" = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ "${machine}" == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requiremtn + npe_ocnpost=2 + fi + +elif [[ "${step}" = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="00:20:00" ## JKH - make walltime smaller + #JKH export wtime_post_gfs="01:00:00" + #JKH export npe_post=126 + export npe_post=${npe_node_max} ## JKH - change to use 1 node for post + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ "${step}" = "wafs" ]]; then + + 
export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ "${step}" = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ "${step}" = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ "${step}" = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ "${step}" = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ "${step}" = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ "${step}" = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ "${machine}" == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ "${machine}" == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ "${step}" = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif [[ "${step}" = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ "${step}" = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ "${step}" = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ "${step}" = "arch" || "${step}" = "earc" || "${step}" = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ "${step}" = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export 
nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ "${step}" = "eobs" || "${step}" = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eobs=200 + elif [[ "${CASE}" = "C384" ]]; then + export npe_eobs=100 + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ "${machine}" = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ "${step}" = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ "${step}" = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ "${CASE}" = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ "${machine}" = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ "${step}" = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ "${CASE}" = "C384" || "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo 
"${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ "${step}" = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ "${step}" = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ "${step}" = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ "${step}" = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/test_jet/config.resources.nco.static b/FV3GFSwfm/test_jet/config.resources.nco.static new file mode 100644 index 0000000000..d98e985b95 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.resources.nco.static @@ -0,0 +1,344 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + +elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + 
export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/test_jet/config.sfcanl b/FV3GFSwfm/test_jet/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/test_jet/config.ufs b/FV3GFSwfm/test_jet/config.ufs new file mode 100644 index 0000000000..a253f9043a --- /dev/null +++ b/FV3GFSwfm/test_jet/config.ufs @@ -0,0 +1,373 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" + echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" 
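+ # For reference, the resolution flags are passed in pairs; a hypothetical coupled
+ # C768 setup might source this file as:
+ #   . ${EXPDIR}/config.ufs --fv3 C768 --mom6 025 --cice6 025 --ww3 mx025
+ # Any of the defaults exported below can then be overridden after sourcing.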
+ exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + #JKHexport WRITE_GROUP_GFS=4 + #export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export 
WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" 
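+ # CICE6 reuses nthreads_mom6 and the MOM6 domain (NX_GLB/NY_GLB) set in the
+ # MOM6 section above, so a configuration with --cice6 but without --mom6 is rejected here.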
+ exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "glo_025") + ntasks_ww3=262 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/test_jet/config.vrfy b/FV3GFSwfm/test_jet/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" 
]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/test_jet/config.wafs b/FV3GFSwfm/test_jet/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. $EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/test_jet/config.wafsblending b/FV3GFSwfm/test_jet/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/test_jet/config.wafsblending0p25 b/FV3GFSwfm/test_jet/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/test_jet/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/test_jet/config.wafsgcip b/FV3GFSwfm/test_jet/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/test_jet/config.wafsgrib2 b/FV3GFSwfm/test_jet/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. 
$EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/test_jet/config.wafsgrib20p25 b/FV3GFSwfm/test_jet/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/test_jet/config.wave b/FV3GFSwfm/test_jet/config.wave new file mode 100644 index 0000000000..ba7b7ad259 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.wave @@ -0,0 +1,193 @@ +#! /usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +export waveGRD=${waveGRD:-'mx025'} + +#grid dependent variable defaults +export waveGRDN='1' # grid number for ww3_multi +export waveGRDG='10' # grid group for ww3_multi +export USE_WAV_RMP='NO' # YES/NO rmp grid remapping pre-processed coefficients +export waveMULTIGRID='.false.' # .true./.false. for multi or shel +export MESH_WAV="mesh.${waveGRD}.nc" # Mesh grid for wave model for CMEPS +export waveesmfGRD=' ' # input grid for multigrid + +#Grid dependent variables for various grids +case "${waveGRD}" in + "gnh_10m;aoc_9km;gsh_15m") + #GFSv16 settings: + export waveGRDN='1 2 3' + export waveGRDG='10 20 30' + export USE_WAV_RMP='YES' + export waveMULTIGRID='.true.' 
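+ # Legacy GFSv16 setup: the three native grids are run through ww3_multi
+ # (waveMULTIGRID='.true.'); the single-grid cases below keep the ww3_shel
+ # default set above.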
+ export IOSRV='3' + export MESH_WAV=' ' + export waveesmfGRD='glox_10m' + export waveuoutpGRD='points' + export waveinterpGRD='glo_15mxt at_10m ep_10m wc_10m glo_30m' + export wavepostGRD='gnh_10m aoc_9km gsh_15m' + ;; + "gwes_30m") + #Grid used for P8 + export waveinterpGRD=' ' + export wavepostGRD='gwes_30m' + ;; + "mx025") + #Grid used for HR1 (tripolar 1/4 deg) + export waveinterpGRD='reg025' + export wavepostGRD=' ' + ;; + "glo_025") + #GEFSv13 regular lat/lon 1/4 deg grid + export waveinterpGRD=' ' + export wavepostGRD='glo_025' + ;; + *) + echo "No grid specific wave config values" + ;; +esac + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-${waveGRD}} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-${waveGRD}} # Native computational grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if 
ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_shel/multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'1'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/test_jet/config.waveawipsbulls b/FV3GFSwfm/test_jet/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/test_jet/config.waveawipsgridded b/FV3GFSwfm/test_jet/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/test_jet/config.wavegempak b/FV3GFSwfm/test_jet/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/test_jet/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/test_jet/config.waveinit b/FV3GFSwfm/test_jet/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. 
$EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/test_jet/config.wavepostbndpnt b/FV3GFSwfm/test_jet/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/test_jet/config.wavepostbndpntbll b/FV3GFSwfm/test_jet/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/test_jet/config.wavepostpnt b/FV3GFSwfm/test_jet/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/test_jet/config.wavepostsbs b/FV3GFSwfm/test_jet/config.wavepostsbs new file mode 100644 index 0000000000..f9f8c81d44 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.wavepostsbs @@ -0,0 +1,28 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +if [[ -z ${waveinterpGRD} ]]; then + export DOGRI_WAV='YES' # Create interpolated grids +else + export DOGRI_WAV='NO' # Do not create interpolated grids +fi +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/test_jet/config.waveprep b/FV3GFSwfm/test_jet/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/test_jet/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. 
$EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/test_jet/test.crontab b/FV3GFSwfm/test_jet/test.crontab new file mode 100644 index 0000000000..236d36dc52 --- /dev/null +++ b/FV3GFSwfm/test_jet/test.crontab @@ -0,0 +1,5 @@ + +#################### test #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test/test.db -w /lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test/test.xml +################################################################# diff --git a/FV3GFSwfm/test_jet/test_168h_3hrly.xml b/FV3GFSwfm/test_jet/test_168h_3hrly.xml new file mode 100644 index 0000000000..b6d82b0004 --- /dev/null +++ b/FV3GFSwfm/test_jet/test_168h_3hrly.xml @@ -0,0 +1,260 @@ + + + + + + +]> + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/coupled_ic.sh + + &PSLOT;_gfscoupled_ic_@H + hfv3gfs + batch + xjet + 00:15:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfscoupled_ic.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_ctrl.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile6.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile6.nc + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + hfv3gfs + batch + xjet + 06:00:00 + 192:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + 
NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + + _f000-f003 _f006-f009 _f012-f015 _f018-f021 _f024-f027 _f030-f033 _f036-f039 _f042-f045 _f048-f051 _f054-f057 _f060-f063 _f066-f069 _f072-f075 _f078-f081 _f084-f087 _f090-f090 _f093-f093 _f096-f096 _f099-f099 _f102-f102 _f105-f105 _f108-f108 _f111-f111 _f114-f114 _f117-f117 _f120-f120 _f123-f123 _f126-f126 _f129-f129 _f132-f132 _f135-f135 _f138-f138 _f141-f141 _f144-f144 _f147-f147 _f150-f150 _f153-f153 _f156-f156 _f159-f159 _f162-f162 _f165-f165 _f168-f168 + f003 f009 f015 f021 f027 f033 f039 f045 f051 f057 f063 f069 f075 f081 f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 f123 f126 f129 f132 f135 f138 f141 f144 f147 f150 f153 f156 f159 f162 f165 f168 + f000_f003 f006_f009 f012_f015 f018_f021 f024_f027 f030_f033 f036_f039 f042_f045 f048_f051 f054_f057 f060_f063 f066_f069 f072_f075 f078_f081 f084_f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 f123 f126 f129 f132 f135 f138 f141 f144 f147 f150 f153 f156 f159 f162 f165 f168 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + hfv3gfs + batch + xjet + 01:00:00 + 6:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + FHRGRP#grp# + FHRLST#lst# + ROTDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSrun/test + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + hfv3gfs + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + g2g1 g2o1 pcp1 + + + + &JOBS_DIR;/metp.sh + + &PSLOT;_gfsmetp#metpcase#_@H + hfv3gfs + batch + xjet + 06:00:00 + 1:ppn=4:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsmetp#metpcase#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + SDATE_GFSNone + METPCASE#metpcase# + + + + + + + + + + + + + + &JOBS_DIR;/arch.sh + + &PSLOT;_gfsarch_@H + hfv3gfs + batch + service + 06:00:00 + 1:ppn=1:tpp=1 + 4096M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsarch.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + diff --git a/FV3GFSwfm/test_jet/test_32npe_post.xml b/FV3GFSwfm/test_jet/test_32npe_post.xml new file 
mode 100644 index 0000000000..d41345e78a --- /dev/null +++ b/FV3GFSwfm/test_jet/test_32npe_post.xml @@ -0,0 +1,260 @@ + + + + + + +]> + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/coupled_ic.sh + + &PSLOT;_gfscoupled_ic_@H + hfv3gfs + batch + xjet + 00:15:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfscoupled_ic.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_ctrl.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile6.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile6.nc + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + hfv3gfs + batch + xjet + 06:00:00 + 168:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + + _f000-f003 _f006-f009 _f012-f015 _f018-f021 _f024-f027 _f030-f033 _f036-f039 _f042-f045 _f048-f051 _f054-f057 _f060-f063 _f066-f069 _f072-f075 _f078-f081 _f084-f087 _f090-f090 _f093-f093 _f096-f096 _f099-f099 _f102-f102 _f105-f105 _f108-f108 _f111-f111 _f114-f114 _f117-f117 _f120-f120 _f123-f123 _f126-f126 _f129-f129 _f132-f132 _f135-f135 _f138-f138 _f141-f141 _f144-f144 _f147-f147 _f150-f150 _f153-f153 _f156-f156 _f159-f159 _f162-f162 _f165-f165 _f168-f168 + f003 f009 f015 f021 f027 f033 f039 f045 f051 f057 f063 f069 f075 f081 f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 f123 f126 f129 f132 f135 f138 f141 f144 f147 f150 f153 f156 f159 f162 f165 f168 + f000_f003 f006_f009 f012_f015 f018_f021 f024_f027 f030_f033 f036_f039 f042_f045 f048_f051 f054_f057 f060_f063 f066_f069 f072_f075 f078_f081 f084_f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 f123 f126 f129 f132 f135 
f138 f141 f144 f147 f150 f153 f156 f159 f162 f165 f168 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + hfv3gfs + batch + xjet + 01:00:00 + 2:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + FHRGRP#grp# + FHRLST#lst# + ROTDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSrun/test + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + hfv3gfs + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + g2g1 g2o1 pcp1 + + + + &JOBS_DIR;/metp.sh + + &PSLOT;_gfsmetp#metpcase#_@H + hfv3gfs + batch + xjet + 06:00:00 + 1:ppn=4:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsmetp#metpcase#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + SDATE_GFSNone + METPCASE#metpcase# + + + + + + + + + + + + + + &JOBS_DIR;/arch.sh + + &PSLOT;_gfsarch_@H + hfv3gfs + batch + service + 06:00:00 + 1:ppn=1:tpp=1 + 4096M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsarch.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + diff --git a/FV3GFSwfm/test_jet/test_kjet_12x16.xml b/FV3GFSwfm/test_jet/test_kjet_12x16.xml new file mode 100644 index 0000000000..7784f0d882 --- /dev/null +++ b/FV3GFSwfm/test_jet/test_kjet_12x16.xml @@ -0,0 +1,260 @@ + + + + + + +]> + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/coupled_ic.sh + + &PSLOT;_gfscoupled_ic_@H + hfv3gfs + batch + kjet + 00:15:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfscoupled_ic.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_ctrl.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile2.nc + 
/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile6.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile6.nc + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + hfv3gfs + batch + kjet + 06:00:00 + 140:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + + _f000-f000 _f003-f003 _f006-f006 _f009-f009 _f012-f012 _f015-f015 _f018-f018 _f021-f021 _f024-f024 _f027-f027 _f030-f030 _f033-f033 _f036-f036 _f039-f039 _f042-f042 _f045-f045 _f048-f048 _f051-f051 _f054-f054 _f057-f057 _f060-f060 _f063-f063 _f066-f066 _f069-f069 _f072-f072 _f075-f075 _f078-f078 _f081-f081 _f084-f084 _f087-f087 _f090-f090 _f093-f093 _f096-f096 _f099-f099 _f102-f102 _f105-f105 _f108-f108 _f111-f111 _f114-f114 _f117-f117 _f120-f120 + f000 f003 f006 f009 f012 f015 f018 f021 f024 f027 f030 f033 f036 f039 f042 f045 f048 f051 f054 f057 f060 f063 f066 f069 f072 f075 f078 f081 f084 f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 + f000 f003 f006 f009 f012 f015 f018 f021 f024 f027 f030 f033 f036 f039 f042 f045 f048 f051 f054 f057 f060 f063 f066 f069 f072 f075 f078 f081 f084 f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + hfv3gfs + batch + kjet + 01:00:00 + 4:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + FHRGRP#grp# + FHRLST#lst# + ROTDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSrun/test + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + hfv3gfs + batch + kjet + 06:00:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT 
/lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + g2g1 g2o1 pcp1 + + + + &JOBS_DIR;/metp.sh + + &PSLOT;_gfsmetp#metpcase#_@H + hfv3gfs + batch + kjet + 06:00:00 + 1:ppn=4:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsmetp#metpcase#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + SDATE_GFSNone + METPCASE#metpcase# + + + + + + + + + + + + + + &JOBS_DIR;/arch.sh + + &PSLOT;_gfsarch_@H + hfv3gfs + batch + service + 06:00:00 + 1:ppn=1:tpp=1 + 4096M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsarch.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + diff --git a/FV3GFSwfm/test_jet/test_vjet_12x12_2wg.xml b/FV3GFSwfm/test_jet/test_vjet_12x12_2wg.xml new file mode 100644 index 0000000000..955d6915cd --- /dev/null +++ b/FV3GFSwfm/test_jet/test_vjet_12x12_2wg.xml @@ -0,0 +1,260 @@ + + + + + + +]> + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/coupled_ic.sh + + &PSLOT;_gfscoupled_ic_@H + hfv3gfs + batch + vjet + 00:15:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfscoupled_ic.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_ctrl.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile6.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile6.nc + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + hfv3gfs + batch + vjet + 06:00:00 + 246:ppn=16:tpp=1 + --export=NONE + + 
&ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + + _f000-f000 _f003-f003 _f006-f006 _f009-f009 _f012-f012 _f015-f015 _f018-f018 _f021-f021 _f024-f024 _f027-f027 _f030-f030 _f033-f033 _f036-f036 _f039-f039 _f042-f042 _f045-f045 _f048-f048 _f051-f051 _f054-f054 _f057-f057 _f060-f060 _f063-f063 _f066-f066 _f069-f069 _f072-f072 _f075-f075 _f078-f078 _f081-f081 _f084-f084 _f087-f087 _f090-f090 _f093-f093 _f096-f096 _f099-f099 _f102-f102 _f105-f105 _f108-f108 _f111-f111 _f114-f114 _f117-f117 _f120-f120 + f000 f003 f006 f009 f012 f015 f018 f021 f024 f027 f030 f033 f036 f039 f042 f045 f048 f051 f054 f057 f060 f063 f066 f069 f072 f075 f078 f081 f084 f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 + f000 f003 f006 f009 f012 f015 f018 f021 f024 f027 f030 f033 f036 f039 f042 f045 f048 f051 f054 f057 f060 f063 f066 f069 f072 f075 f078 f081 f084 f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + hfv3gfs + batch + vjet + 01:00:00 + 8:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + FHRGRP#grp# + FHRLST#lst# + ROTDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSrun/test + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + hfv3gfs + batch + vjet + 06:00:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + g2g1 g2o1 pcp1 + + + + &JOBS_DIR;/metp.sh + + &PSLOT;_gfsmetp#metpcase#_@H + hfv3gfs + batch + vjet + 06:00:00 + 1:ppn=4:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsmetp#metpcase#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + SDATE_GFSNone + METPCASE#metpcase# + + + + + + + + + + + + + + &JOBS_DIR;/arch.sh + + &PSLOT;_gfsarch_@H + hfv3gfs + batch + service + 06:00:00 + 1:ppn=1:tpp=1 + 4096M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsarch.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + diff --git 
a/FV3GFSwfm/test_jet/test_vjet_12x12_2wg_168h.xml b/FV3GFSwfm/test_jet/test_vjet_12x12_2wg_168h.xml new file mode 100644 index 0000000000..980f92ded8 --- /dev/null +++ b/FV3GFSwfm/test_jet/test_vjet_12x12_2wg_168h.xml @@ -0,0 +1,260 @@ + + + + + + +]> + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/coupled_ic.sh + + &PSLOT;_gfscoupled_ic_@H + hfv3gfs + batch + xjet + 00:15:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfscoupled_ic.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_ctrl.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile6.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile6.nc + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + hfv3gfs + batch + xjet + 06:00:00 + 168:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 _f126-f126 _f132-f132 _f138-f138 _f144-f144 _f150-f150 _f156-f156 _f162-f162 _f168-f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + hfv3gfs + batch + xjet + 01:00:00 + 1:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + 
RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + FHRGRP#grp# + FHRLST#lst# + ROTDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSrun/test + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + hfv3gfs + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + g2g1 g2o1 pcp1 + + + + &JOBS_DIR;/metp.sh + + &PSLOT;_gfsmetp#metpcase#_@H + hfv3gfs + batch + xjet + 06:00:00 + 1:ppn=4:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsmetp#metpcase#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + SDATE_GFSNone + METPCASE#metpcase# + + + + + + + + + + + + + + &JOBS_DIR;/arch.sh + + &PSLOT;_gfsarch_@H + hfv3gfs + batch + service + 06:00:00 + 1:ppn=1:tpp=1 + 4096M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsarch.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + diff --git a/FV3GFSwfm/test_jet/test_vjet_12x12_2wg_168h_3hrly.xml b/FV3GFSwfm/test_jet/test_vjet_12x12_2wg_168h_3hrly.xml new file mode 100644 index 0000000000..3d6058227a --- /dev/null +++ b/FV3GFSwfm/test_jet/test_vjet_12x12_2wg_168h_3hrly.xml @@ -0,0 +1,260 @@ + + + + + + +]> + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/coupled_ic.sh + + &PSLOT;_gfscoupled_ic_@H + hfv3gfs + batch + vjet + 00:15:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfscoupled_ic.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_ctrl.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile4.nc + 
/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile6.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile6.nc + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + hfv3gfs + batch + vjet + 06:00:00 + 246:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + + _f000-f003 _f006-f009 _f012-f015 _f018-f021 _f024-f027 _f030-f033 _f036-f039 _f042-f045 _f048-f051 _f054-f057 _f060-f063 _f066-f069 _f072-f075 _f078-f081 _f084-f087 _f090-f090 _f093-f093 _f096-f096 _f099-f099 _f102-f102 _f105-f105 _f108-f108 _f111-f111 _f114-f114 _f117-f117 _f120-f120 _f123-f123 _f126-f126 _f129-f129 _f132-f132 _f135-f135 _f138-f138 _f141-f141 _f144-f144 _f147-f147 _f150-f150 _f153-f153 _f156-f156 _f159-f159 _f162-f162 _f165-f165 _f168-f168 + f003 f009 f015 f021 f027 f033 f039 f045 f051 f057 f063 f069 f075 f081 f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 f123 f126 f129 f132 f135 f138 f141 f144 f147 f150 f153 f156 f159 f162 f165 f168 + f000_f003 f006_f009 f012_f015 f018_f021 f024_f027 f030_f033 f036_f039 f042_f045 f048_f051 f054_f057 f060_f063 f066_f069 f072_f075 f078_f081 f084_f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 f123 f126 f129 f132 f135 f138 f141 f144 f147 f150 f153 f156 f159 f162 f165 f168 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + hfv3gfs + batch + vjet + 01:00:00 + 8:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + FHRGRP#grp# + FHRLST#lst# + ROTDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSrun/test + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + hfv3gfs + batch + vjet + 06:00:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + g2g1 g2o1 pcp1 + + + + &JOBS_DIR;/metp.sh + 
+ &PSLOT;_gfsmetp#metpcase#_@H + hfv3gfs + batch + vjet + 06:00:00 + 1:ppn=4:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsmetp#metpcase#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + SDATE_GFSNone + METPCASE#metpcase# + + + + + + + + + + + + + + &JOBS_DIR;/arch.sh + + &PSLOT;_gfsarch_@H + hfv3gfs + batch + service + 06:00:00 + 1:ppn=1:tpp=1 + 4096M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsarch.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + diff --git a/FV3GFSwfm/test_jet/test_vjet_12x12_4wg.xml b/FV3GFSwfm/test_jet/test_vjet_12x12_4wg.xml new file mode 100644 index 0000000000..0fe5e55941 --- /dev/null +++ b/FV3GFSwfm/test_jet/test_vjet_12x12_4wg.xml @@ -0,0 +1,260 @@ + + + + + + +]> + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/coupled_ic.sh + + &PSLOT;_gfscoupled_ic_@H + hfv3gfs + batch + vjet + 00:15:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfscoupled_ic.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_ctrl.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile6.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile6.nc + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + hfv3gfs + batch + vjet + 06:00:00 + 276:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + 
EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + + _f000-f000 _f003-f003 _f006-f006 _f009-f009 _f012-f012 _f015-f015 _f018-f018 _f021-f021 _f024-f024 _f027-f027 _f030-f030 _f033-f033 _f036-f036 _f039-f039 _f042-f042 _f045-f045 _f048-f048 _f051-f051 _f054-f054 _f057-f057 _f060-f060 _f063-f063 _f066-f066 _f069-f069 _f072-f072 _f075-f075 _f078-f078 _f081-f081 _f084-f084 _f087-f087 _f090-f090 _f093-f093 _f096-f096 _f099-f099 _f102-f102 _f105-f105 _f108-f108 _f111-f111 _f114-f114 _f117-f117 _f120-f120 + f000 f003 f006 f009 f012 f015 f018 f021 f024 f027 f030 f033 f036 f039 f042 f045 f048 f051 f054 f057 f060 f063 f066 f069 f072 f075 f078 f081 f084 f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 + f000 f003 f006 f009 f012 f015 f018 f021 f024 f027 f030 f033 f036 f039 f042 f045 f048 f051 f054 f057 f060 f063 f066 f069 f072 f075 f078 f081 f084 f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + hfv3gfs + batch + vjet + 01:00:00 + 8:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + FHRGRP#grp# + FHRLST#lst# + ROTDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSrun/test + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + hfv3gfs + batch + vjet + 06:00:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + g2g1 g2o1 pcp1 + + + + &JOBS_DIR;/metp.sh + + &PSLOT;_gfsmetp#metpcase#_@H + hfv3gfs + batch + vjet + 06:00:00 + 1:ppn=4:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsmetp#metpcase#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + SDATE_GFSNone + METPCASE#metpcase# + + + + + + + + + + + + + + &JOBS_DIR;/arch.sh + + &PSLOT;_gfsarch_@H + hfv3gfs + batch + service + 06:00:00 + 1:ppn=1:tpp=1 + 4096M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsarch.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + diff --git a/FV3GFSwfm/test_jet/test_vjet_12x16.xml b/FV3GFSwfm/test_jet/test_vjet_12x16.xml new file mode 100644 index 
0000000000..e8cdeb413b --- /dev/null +++ b/FV3GFSwfm/test_jet/test_vjet_12x16.xml @@ -0,0 +1,260 @@ + + + + + + +]> + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/coupled_ic.sh + + &PSLOT;_gfscoupled_ic_@H + hfv3gfs + batch + vjet + 00:15:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfscoupled_ic.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_ctrl.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile6.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile6.nc + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + hfv3gfs + batch + vjet + 06:00:00 + 348:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + + _f000-f000 _f003-f003 _f006-f006 _f009-f009 _f012-f012 _f015-f015 _f018-f018 _f021-f021 _f024-f024 _f027-f027 _f030-f030 _f033-f033 _f036-f036 _f039-f039 _f042-f042 _f045-f045 _f048-f048 _f051-f051 _f054-f054 _f057-f057 _f060-f060 _f063-f063 _f066-f066 _f069-f069 _f072-f072 _f075-f075 _f078-f078 _f081-f081 _f084-f084 _f087-f087 _f090-f090 _f093-f093 _f096-f096 _f099-f099 _f102-f102 _f105-f105 _f108-f108 _f111-f111 _f114-f114 _f117-f117 _f120-f120 + f000 f003 f006 f009 f012 f015 f018 f021 f024 f027 f030 f033 f036 f039 f042 f045 f048 f051 f054 f057 f060 f063 f066 f069 f072 f075 f078 f081 f084 f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 + f000 f003 f006 f009 f012 f015 f018 f021 f024 f027 f030 f033 f036 f039 f042 f045 f048 f051 f054 f057 f060 f063 f066 f069 f072 f075 f078 f081 f084 f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + 
hfv3gfs + batch + vjet + 01:00:00 + 8:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + FHRGRP#grp# + FHRLST#lst# + ROTDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSrun/test + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + hfv3gfs + batch + vjet + 06:00:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + g2g1 g2o1 pcp1 + + + + &JOBS_DIR;/metp.sh + + &PSLOT;_gfsmetp#metpcase#_@H + hfv3gfs + batch + vjet + 06:00:00 + 1:ppn=4:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsmetp#metpcase#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + SDATE_GFSNone + METPCASE#metpcase# + + + + + + + + + + + + + + &JOBS_DIR;/arch.sh + + &PSLOT;_gfsarch_@H + hfv3gfs + batch + service + 06:00:00 + 1:ppn=1:tpp=1 + 4096M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsarch.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + diff --git a/FV3GFSwfm/test_jet/test_vjet_8x8_2wg.xml b/FV3GFSwfm/test_jet/test_vjet_8x8_2wg.xml new file mode 100644 index 0000000000..b578326012 --- /dev/null +++ b/FV3GFSwfm/test_jet/test_vjet_8x8_2wg.xml @@ -0,0 +1,260 @@ + + + + + + +]> + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/coupled_ic.sh + + &PSLOT;_gfscoupled_ic_@H + hfv3gfs + batch + vjet + 00:15:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfscoupled_ic.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_ctrl.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile3.nc + 
/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile6.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile6.nc + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + hfv3gfs + batch + vjet + 06:00:00 + 126:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + + _f000-f000 _f003-f003 _f006-f006 _f009-f009 _f012-f012 _f015-f015 _f018-f018 _f021-f021 _f024-f024 _f027-f027 _f030-f030 _f033-f033 _f036-f036 _f039-f039 _f042-f042 _f045-f045 _f048-f048 _f051-f051 _f054-f054 _f057-f057 _f060-f060 _f063-f063 _f066-f066 _f069-f069 _f072-f072 _f075-f075 _f078-f078 _f081-f081 _f084-f084 _f087-f087 _f090-f090 _f093-f093 _f096-f096 _f099-f099 _f102-f102 _f105-f105 _f108-f108 _f111-f111 _f114-f114 _f117-f117 _f120-f120 + f000 f003 f006 f009 f012 f015 f018 f021 f024 f027 f030 f033 f036 f039 f042 f045 f048 f051 f054 f057 f060 f063 f066 f069 f072 f075 f078 f081 f084 f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 + f000 f003 f006 f009 f012 f015 f018 f021 f024 f027 f030 f033 f036 f039 f042 f045 f048 f051 f054 f057 f060 f063 f066 f069 f072 f075 f078 f081 f084 f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + hfv3gfs + batch + vjet + 01:00:00 + 8:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + FHRGRP#grp# + FHRLST#lst# + ROTDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSrun/test + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + hfv3gfs + batch + vjet + 06:00:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + g2g1 g2o1 pcp1 + + + + 
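Note (illustrative sketch, not part of this change): every task in these files receives the cycle time three ways, as CDATE (@Y@m@d@H), PDY (@Y@m@d), and cyc (@H). Rocoto expands those tokens itself; the bash below only sketches the equivalent string slicing, with CDATE hard-coded to this experiment's single 2022110900 cycle as an example.

#! /usr/bin/env bash
# Sketch only: how PDY and cyc relate to CDATE for one cycle.
# The workflow gets these values from Rocoto's @Y@m@d@H tokens, not from here.
CDATE="2022110900"      # example: the 00z 9 Nov 2022 cycle from the cycledef
PDY="${CDATE:0:8}"      # YYYYMMDD -> 20221109
cyc="${CDATE:8:2}"      # HH       -> 00
echo "CDATE=${CDATE} PDY=${PDY} cyc=${cyc}"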
&JOBS_DIR;/metp.sh + + &PSLOT;_gfsmetp#metpcase#_@H + hfv3gfs + batch + vjet + 06:00:00 + 1:ppn=4:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsmetp#metpcase#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + SDATE_GFSNone + METPCASE#metpcase# + + + + + + + + + + + + + + &JOBS_DIR;/arch.sh + + &PSLOT;_gfsarch_@H + hfv3gfs + batch + service + 06:00:00 + 1:ppn=1:tpp=1 + 4096M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsarch.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + diff --git a/FV3GFSwfm/test_jet/test_vjet_8x8_4wg.xml b/FV3GFSwfm/test_jet/test_vjet_8x8_4wg.xml new file mode 100644 index 0000000000..9ea1c33b15 --- /dev/null +++ b/FV3GFSwfm/test_jet/test_vjet_8x8_4wg.xml @@ -0,0 +1,260 @@ + + + + + + +]> + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/coupled_ic.sh + + &PSLOT;_gfscoupled_ic_@H + hfv3gfs + batch + vjet + 00:15:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfscoupled_ic.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_ctrl.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile6.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile6.nc + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + hfv3gfs + batch + vjet + 06:00:00 + 156:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + 
EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + + _f000-f000 _f003-f003 _f006-f006 _f009-f009 _f012-f012 _f015-f015 _f018-f018 _f021-f021 _f024-f024 _f027-f027 _f030-f030 _f033-f033 _f036-f036 _f039-f039 _f042-f042 _f045-f045 _f048-f048 _f051-f051 _f054-f054 _f057-f057 _f060-f060 _f063-f063 _f066-f066 _f069-f069 _f072-f072 _f075-f075 _f078-f078 _f081-f081 _f084-f084 _f087-f087 _f090-f090 _f093-f093 _f096-f096 _f099-f099 _f102-f102 _f105-f105 _f108-f108 _f111-f111 _f114-f114 _f117-f117 _f120-f120 + f000 f003 f006 f009 f012 f015 f018 f021 f024 f027 f030 f033 f036 f039 f042 f045 f048 f051 f054 f057 f060 f063 f066 f069 f072 f075 f078 f081 f084 f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 + f000 f003 f006 f009 f012 f015 f018 f021 f024 f027 f030 f033 f036 f039 f042 f045 f048 f051 f054 f057 f060 f063 f066 f069 f072 f075 f078 f081 f084 f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + hfv3gfs + batch + vjet + 01:00:00 + 8:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + FHRGRP#grp# + FHRLST#lst# + ROTDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSrun/test + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + hfv3gfs + batch + vjet + 06:00:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + g2g1 g2o1 pcp1 + + + + &JOBS_DIR;/metp.sh + + &PSLOT;_gfsmetp#metpcase#_@H + hfv3gfs + batch + vjet + 06:00:00 + 1:ppn=4:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsmetp#metpcase#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + SDATE_GFSNone + METPCASE#metpcase# + + + + + + + + + + + + + + &JOBS_DIR;/arch.sh + + &PSLOT;_gfsarch_@H + hfv3gfs + batch + service + 06:00:00 + 1:ppn=1:tpp=1 + 4096M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsarch.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + diff --git a/FV3GFSwfm/test_jet/test_xjet_12x12_2wg.xml b/FV3GFSwfm/test_jet/test_xjet_12x12_2wg.xml new file mode 100644 index 
0000000000..f6cdd22717 --- /dev/null +++ b/FV3GFSwfm/test_jet/test_xjet_12x12_2wg.xml @@ -0,0 +1,260 @@ + + + + + + +]> + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/coupled_ic.sh + + &PSLOT;_gfscoupled_ic_@H + hfv3gfs + batch + xjet + 00:15:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfscoupled_ic.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_ctrl.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile6.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile6.nc + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + hfv3gfs + batch + xjet + 06:00:00 + 164:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + + _f000-f000 _f003-f003 _f006-f006 _f009-f009 _f012-f012 _f015-f015 _f018-f018 _f021-f021 _f024-f024 _f027-f027 _f030-f030 _f033-f033 _f036-f036 _f039-f039 _f042-f042 _f045-f045 _f048-f048 _f051-f051 _f054-f054 _f057-f057 _f060-f060 _f063-f063 _f066-f066 _f069-f069 _f072-f072 _f075-f075 _f078-f078 _f081-f081 _f084-f084 _f087-f087 _f090-f090 _f093-f093 _f096-f096 _f099-f099 _f102-f102 _f105-f105 _f108-f108 _f111-f111 _f114-f114 _f117-f117 _f120-f120 + f000 f003 f006 f009 f012 f015 f018 f021 f024 f027 f030 f033 f036 f039 f042 f045 f048 f051 f054 f057 f060 f063 f066 f069 f072 f075 f078 f081 f084 f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 + f000 f003 f006 f009 f012 f015 f018 f021 f024 f027 f030 f033 f036 f039 f042 f045 f048 f051 f054 f057 f060 f063 f066 f069 f072 f075 f078 f081 f084 f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H 
+ hfv3gfs + batch + xjet + 01:00:00 + 6:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + FHRGRP#grp# + FHRLST#lst# + ROTDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSrun/test + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + hfv3gfs + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + g2g1 g2o1 pcp1 + + + + &JOBS_DIR;/metp.sh + + &PSLOT;_gfsmetp#metpcase#_@H + hfv3gfs + batch + xjet + 06:00:00 + 1:ppn=4:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsmetp#metpcase#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + SDATE_GFSNone + METPCASE#metpcase# + + + + + + + + + + + + + + &JOBS_DIR;/arch.sh + + &PSLOT;_gfsarch_@H + hfv3gfs + batch + service + 06:00:00 + 1:ppn=1:tpp=1 + 4096M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsarch.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + diff --git a/FV3GFSwfm/test_jet/test_xjet_12x12_4wg.xml b/FV3GFSwfm/test_jet/test_xjet_12x12_4wg.xml new file mode 100644 index 0000000000..72430a4b30 --- /dev/null +++ b/FV3GFSwfm/test_jet/test_xjet_12x12_4wg.xml @@ -0,0 +1,260 @@ + + + + + + +]> + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/coupled_ic.sh + + &PSLOT;_gfscoupled_ic_@H + hfv3gfs + batch + xjet + 00:15:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfscoupled_ic.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_ctrl.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile3.nc + 
/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile6.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile6.nc + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + hfv3gfs + batch + xjet + 06:00:00 + 184:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + + _f000-f000 _f003-f003 _f006-f006 _f009-f009 _f012-f012 _f015-f015 _f018-f018 _f021-f021 _f024-f024 _f027-f027 _f030-f030 _f033-f033 _f036-f036 _f039-f039 _f042-f042 _f045-f045 _f048-f048 _f051-f051 _f054-f054 _f057-f057 _f060-f060 _f063-f063 _f066-f066 _f069-f069 _f072-f072 _f075-f075 _f078-f078 _f081-f081 _f084-f084 _f087-f087 _f090-f090 _f093-f093 _f096-f096 _f099-f099 _f102-f102 _f105-f105 _f108-f108 _f111-f111 _f114-f114 _f117-f117 _f120-f120 + f000 f003 f006 f009 f012 f015 f018 f021 f024 f027 f030 f033 f036 f039 f042 f045 f048 f051 f054 f057 f060 f063 f066 f069 f072 f075 f078 f081 f084 f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 + f000 f003 f006 f009 f012 f015 f018 f021 f024 f027 f030 f033 f036 f039 f042 f045 f048 f051 f054 f057 f060 f063 f066 f069 f072 f075 f078 f081 f084 f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + hfv3gfs + batch + xjet + 01:00:00 + 6:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + FHRGRP#grp# + FHRLST#lst# + ROTDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSrun/test + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + hfv3gfs + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + g2g1 g2o1 pcp1 + + + + 
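Note (illustrative sketch, not part of this change): the gfspost metatask in each of these files is gated on the forecast's per-hour log files (gfs.t@Hz.atm.log#dep#.txt) under ROTDIR, a check Rocoto performs natively through its data dependency. The bash below only illustrates what that gate amounts to; the PDY, cyc, and fhr values are assumptions for the example.

#! /usr/bin/env bash
# Sketch only: the file check the gfspost data dependency expresses in Rocoto.
ROTDIR="/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSrun/test"
PDY="20221109"; cyc="00"; fhr="f006"   # assumed example values

logfile="${ROTDIR}/gfs.${PDY}/${cyc}/model_data/atmos/history/gfs.t${cyc}z.atm.log${fhr}.txt"
if [[ -s "${logfile}" ]]; then
  echo "forecast hour ${fhr} is written; post can run"
else
  echo "still waiting for ${logfile}"
fi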
&JOBS_DIR;/metp.sh + + &PSLOT;_gfsmetp#metpcase#_@H + hfv3gfs + batch + xjet + 06:00:00 + 1:ppn=4:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsmetp#metpcase#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + SDATE_GFSNone + METPCASE#metpcase# + + + + + + + + + + + + + + &JOBS_DIR;/arch.sh + + &PSLOT;_gfsarch_@H + hfv3gfs + batch + service + 06:00:00 + 1:ppn=1:tpp=1 + 4096M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsarch.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + diff --git a/FV3GFSwfm/test_jet/test_xjet_12x16.xml b/FV3GFSwfm/test_jet/test_xjet_12x16.xml new file mode 100644 index 0000000000..ac93805a7b --- /dev/null +++ b/FV3GFSwfm/test_jet/test_xjet_12x16.xml @@ -0,0 +1,260 @@ + + + + + + +]> + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/coupled_ic.sh + + &PSLOT;_gfscoupled_ic_@H + hfv3gfs + batch + xjet + 00:15:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfscoupled_ic.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_ctrl.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile6.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile6.nc + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + hfv3gfs + batch + xjet + 06:00:00 + 232:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + 
EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + + _f000-f000 _f003-f003 _f006-f006 _f009-f009 _f012-f012 _f015-f015 _f018-f018 _f021-f021 _f024-f024 _f027-f027 _f030-f030 _f033-f033 _f036-f036 _f039-f039 _f042-f042 _f045-f045 _f048-f048 _f051-f051 _f054-f054 _f057-f057 _f060-f060 _f063-f063 _f066-f066 _f069-f069 _f072-f072 _f075-f075 _f078-f078 _f081-f081 _f084-f084 _f087-f087 _f090-f090 _f093-f093 _f096-f096 _f099-f099 _f102-f102 _f105-f105 _f108-f108 _f111-f111 _f114-f114 _f117-f117 _f120-f120 + f000 f003 f006 f009 f012 f015 f018 f021 f024 f027 f030 f033 f036 f039 f042 f045 f048 f051 f054 f057 f060 f063 f066 f069 f072 f075 f078 f081 f084 f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 + f000 f003 f006 f009 f012 f015 f018 f021 f024 f027 f030 f033 f036 f039 f042 f045 f048 f051 f054 f057 f060 f063 f066 f069 f072 f075 f078 f081 f084 f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + hfv3gfs + batch + xjet + 01:00:00 + 6:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + FHRGRP#grp# + FHRLST#lst# + ROTDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSrun/test + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + hfv3gfs + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + g2g1 g2o1 pcp1 + + + + &JOBS_DIR;/metp.sh + + &PSLOT;_gfsmetp#metpcase#_@H + hfv3gfs + batch + xjet + 06:00:00 + 1:ppn=4:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsmetp#metpcase#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + SDATE_GFSNone + METPCASE#metpcase# + + + + + + + + + + + + + + &JOBS_DIR;/arch.sh + + &PSLOT;_gfsarch_@H + hfv3gfs + batch + service + 06:00:00 + 1:ppn=1:tpp=1 + 4096M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsarch.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + diff --git a/FV3GFSwfm/test_jet/test_xjet_8x8_2wg.xml b/FV3GFSwfm/test_jet/test_xjet_8x8_2wg.xml new file mode 100644 index 
0000000000..284b55be20 --- /dev/null +++ b/FV3GFSwfm/test_jet/test_xjet_8x8_2wg.xml @@ -0,0 +1,260 @@ + + + + + + +]> + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/coupled_ic.sh + + &PSLOT;_gfscoupled_ic_@H + hfv3gfs + batch + xjet + 00:15:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfscoupled_ic.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_ctrl.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile6.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile6.nc + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + hfv3gfs + batch + xjet + 06:00:00 + 84:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + + _f000-f000 _f003-f003 _f006-f006 _f009-f009 _f012-f012 _f015-f015 _f018-f018 _f021-f021 _f024-f024 _f027-f027 _f030-f030 _f033-f033 _f036-f036 _f039-f039 _f042-f042 _f045-f045 _f048-f048 _f051-f051 _f054-f054 _f057-f057 _f060-f060 _f063-f063 _f066-f066 _f069-f069 _f072-f072 _f075-f075 _f078-f078 _f081-f081 _f084-f084 _f087-f087 _f090-f090 _f093-f093 _f096-f096 _f099-f099 _f102-f102 _f105-f105 _f108-f108 _f111-f111 _f114-f114 _f117-f117 _f120-f120 + f000 f003 f006 f009 f012 f015 f018 f021 f024 f027 f030 f033 f036 f039 f042 f045 f048 f051 f054 f057 f060 f063 f066 f069 f072 f075 f078 f081 f084 f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 + f000 f003 f006 f009 f012 f015 f018 f021 f024 f027 f030 f033 f036 f039 f042 f045 f048 f051 f054 f057 f060 f063 f066 f069 f072 f075 f078 f081 f084 f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + 
hfv3gfs + batch + xjet + 01:00:00 + 6:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + FHRGRP#grp# + FHRLST#lst# + ROTDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSrun/test + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + hfv3gfs + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + g2g1 g2o1 pcp1 + + + + &JOBS_DIR;/metp.sh + + &PSLOT;_gfsmetp#metpcase#_@H + hfv3gfs + batch + xjet + 06:00:00 + 1:ppn=4:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsmetp#metpcase#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + SDATE_GFSNone + METPCASE#metpcase# + + + + + + + + + + + + + + &JOBS_DIR;/arch.sh + + &PSLOT;_gfsarch_@H + hfv3gfs + batch + service + 06:00:00 + 1:ppn=1:tpp=1 + 4096M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsarch.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + diff --git a/FV3GFSwfm/test_jet/test_xjet_8x8_4wg.xml b/FV3GFSwfm/test_jet/test_xjet_8x8_4wg.xml new file mode 100644 index 0000000000..96f26a2842 --- /dev/null +++ b/FV3GFSwfm/test_jet/test_xjet_8x8_4wg.xml @@ -0,0 +1,260 @@ + + + + + + +]> + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/coupled_ic.sh + + &PSLOT;_gfscoupled_ic_@H + hfv3gfs + batch + xjet + 00:15:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfscoupled_ic.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_ctrl.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile3.nc + 
/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile6.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile1.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile2.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile3.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile4.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile5.nc + /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs/HR1/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile6.nc + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + hfv3gfs + batch + xjet + 06:00:00 + 104:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + + _f000-f000 _f003-f003 _f006-f006 _f009-f009 _f012-f012 _f015-f015 _f018-f018 _f021-f021 _f024-f024 _f027-f027 _f030-f030 _f033-f033 _f036-f036 _f039-f039 _f042-f042 _f045-f045 _f048-f048 _f051-f051 _f054-f054 _f057-f057 _f060-f060 _f063-f063 _f066-f066 _f069-f069 _f072-f072 _f075-f075 _f078-f078 _f081-f081 _f084-f084 _f087-f087 _f090-f090 _f093-f093 _f096-f096 _f099-f099 _f102-f102 _f105-f105 _f108-f108 _f111-f111 _f114-f114 _f117-f117 _f120-f120 + f000 f003 f006 f009 f012 f015 f018 f021 f024 f027 f030 f033 f036 f039 f042 f045 f048 f051 f054 f057 f060 f063 f066 f069 f072 f075 f078 f081 f084 f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 + f000 f003 f006 f009 f012 f015 f018 f021 f024 f027 f030 f033 f036 f039 f042 f045 f048 f051 f054 f057 f060 f063 f066 f069 f072 f075 f078 f081 f084 f087 f090 f093 f096 f099 f102 f105 f108 f111 f114 f117 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + hfv3gfs + batch + xjet + 01:00:00 + 6:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + FHRGRP#grp# + FHRLST#lst# + ROTDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSrun/test + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + hfv3gfs + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + g2g1 g2o1 pcp1 + + + + 
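Note (illustrative sketch, not part of this change): the resource requests in these files use the nodes:ppn=P:tpp=T form, so the MPI rank count a job receives is the node count multiplied by ppn. A trivial bash illustration, using the 104:ppn=24:tpp=1 forecast request from the xjet 8x8 4wg file above as the example:

#! /usr/bin/env bash
# Sketch only: total MPI ranks implied by an "N:ppn=P:tpp=T" node request.
spec="104:ppn=24:tpp=1"     # example taken from the gfsfcst task above
nodes="${spec%%:*}"                                            # -> 104
ppn="$(echo "${spec}" | sed -n 's/.*ppn=\([0-9]*\).*/\1/p')"   # -> 24
echo "total MPI ranks: $(( nodes * ppn ))"                     # -> 2496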
&JOBS_DIR;/metp.sh + + &PSLOT;_gfsmetp#metpcase#_@H + hfv3gfs + batch + xjet + 06:00:00 + 1:ppn=4:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsmetp#metpcase#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + SDATE_GFSNone + METPCASE#metpcase# + + + + + + + + + + + + + + &JOBS_DIR;/arch.sh + + &PSLOT;_gfsarch_@H + hfv3gfs + batch + service + 06:00:00 + 1:ppn=1:tpp=1 + 4096M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsarch.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/FV3GFSwfm/test + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT /lfs1/BMC/gsd-fv3-test/Judy.K.Henderson/scratch/stmp/RUNDIRS/test + + + + + + + + + + + diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/12x12x1wgx14wt b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/12x12x1wgx14wt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.aero b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanl b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlfinal b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlinit b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlrun b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.aerosol_init b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.anal b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # 
Assimilate Metop-C GNSSRO + if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.analcalc b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.analdiag b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.arch b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.atmanl b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlfinal b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlinit b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlrun b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanl b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlfinal b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlinit b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlrun b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.awips b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.awips @@ -0,0 +1,17 @@ +#! 
/usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.base b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.base new file mode 100644 index 0000000000..ad6c0cb632 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.base @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." +export assim_freq=6 +export PSLOT="v17_p8_c3_12x12_xjet_1wg_14wt" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_c3" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
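As an aside (not part of config.base itself), the archive path set above is assembled entirely from other variables in this file; a minimal sketch of how ATARDIR expands for this Jet experiment, with USER shown as an assumed runtime value:

```bash
#!/usr/bin/env bash
# Illustration only: expansion of ATARDIR from values defined in this config.base.
# USER is a runtime value; "jhender" below is an assumed example, not taken from the file.
HPSS_PROJECT="fim"
machine="JET"
USER="jhender"
PSLOT="v17_p8_c3_12x12_xjet_1wg_14wt"
ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}"
echo "${ATARDIR}"
# -> /BMC/fim/1year/jhender/JET/scratch/v17_p8_c3_12x12_xjet_1wg_14wt
```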
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-168} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-168} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-168} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-168} +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
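The forecast-length and cycling-step values above are derived at runtime rather than hard-coded; a standalone sketch (values copied from this file, with cyc assumed to be exported by the job) of how they resolve:

```bash
#!/usr/bin/env bash
# Standalone sketch of the derivations used above (not part of config.base).
cyc="00"                 # cycle hour, normally exported by the job environment
gfs_cyc=1                # 1 -> run the gfs suite once per day (00Z only)

FHMAX_GFS_00=24; FHMAX_GFS_06=24; FHMAX_GFS_12=24; FHMAX_GFS_18=24
# Indirect expansion: pick the FHMAX_GFS_${cyc} variable for this cycle.
FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}})   # same idiom as config.base
# An eval-free alternative: var="FHMAX_GFS_${cyc}"; FHMAX_GFS=${!var}

if (( gfs_cyc != 0 )); then
  STEP_GFS=$(( 24 / gfs_cyc ))   # hours between gfs cycles: 1->24, 2->12, 4->6
else
  STEP_GFS=0
fi
echo "FHMAX_GFS=${FHMAX_GFS} STEP_GFS=${STEP_GFS}"   # -> FHMAX_GFS=24 STEP_GFS=24
```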
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
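Note that DOHYBVAR and NMEM_ENKF above remain as @...@ template tokens in this checked-in copy. A hypothetical guard, not part of global-workflow, that a setup step could run to catch unfilled placeholders before submitting an experiment:

```bash
#!/usr/bin/env bash
# Hypothetical pre-flight check (not workflow code): abort if template
# placeholders such as @DOHYBVAR@ or @NMEM_ENKF@ were never substituted.
EXPDIR="${EXPDIR:-/path/to/experiment}"   # assumed to be set by the caller
if grep -nE '@[A-Z_]+@' "${EXPDIR}/config.base"; then
  echo "FATAL ERROR: unfilled template placeholders remain in ${EXPDIR}/config.base"
  exit 2
fi
```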
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
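The RUN assignment above chains two parameter-expansion defaults (RUN, then CDUMP, then the literal gfs). A small illustration, outside the workflow, of how the fallbacks behave:

```bash
#!/usr/bin/env bash
# Illustration of the nested defaults used for RUN above (not workflow code).
unset RUN CDUMP
echo "${RUN:-${CDUMP:-gfs}}"     # neither set         -> gfs

CDUMP="gdas"
echo "${RUN:-${CDUMP:-gfs}}"     # CDUMP set, RUN unset -> gdas

RUN="enkfgdas"
echo "${RUN:-${CDUMP:-gfs}}"     # RUN set              -> enkfgdas
```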
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
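The APP case block above combines a glob pattern with two regex tests to decide which components to activate. A minimal sketch of the same logic, showing what a value like S2SWA would switch on:

```bash
#!/usr/bin/env bash
# Sketch of the APP matching logic above, run outside the workflow.
APP="S2SWA"
DO_COUPLED=NO DO_OCN=NO DO_ICE=NO DO_WAVE=NO DO_AERO=NO

case "${APP}" in
  S2S*)
    DO_COUPLED=YES; DO_OCN=YES; DO_ICE=YES
    [[ "${APP}" =~ A$ ]]    && DO_AERO=YES    # trailing A  -> aerosols on
    [[ "${APP}" =~ ^S2SW ]] && DO_WAVE=YES    # S2SW prefix -> waves on
    ;;
esac
echo "OCN=${DO_OCN} ICE=${DO_ICE} WAVE=${DO_WAVE} AERO=${DO_AERO}"
# -> OCN=YES ICE=YES WAVE=YES AERO=YES
```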
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.base.emc.dyn_hera b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
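ERRSCRIPT above defaults to a small eval expression rather than NCO's err_chk utility; the following illustration (not part of the file) shows how that default behaves when a job step sets err:

```bash
#!/usr/bin/env bash
# Illustration of the EMC default ERRSCRIPT='eval [[ $err = 0 ]]' above.
ERRSCRIPT='eval [[ $err = 0 ]]'

err=0
${ERRSCRIPT} && echo "step succeeded (err=${err})"

err=8
${ERRSCRIPT} || echo "step failed (err=${err})"
```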
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
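IAU_FHROT above is taken from the first character of IAUFHRS via cut -c1, which is sufficient for the single-digit increment hours used here. A short sketch of that derivation, with a field-based variant shown as an alternative for hypothetical two-digit leading hours:

```bash
#!/usr/bin/env bash
# Sketch of how IAU_FHROT is derived from IAUFHRS above (illustration only).
IAUFHRS="3,6,9"

IAU_FHROT=$(echo "${IAUFHRS}" | cut -c1)          # first character -> "3"
# Field-based alternative that would also tolerate a two-digit first hour:
IAU_FHROT_ALT=$(echo "${IAUFHRS}" | cut -d',' -f1)

echo "IAU_FHROT=${IAU_FHROT} (alt: ${IAU_FHROT_ALT})"   # -> IAU_FHROT=3 (alt: 3)
```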
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.base.emc.dyn_jet b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
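+
+# A minimal illustration (hypothetical values) of the nested default expansion
+# used for RUN above: the inner ${CDUMP:-"gfs"} is resolved first, so RUN falls
+# back to CDUMP when RUN is unset or empty, and to "gfs" when both are.
+( unset RUN CDUMP; CDUMP=gdas; echo "${RUN:-${CDUMP:-gfs}}" )   # prints gdas
+( unset RUN CDUMP;             echo "${RUN:-${CDUMP:-gfs}}" )   # prints gfs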
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
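+
+# Worked example (illustration only, hypothetical values) of the per-cycle
+# lookup and cycle spacing defined above: with cyc=12 the eval resolves
+# FHMAX_GFS to the value of FHMAX_GFS_12, and gfs_cyc=4 gives a GFS cycle
+# every 24/4 = 6 hours.
+( cyc=12; FHMAX_GFS_12=168; gfs_cyc=4
+  fhmax=$(eval echo \${FHMAX_GFS_${cyc}})   # same pattern as above -> 168
+  step=$(( 24 / gfs_cyc ))                  # -> 6
+  echo "FHMAX_GFS=${fhmax} STEP_GFS=${step}" )
+# Similarly, IAU_FHROT above keeps only the first character of IAUFHRS
+# ("3,6,9" -> 3), which assumes the first hour in the list is a single digit.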
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.com b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' 
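+
+# Illustration only: this is not the generate_com() helper from ush/preamble.sh,
+# just a sketch of the template expansion it performs on the declarations above.
+# With hypothetical values ROTDIR=/lfs/comrot, RUN=gfs, YMD=20210323, HH=12 and
+# MEMDIR=mem001, the atmospheric analysis template expands to
+#   /lfs/comrot/gfs.20210323/12/mem001/analysis/atmos
+( demo_tmpl='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}/analysis/atmos'
+  ROTDIR=/lfs/comrot RUN=gfs YMD=20210323 HH=12 MEMDIR=mem001 \
+    bash -c "echo \"${demo_tmpl}\"" )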
+declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.coupled_ic b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.defaults.s2sw b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
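+
+# Illustration of the "empty variables must include a space" note at the top of
+# this file: downstream configs presumably apply ${var:-default} expansions, and
+# bash treats an empty value exactly like an unset one there, while a single
+# space survives the expansion.
+( waveesmfGRD="";  echo "[${waveesmfGRD:-SOME_DEFAULT}]" )   # prints [SOME_DEFAULT]
+( waveesmfGRD=" "; echo "[${waveesmfGRD:-SOME_DEFAULT}]" )   # prints [ ]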
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.earc b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ecen b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.echgres b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ediag b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.efcs b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.eobs b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.epos b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.esfc b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.eupd b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.fcst b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.fcst_gsl b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
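+
+# Worked example (hypothetical values) of the argument string assembled for
+# config.ufs near the top of this file: with CASE=C768, OCNRES=025, ICERES=025
+# and a multi-grid waveGRD='gnh_10m aoc_9km gsh_15m', the ${waveGRD// /;}
+# substitution swaps spaces for semicolons, giving
+#   --fv3 C768 --mom6 025 --cice6 025 --ww3 gnh_10m;aoc_9km;gsh_15m
+( CASE=C768 OCNRES=025 ICERES=025 waveGRD='gnh_10m aoc_9km gsh_15m'
+  string="--fv3 ${CASE} --mom6 ${OCNRES} --cice6 ${ICERES} --ww3 ${waveGRD// /;}"
+  echo "${string}" )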
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." 
+ export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.fcst_orig b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." 
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
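+
+# Worked example (illustration only, hypothetical values) of the restart list
+# assembled by the loop above: with restart_interval_gfs=6, IAU_OFFSET=6 and
+# FHMAX_GFS=48, xfh starts at 6+(6/2)=9 and steps by 6, so
+#   restart_interval="9 15 21 27 33 39 45"
+( restart_interval_gfs=6 IAU_OFFSET=6 FHMAX_GFS=48
+  rst_list=""; xfh=$((restart_interval_gfs+(IAU_OFFSET/2)))
+  while [ ${xfh} -le ${FHMAX_GFS} ]; do rst_list="${rst_list} ${xfh}"; xfh=$((xfh+restart_interval_gfs)); done
+  echo "restart_interval=${rst_list}" )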
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.fit2obs b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.gempak b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.getic b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.gldas b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ice b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ice @@ -0,0 +1,5 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.init b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.init @@ -0,0 +1,54 @@ +#! /usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.landanl b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.landanlfinal b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.landanlinit b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.landanlrun b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.metp b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.nsst b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocn b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanal b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalbmat 
b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalchkpt b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalpost b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalprep b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalrun b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalvrfy b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocnpost b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ocnpost @@ -0,0 +1,16 @@ +#! 
/usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.post b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.postsnd b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.prep b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.resources b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components; usually they land on the atmosphere tasks. + # However, it is suggested to limit the mediator PETS to 300, as a larger count may slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST
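+  # Worked example of the PETS arithmetic above (illustrative only; the actual
+  # task counts come from config.ufs and may differ). For the gfs forecast at
+  # C768 with layout_x_gfs=12 and layout_y_gfs=12, the six cubed-sphere tiles
+  # give ntasks_fv3 = 12 * 12 * 6 = 864; with nthreads_fv3_gfs=4 this yields
+  # FV3PETS = 864 * 4 = 3456. If quilting is on with, say, ntasks_quilt=84
+  # (hypothetical), QUILTPETS = 84 * 4 = 336 and ATMPETS = 3456 + 336 = 3792,
+  # which seeds NTASKS_TOT before any WAVE/OCN/ICE PETS are added.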
+  unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.resources.nco.static b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
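+  # Illustrative arithmetic (comment added for clarity, not in the upstream file):
+  # with npe_node_max=128 on WCOSS2, npe_node_eobs=40 tasks per node at
+  # nth_eobs=3 threads each uses 40 * 3 = 120 of the 128 cores on a node,
+  # and npe_eobs=480 tasks therefore span 480 / 40 = 12 nodes.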
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.sfcanl b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ufs b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ufs new file mode 100644 index 0000000000..f3d23d6c7d --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.ufs @@ -0,0 +1,373 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=1 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=14 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.vrfy b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wafs b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wafsblending b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wafsblending0p25 b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgcip b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgrib2 b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgrib20p25 b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. 
$EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wave b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wave @@ -0,0 +1,159 @@ +#! /usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single 
restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.waveawipsbulls b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.waveawipsgridded b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! 
/usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wavegempak b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.waveinit b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostbndpnt b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostbndpntbll b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostpnt b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostsbs b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostsbs @@ -0,0 +1,24 @@ +#! 
/usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.waveprep b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/runcmds b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/runcmds new file mode 100644 index 0000000000..714bc3036c --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_c3.xml -d v17_p8_c3.db +rocotostat -w v17_p8_c3.xml -d v17_p8_c3.db diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3.crontab b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3.xml b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3.xml new file mode 100644 index 0000000000..9d0496dac8 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3.xml @@ -0,0 +1,154 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 00:45:00 + xjet + 158:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + 
&ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 2:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/12x12x2wgx12wt b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/12x12x2wgx12wt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.aero b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.aeroanl b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.aeroanlfinal b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.aeroanlinit b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.aeroanlrun b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.aerosol_init b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.anal b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + 
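# NOTE: illustrative only. Every dated block in this RUN_ENVIR="emc" section
# follows the same pattern: when CDATE falls inside a historical window, the
# info/error files are pointed at the matching dated copy under
# ${FIXgsi}/gfsv16_historical. Schematically (placeholder dates, not real
# entries from this file):
#   if [[ "${CDATE}" -ge "<startYYYYMMDDHH>" && "${CDATE}" -lt "<endYYYYMMDDHH>" ]]; then
#     export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.<startYYYYMMDDHH>
#   fi
# Only the dated blocks actually present in this file are authoritative.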
+ # Assimilate Metop-C GNSSRO + if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.analcalc b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.analdiag b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.arch b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.atmanl b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.atmanlfinal b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.atmanlinit b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.atmanlrun b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.atmensanl b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.atmensanlfinal b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.atmensanlinit b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.atmensanlrun b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.awips b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.base b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.base new file mode 100644 index 0000000000..385b466c89 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.base @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23 +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." +export assim_freq=6 +export PSLOT="v17_p8_mynn_12x12_xjet_2wg_12wt" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_mynn" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-24} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-24} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-24} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
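# A minimal sketch (not part of the configuration itself; assumes cyc="00" and
# the gfs_cyc=1 / FHMAX_GFS_00=168 defaults set above) of how the per-cycle
# forecast length and GFS cadence resolve at runtime:
#
#   cyc="00"; gfs_cyc=1
#   FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}})   # indirect lookup of FHMAX_GFS_00 -> 168
#   STEP_GFS=$(( 24 / gfs_cyc ))                  # -> 24 hours between GFS cycles
#
# Likewise, IAU_FHROT above keeps only the first character of IAUFHRS
# ("3,6,9" -> "3"), which is valid as long as the first increment hour stays a
# single digit.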
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.base.emc.dyn_hera b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.base.emc.dyn_jet b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.base_f024 b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.base_f024 new file mode 100644 index 0000000000..872441362a --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.base_f024 @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." 
+export assim_freq=6 +export PSLOT="v17_p8_mynn_12x12_xjet_2wg_12wt" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_mynn" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-168} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-168} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-168} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-168} +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." 
+ export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.com b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. 
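# As a rough illustration (hypothetical values): with RUN="gfs", YMD="20221110",
# HH="00", and MEMDIR left empty, a template such as
#   COM_ATMOS_ANALYSIS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}/analysis/atmos'
# is filled in by generate_com() to a concrete path along the lines of
#   ${ROTDIR}/gfs.20221110/00/analysis/atmos
# with ${MEMDIR} only contributing a subdirectory for ensemble members.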
+# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx 
COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.coupled_ic b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.defaults.s2sw b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." + +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.earc b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. 
$EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ecen b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.echgres b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ediag b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.efcs b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.efcs @@ -0,0 +1,97 @@ +#! /usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? 
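+
+# Illustrative sketch (assumed values, not settings made by this file): for
+# CASE_ENKF=C384 with DO_OCN=DO_ICE=DO_WAVE all "YES" and waveGRD="mx025",
+# the case/flag logic above gives OCNRES=025 and ICERES=025, and the block
+# below would effectively run:
+#   source $EXPDIR/config.ufs --fv3 C384 --mom6 025 --cice6 025 --ww3 mx025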
+ +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.eobs b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. $EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.epos b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.epos @@ -0,0 +1,20 @@ +#! 
/usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.esfc b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.eupd b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." # Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. 
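+
+# Consistency note (not enforced by this file): model-space localization only
+# works with the gain-form LETKF and observer-computed Jacobians, i.e.
+# modelspace_vloc=.true. goes together with letkf_flag=.true., getkf=.true.,
+# and lobsdiag_forenkf=.true., all of which are set above. A defensive check
+# could look like the following sketch (commented out, assumption only):
+# if [[ ${modelspace_vloc} = ".true." && ${lobsdiag_forenkf} != ".true." ]]; then
+#   echo "FATAL ERROR: modelspace_vloc requires lobsdiag_forenkf=.true."
+#   exit 1
+# fi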
+ +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.fcst b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.fcst_gsl b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." 
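+  # Worked example (assumes bc's default scale of 0, which truncates): the line
+  # below gives launch_level=54 for LEVS=128 and launch_level=27 for LEVS=65.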
+ export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." 
]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. ### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. 
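+    # Note (illustrative, DELTIM value is hypothetical): a few lines below,
+    # dt_inner defaults to DELTIM/2 (e.g. 150 s for DELTIM=300) but is reset
+    # to the full DELTIM when sedi_semi=.true., as set just above.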
+ export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." 
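+
+  # Example of the restart_interval list form documented above: with DOIAU=YES,
+  # restart_interval="3 6" requests restart files at forecast hours 3 and 6
+  # (the beginning of the IAU window plus the original 6-h restart point).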
+ +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.fcst_orig b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.fcst_orig @@ -0,0 +1,364 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." 
+export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." 
]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
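+
+  # Worked example of the restart list computed above (hypothetical values):
+  # with restart_interval_gfs=12, IAU_OFFSET=6, and FHMAX_GFS=48, xfh starts at
+  # 12+(6/2)=15 and the loop yields restart_interval="15 27 39".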
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.fit2obs b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.gempak b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.getic b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.gldas b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ice b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ice @@ -0,0 +1,5 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.init b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.init @@ -0,0 +1,54 @@ +#! /usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.landanl b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.landanlfinal b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.landanlinit b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.landanlrun b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.metp b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.nsst b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocn b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable is determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is time interval for applying increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanal b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size reolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalbmat 
b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalchkpt b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalpost b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalprep b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalrun b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalvrfy b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnpost b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnpost @@ -0,0 +1,16 @@ +#! 
/usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.post b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.postsnd b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.prep b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.resources b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested to limit the mediator PETS to 300, as a larger count may cause slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-${ATMPETS}} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST
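+  # Added note (illustrative worked example, not from the original file): with the C768 gfs
+  # settings from config.ufs in this experiment directory (layout_x_gfs=12, layout_y_gfs=12,
+  # nthreads_fv3_gfs=4, WRITE_GROUP_GFS=2, WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12) and
+  # assuming QUILTING=".true.", the arithmetic above gives:
+  #   ntasks_fv3_gfs   = 12 * 12 * 6 = 864     FV3PETS   = 864 * 4 = 3456
+  #   ntasks_quilt_gfs = 2 * 12 * 6  = 144     QUILTPETS = 144 * 4 = 576
+  #   ATMPETS          = 3456 + 576  = 4032    (WAV/OCN/ICE PETS are added when those components run)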
+ unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.resources.nco.static b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.sfcanl b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ufs b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ufs new file mode 100644 index 0000000000..6c48881832 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.ufs @@ -0,0 +1,373 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.vrfy b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wafs b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsblending b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsblending0p25 b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsgcip b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsgrib2 b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsgrib20p25 b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. 
$EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wave b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wave @@ -0,0 +1,159 @@ +#! /usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single 
restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.waveawipsbulls b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.waveawipsgridded b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! 
/usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wavegempak b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.waveinit b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wavepostbndpnt b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wavepostbndpntbll b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wavepostpnt b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wavepostsbs b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.wavepostsbs @@ -0,0 +1,24 @@ +#! 
/usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.waveprep b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/runcmds b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/runcmds new file mode 100644 index 0000000000..fc9fb92625 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_mynn.xml -d v17_p8_mynn.db +rocotostat -w v17_p8_mynn.xml -d v17_p8_mynn.db diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/v17_p8_mynn.crontab b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/v17_p8_mynn.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/v17_p8_mynn.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/v17_p8_mynn.db_f024 b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/v17_p8_mynn.db_f024 new file mode 100644 index 0000000000..690d5e37d8 Binary files /dev/null and b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/v17_p8_mynn.db_f024 differ diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/v17_p8_mynn.xml b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/v17_p8_mynn.xml new file mode 100644 index 0000000000..bf46485320 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/v17_p8_mynn.xml @@ -0,0 +1,153 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 05:00:00 + xjet + 
168:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 _f126-f126 _f132-f132 _f138-f138 _f144-f144 _f150-f150 _f156-f156 _f162-f162 _f168-f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 00:20:00 + 1:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/v17_p8_mynn.xml_f024 b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/v17_p8_mynn.xml_f024 new file mode 100644 index 0000000000..7f06945f6b --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_mynn_12x12_xjet_2wg_12wt/v17_p8_mynn.xml_f024 @@ -0,0 +1,153 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 01:00:00 + xjet + 168:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + 
&ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/12x12x2wgx12wt b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/12x12x2wgx12wt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.aero b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanl b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlfinal b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlinit b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlrun b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. 
$EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.aerosol_init b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.aerosol_init @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.anal b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can only be used when bias correction is non-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. 
in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. 
+ # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assimilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time. + # + # Turn off assimilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.analcalc b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.analdiag b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. $EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.arch b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. 
$EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanl b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlfinal b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlinit b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlrun b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanl b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlfinal b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlinit b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlrun b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.awips b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. 
of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.base b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.base new file mode 100644 index 0000000000..dcdffae411 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.base @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." +export assim_freq=6 +export PSLOT="v17_p8_thompson_12x12_xjet_2wg_12wt" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_thompson" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
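# The chained ${VAR:-fallback} defaults used above (e.g. RUN=${RUN:-${CDUMP:-"gfs"}} and
# envir=${envir:-"prod"}) only fall through to the inner value when the outer variable is
# unset or empty. A minimal sketch of that standard bash behaviour, shown commented out so
# it adds no settings to this config (illustration only, not part of the workflow):
#   unset RUN CDUMP;              echo "${RUN:-${CDUMP:-gfs}}"   # -> gfs
#   unset RUN; CDUMP="gdas";      echo "${RUN:-${CDUMP:-gfs}}"   # -> gdas
#   RUN="enkfgdas"; CDUMP="gdas"; echo "${RUN:-${CDUMP:-gfs}}"   # -> enkfgdas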
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-168} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-168} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-168} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-168} +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
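# Worked example of how the derived timing values above resolve for this experiment,
# assuming the 06Z cycle (cyc=06) and the values set above (FHMAX_GFS_06=24, gfs_cyc=1,
# IAUFHRS="3,6,9"); shown commented out, purely illustrative:
#   FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}})   # indirect lookup of FHMAX_GFS_06 -> 24
#   STEP_GFS=$(( 24 / gfs_cyc ))                  # hours between GFS cycles         -> 24
#   IAU_FHROT=$(echo ${IAUFHRS} | cut -c1)        # first IAU increment hour         -> 3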
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turn on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# scripts default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
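# Note on the cold-start/IAU check above: with the settings used in this experiment
# (MODE="forecast-only" and EXP_WARM_START=".false."), the third branch of that test
# is true, so IAU_OFFSET and IAU_FHROT are reset to 0 even though DOIAU="YES".
# Equivalent standalone check (commented out, for illustration only):
#   [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] && \
#     echo "free-forecast cold start: IAU_OFFSET=0 IAU_FHROT=0"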
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
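+# NOTE (illustrative): STEP_GFS and FHMAX_GFS above are derived by simple
+# arithmetic and indirection. With gfs_cyc=4 the GFS forecast is launched
+# every STEP_GFS = 24/4 = 6 hours; with gfs_cyc=1 it runs once per day
+# (STEP_GFS=24). FHMAX_GFS resolves FHMAX_GFS_${cyc} for the current cycle,
+# so at cyc=12 it takes the value of FHMAX_GFS_12 (120 h by default here).
+# The numbers are examples only; actual values come from the @gfs_cyc@ and
+# FHMAX_GFS_* settings filled in at experiment setup.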
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
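+# NOTE (illustrative): with the EnKF block above, a hybrid 4DEnVar setup
+# (DOHYBVAR=YES and l4densvar=.true.) ends up writing hourly backgrounds
+# (FHOUT=1, FHOUT_ENKF=1) across the 3-9 h EnKF window, while l4densvar=.false.
+# keeps 3-hourly EnKF output. Similarly, HPSSARCH and LOCALARCH are mutually
+# exclusive: setting both to "YES" trips the exit 2 check above.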
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.base.emc.dyn_hera b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
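+# NOTE (illustrative): IAU_FHROT above is obtained by taking the first
+# character of IAUFHRS (cut -c1), so IAUFHRS="3,6,9" yields IAU_FHROT=3,
+# i.e. the model restarts at the first increment hour of the IAU window.
+# This extraction only works while that first hour is a single digit.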
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.base.emc.dyn_jet b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
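+# NOTE (illustrative): the DUMP_SUFFIX set earlier in this file is appended to
+# the dump name when the obs dump path is built in config.com
+# (COM_OBSDMP_TMPL uses ${DUMP}${DUMP_SUFFIX}), so a cycle inside the
+# 2019092100-2019110700 window reads from e.g. gdasp.20191001/00/atmos
+# rather than gdas.20191001/00/atmos (dates shown for illustration only).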
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
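+# NOTE (illustrative): the OCNRES values chosen above appear to encode the
+# MOM6/CICE grid spacing in hundredths of a degree (500 = 5 deg for C48,
+# 100 = 1 deg, 050 = 0.5 deg, 025 = 0.25 deg for C384/C768), consistent with
+# the mx025 / mesh.mx025.nc names used in config.defaults.s2sw.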
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.com b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx 
COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.coupled_ic b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.defaults.s2sw b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
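+# NOTE (illustrative): the "Empty variables must include a space" rule at the
+# top of this file presumably exists because later configs consume these
+# values through ${var:-default}-style expansions, and ":-" substitutes the
+# default for an unset OR empty variable while a lone space is kept.
+# Minimal sketch (SOME_DEFAULT is a hypothetical placeholder):
+#   waveesmfGRD=' '; echo "${waveesmfGRD:-SOME_DEFAULT}"   # prints the space
+#   waveesmfGRD='' ; echo "${waveesmfGRD:-SOME_DEFAULT}"   # prints SOME_DEFAULT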
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.earc b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ecen b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.echgres b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ediag b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.efcs b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.eobs b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.epos b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.esfc b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.eupd b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.fcst b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.fcst_gsl b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
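+# NOTE (illustrative): the component-sourcing loop near the top of this file
+# uses eval and awk for variable indirection and lower-casing. An equivalent
+# sketch with bash builtins (bash >= 4) would be:
+#   for component in WAVE OCN ICE AERO; do
+#     control="DO_${component}"
+#     [[ "${!control}" == "YES" ]] && source "${EXPDIR}/config.${component,,}"
+#   done
+# Behaviour is unchanged; the existing form above is left as-is.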
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." 
+ export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.fcst_orig b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." 
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
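+  # Illustrative example (hypothetical values, not defaults): with DOIAU=YES,
+  # IAU_OFFSET=6, restart_interval_gfs=12 and FHMAX_GFS=120, the loop above
+  # yields restart_interval="15 27 39 51 63 75 87 99 111"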
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.fit2obs b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.gempak b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.getic b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.gldas b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ice b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ice @@ -0,0 +1,5 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.init b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.init @@ -0,0 +1,54 @@ +#! /usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.landanl b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlfinal b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlinit b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlrun b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.metp b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.nsst b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocn b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable is determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is time interval for applying increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanal b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size reolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git 
a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalbmat b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalchkpt b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalpost b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalprep b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalrun b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalvrfy b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnpost b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.post b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.postsnd b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.prep b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.resources b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components; they usually land on the atmosphere tasks. + # However, it is suggested to limit the mediator PETS to 300, as a larger count may cause slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-${ATMPETS}} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST +
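+  # Worked example of the PETS arithmetic above (an illustrative sketch only; it assumes the C768 gfs
+  # settings defined in this experiment's config.ufs: layout_x_gfs=12, layout_y_gfs=12, nthreads_fv3_gfs=4,
+  # WRITE_GROUP_GFS=2, WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12):
+  #   ntasks_fv3_gfs   = 12 * 12 * 6  = 864  ->  FV3PETS   = 864 * 4 = 3456
+  #   ntasks_quilt_gfs = 2 * (12 * 6) = 144  ->  QUILTPETS = 144 * 4 = 576
+  #   ATMPETS = 3456 + 576 = 4032, which becomes npe_fcst_gfs when no wave/ocean/ice PETS are added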
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.resources.nco.static b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
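+ # Editorial note (a sketch of the node arithmetic, assuming the npe_node_max=128 set at the top of this file):
+ # 480 eobs tasks at 40 tasks per node occupy 12 nodes, and 40 tasks x 3 threads = 120 of the 128 cores on each node.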
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.sfcanl b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ufs b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ufs new file mode 100644 index 0000000000..6c48881832 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.ufs @@ -0,0 +1,373 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.vrfy b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wafs b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsblending b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsblending0p25 b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgcip b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgrib2 b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgrib20p25 b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. 
$EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wave b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wave @@ -0,0 +1,159 @@ +#! /usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 
for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEM would be unset; this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.waveawipsbulls b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.waveawipsgridded b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.waveawipsgridded @@ -0,0 +1,14 @@ +#!
/usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wavegempak b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.waveinit b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostbndpnt b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostbndpntbll b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostpnt b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostsbs b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostsbs @@ -0,0 +1,24 @@ +#! 
/usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.waveprep b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/runcmds b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/runcmds new file mode 100644 index 0000000000..f1a8d2d8cd --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_thompson.xml -d v17_p8_thompson.db +rocotostat -w v17_p8_thompson.xml -d v17_p8_thompson.db diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.crontab b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.db_progsigma=T b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.db_progsigma=T new file mode 100644 index 0000000000..0ac1cc8503 Binary files /dev/null and b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.db_progsigma=T differ diff --git a/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.xml b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.xml new file mode 100644 index 0000000000..f834bf6d3c --- /dev/null +++ b/FV3GFSwfm/testing/OLDv17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.xml @@ -0,0 +1,153 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 
202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 01:00:00 + xjet + 168:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/12x12x2wg b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/12x12x2wg new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.aero b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.aeroanl b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.anal b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ 
"${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.analcalc b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.analdiag b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.arch b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.atmanl b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.atmanlrun @@ -0,0 +1,11 @@ +#! 
/usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.atmensanl b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.awips b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. 
of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.base b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.base new file mode 100644 index 0000000000..1b9ef4c1b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.base @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="vjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." 
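As an orientation aside (not part of the patch), a small bash sketch of how a driver could step through cycles between the SDATE and EDATE exported above; it assumes GNU date is available, and with SDATE equal to EDATE as set here the loop covers the single 2022111000 cycle, matching the 24:00:00 cycledef used by the Rocoto XML earlier in this patch.

    cdate="${SDATE}"            # e.g. 2022111000 (YYYYMMDDHH)
    while (( cdate <= EDATE )); do
      echo "processing cycle ${cdate}"
      # advance 24 hours; splitting the string keeps a form GNU date understands
      cdate=$(date -ud "${cdate:0:8} ${cdate:8:2}:00 + 24 hours" +%Y%m%d%H)
    done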
+export assim_freq=6 +export PSLOT="v17_p8_12x12_vjet_2wg" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-24} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-24} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-24} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." 
+ export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is 
defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
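As a reading aid for the gfs_cyc values documented on the line above, here is an illustrative helper (not part of the original file) that maps a cycle hour and a gfs_cyc setting to whether the long GFS forecast runs at that cycle; the real decision is made by the workflow generator, so this is only a sketch.

    runs_gfs_cycle() {
      # usage: runs_gfs_cycle <cyc> <gfs_cyc>, e.g. runs_gfs_cycle 12 2
      local cyc=$1 gfs_cyc=$2
      case "${gfs_cyc}" in
        0) return 1 ;;                                  # no GFS cycles
        1) [[ ${cyc} == "00" ]] ;;                      # 00Z only
        2) [[ ${cyc} == "00" || ${cyc} == "12" ]] ;;    # 00Z and 12Z
        4) return 0 ;;                                  # all four cycles
        *) return 1 ;;
      esac
    }
    runs_gfs_cycle 12 2 && echo "the 12Z cycle runs the GFS suite"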
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
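The per-cycle forecast length above in this file is selected with `export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}})`. Purely as an illustrative aside (not a proposed change to the file), the same lookup can be written with bash indirect expansion, which avoids eval:

    cyc=12                          # example cycle hour
    FHMAX_GFS_12=120                # assume the per-cycle value is already exported, as above
    fhmax_var="FHMAX_GFS_${cyc}"    # name of the variable to read, e.g. FHMAX_GFS_12
    FHMAX_GFS="${!fhmax_var}"       # indirect expansion: value of FHMAX_GFS_12
    echo "FHMAX_GFS=${FHMAX_GFS}"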
+ +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. 
+export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
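The ERRSCRIPT default set earlier in this file ('eval [[ $err = 0 ]]') is simply a test of a saved return code; the commented alternative in the same block shows that operations may point it at err_chk instead. A rough sketch of the calling convention downstream job scripts follow, where some_forecast_step is only a placeholder command:

    some_forecast_step              # placeholder for any command whose failure should stop the job
    export err=$?
    ${ERRSCRIPT} || exit "${err}"   # with the default, this evaluates [[ $err = 0 ]]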
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
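Just above, IAU_FHROT is taken as the first character of IAUFHRS via `cut -c1`, which works for the single-digit "3,6,9" list. As an aside (not a change to the file), the first list entry can also be extracted with parameter expansion, which stays correct even for two-digit hours:

    IAUFHRS="3,6,9"
    IAU_FHROT="${IAUFHRS%%,*}"   # drop everything from the first comma on -> "3"
    echo "IAU_FHROT=${IAU_FHROT}"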
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
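The archiving toggles above are consumed by arch.sh and earc.sh rather than here; the sketch below is only a guess at the general shape of that logic (htar to the ATARDIR tape path when HPSSARCH=YES, a local copy toward ARCDIR when LOCALARCH=YES), and the tarball name and member list are invented for illustration.

    if [[ "${HPSSARCH}" = "YES" ]]; then
      # bundle one cycle's logs into a tarball on HPSS (illustrative member list)
      htar -cvf "${ATARDIR}/${PSLOT}_${CDATE}_logs.tar" "${ROTDIR}/logs/${CDATE}"
    elif [[ "${LOCALARCH}" = "YES" ]]; then
      mkdir -p "${ARCDIR}"
      cp -rp "${ROTDIR}/logs/${CDATE}" "${ARCDIR}/"
    fi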
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
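The DUMP_SUFFIX window earlier in this file, like the many dated info-file windows in config.anal above, compares CDATE values as plain 10-digit YYYYMMDDHH integers, which order the same way the dates do. A standalone illustration, with the bounds copied from that test:

    CDATE=2019100100
    if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then
      echo "CDATE is inside the NCO GFS v15.3 parallel window, so DUMP_SUFFIX=p"
    fi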
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
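+
+# Worked example (hypothetical values, illustration only): with gfs_cyc=4 the GFS
+# runs every STEP_GFS=$(( 24 / 4 ))=6 hours, and FHMAX_GFS is resolved per cycle by
+# indirect expansion, e.g. for cyc=12:
+#   FHMAX_GFS=$(eval echo \${FHMAX_GFS_12})   # -> 168 with the defaults above
+# Similarly, IAU_FHROT takes the first entry of IAUFHRS, so IAUFHRS="3,6,9" yields
+#   IAU_FHROT=$(echo "3,6,9" | cut -c1)       # -> 3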
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
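+
+# Illustration only (hypothetical setup): a retrospective experiment on a machine
+# without HPSS access would set HPSSARCH="NO" and LOCALARCH="YES"; setting both to
+# "YES" trips the check above and the configuration exits with status 2 before any
+# archiving is attempted.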
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.com b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx 
COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
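+
+# Illustration of the "empty variables must include a space" note at the top of
+# this file, presumably because downstream configs apply ${var:-default} expansion,
+# which replaces a null value but keeps a single space (as with waveesmfGRD=' ' and
+# wavepostGRD=' ' in the wave block below):
+#   somevar=' '   # hypothetical variable: survives sourcing, later defaults keep it
+#   somevar=''    # null value: a later ${somevar:-...} would overwrite it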
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.earc b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ecen b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.echgres b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ediag b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.efcs b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.eobs b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.epos b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.esfc b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.eupd b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.fcst b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
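+
+# Illustration only: with CDUMP="gdas" and WAVE_CDUMP="both", the WAVE_CDUMP case
+# statement at the top of this file leaves DO_WAVE unchanged; with CDUMP="gfs" and
+# WAVE_CDUMP="gdas", neither pattern matches (${CDUMP/enkf} expands to "gfs"), so
+# DO_WAVE is forced to "NO" and the coupling flag cplwav below stays ".false.".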
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." 
+ export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." 
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
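+
+  # Worked example (hypothetical values, illustration only): restart_interval_gfs=24,
+  # IAU_OFFSET=6 and FHMAX_GFS=120 give xfh=24+3=27 on the first pass through the
+  # loop above, so restart_interval becomes "27 51 75 99" (steps of 24 up to FHMAX_GFS).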
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.fit2obs b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.gempak b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.getic b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.gldas b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ice b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.init b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.init @@ -0,0 +1,54 @@ +#! 
/usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.landanl b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.landanlinit b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.landanlinit @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.landanlrun b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.metp b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.nsst b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocn b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocnanal b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 # TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocnanalbmat b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocnanalbmat new file mode 100644 index
0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocnpost b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.post b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.postsnd b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.prep b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.resources b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components; they usually land on the atmosphere tasks. + # However, it is suggested to limit the mediator PETS to 300, as a larger count may cause slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + 
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.sfcanl b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ufs b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ufs new file mode 100644 index 0000000000..ad28ce429b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.ufs @@ -0,0 +1,372 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
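As a quick sanity check of the C768 settings above (the 12x12 GFS layout and 2 write groups that give this experiment directory its name), the task-count arithmetic config.ufs applies just above works out as follows. This is an editorial recap, not part of the file; the final task layout still depends on config.resources, which is outside this diff.

```bash
# Illustrative recap of the C768 GFS task counts implied by the exports above.
layout_x_gfs=12; layout_y_gfs=12
WRITE_GROUP_GFS=2
WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10
nthreads_fv3_gfs=4

(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 ))                                   # 864 compute tasks (6 cube faces)
(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 ))   # 60 write tasks per group
(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS ))              # 120 asynchronous write (quilt) tasks
(( total_cores = (ntasks_fv3_gfs + ntasks_quilt_gfs) * nthreads_fv3_gfs ))
echo "${ntasks_fv3_gfs} + ${ntasks_quilt_gfs} = $(( ntasks_fv3_gfs + ntasks_quilt_gfs )) tasks, ~${total_cores} cores at ${nthreads_fv3_gfs} threads"
```

So the GFS forecast at this resolution needs roughly 984 MPI tasks, or about 3936 cores once the 4 OpenMP threads are counted.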
MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.vrfy b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wafs b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wafsblending b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wave b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wavegempak b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wavegempak @@ -0,0 +1,13 @@ +#! 
/usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.waveinit b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.waveprep b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/runcmds b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/runcmds new file mode 100644 index 0000000000..331bec8558 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8.xml -d v17_p8.db +rocotostat -w v17_p8.xml -d v17_p8.db diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/v17_p8.crontab b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/v17_p8.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/v17_p8.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/v17_p8.xml b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/v17_p8.xml new file mode 100644 index 0000000000..c4a253d5d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg/v17_p8.xml @@ -0,0 +1,151 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211090000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 07:00:00 + vjet + 246:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=16:tpp=1 + 
--export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + vjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/12x12x2wg b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/12x12x2wg new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.aero b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.aeroanl b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.anal b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C 
GNSSRO + if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.analcalc b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.analdiag b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.arch b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.atmanl b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.atmanlrun @@ -0,0 +1,11 @@ +#! 
/usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.atmensanl b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.awips b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. 
of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.base b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.base new file mode 100644 index 0000000000..c50963c5b3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.base @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="vjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." 
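The machine and PARTITION_BATCH exports near the top of this config.base feed the per-node core-count logic in the config.ufs shown earlier in this diff. A small editorial sketch of how they resolve for this experiment (the case values are copied from that file's JET section; nothing here is new configuration):

```bash
# How machine=JET with PARTITION_BATCH=vjet resolves to a per-node task limit in config.ufs.
machine="JET"
PARTITION_BATCH="vjet"
case "${machine}" in
  "JET")
    case "${PARTITION_BATCH}" in
      "xjet")          npe_node_max=24 ;;
      "vjet" | "sjet") npe_node_max=16 ;;
      "kjet")          npe_node_max=40 ;;
    esac
    ;;
esac
echo "${npe_node_max}"   # -> 16
```

At 16 cores per vjet node, the roughly 984 forecast tasks at 4 threads sketched earlier for config.ufs come to 3936 cores, or 246 nodes, which appears consistent with the 246:ppn=16:tpp=1 forecast resource request in v17_p8.xml.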
+export assim_freq=6 +export PSLOT="v17_p8_12x12_vjet_2wg_24h" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
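To make the APP case statement here easier to trace (the S2S* branch concludes with the "_outerwave" suffix and the config.defaults.s2sw source just below), the following editorial walk-through shows how a coupled value such as S2SWA would compose its settings. This experiment exports APP=ATM, so only the first branch actually runs; the snippet is illustrative only.

```bash
# Editorial walk-through of the S2S* branch for a hypothetical APP=S2SWA.
APP="S2SWA"
case "${APP}" in
  S2S*)
    DO_COUPLED="YES"; DO_OCN="YES"; DO_ICE="YES"    # ocean and ice are always on for S2S apps
    confignamevarfornems="cpld"
    if [[ "${APP}" =~ A$ ]]; then                   # trailing "A" adds aerosols
      DO_AERO="YES"; confignamevarfornems="${confignamevarfornems}_aero"
    fi
    if [[ "${APP}" =~ ^S2SW ]]; then                # "S2SW..." adds waves
      DO_WAVE="YES"; confignamevarfornems="${confignamevarfornems}_outerwave"
    fi
    ;;
esac
echo "${confignamevarfornems}"   # -> cpld_aero_outerwave
```

The real branch also swaps CCPP_SUITE to FV3_GFS_v17_coupled_p8 and sources config.defaults.s2sw, as shown in the surrounding diff.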
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-168} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-168} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-168} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-168} +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." 
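One indirection in the IAU-related settings above is easy to misread: IAU_FHROT is derived from the first character of IAUFHRS rather than being set directly. A small editorial illustration:

```bash
# How IAU_FHROT is derived from IAUFHRS earlier in this file.
IAUFHRS="3,6,9"
IAU_FHROT=$(echo ${IAUFHRS} | cut -c1)   # first character of the comma-separated list
echo "${IAU_FHROT}"                      # -> 3
# Caveat: cut -c1 keeps exactly one character, so this assumes the first
# IAU increment hour stays a single digit.
```

Note also that the cold-start check a little further down resets IAU_FHROT and IAU_OFFSET to 0 when the experiment is not warm-started.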
+ export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is 
defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
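The FHMAX_GFS_* settings in the block that follows are resolved per cycle by expanding the variable whose name is built from ${cyc}. A minimal sketch of that lookup, using a hypothetical cycle hour, with bash's eval-free indirect expansion shown as an equivalent form:

    # Illustration only: per-cycle forecast length lookup with a hypothetical cycle hour.
    cyc="12"
    FHMAX_GFS_12=24
    FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}})   # pattern used in the block below
    varname="FHMAX_GFS_${cyc}"                    # equivalent, eval-free indirect expansion
    FHMAX_GFS="${!varname}"
    echo "${FHMAX_GFS}"                           # -> 24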
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
+ +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. 
+export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
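The case block just below pairs each FV3 CASE with an ocean/ice resolution; OCNRES encodes the ocean grid spacing in hundredths of a degree, so C384 and C768 both run a quarter-degree ocean. A small sketch of the lookup, for illustration only:

    # Illustration only: atmosphere CASE -> ocean/ice resolution (hundredths of a degree).
    CASE="C384"
    case "${CASE}" in
      "C48")  OCNRES=500;;   # 5.00 degree
      "C96")  OCNRES=100;;   # 1.00 degree
      "C192") OCNRES=050;;   # 0.50 degree
      *)      OCNRES=025;;   # 0.25 degree (C384, C768, and the fallback)
    esac
    echo "${CASE} -> OCNRES=${OCNRES}"   # -> C384 -> OCNRES=025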
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
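IAU_FHROT above is derived by taking the first character of IAUFHRS, which yields 3 for the default "3,6,9" but would silently truncate a two-digit leading hour. A hedged sketch of that extraction, next to a parameter-expansion form that keeps the whole first comma-separated field:

    # Illustration only: first IAU forecast hour from the comma-separated IAUFHRS list.
    IAUFHRS="3,6,9"
    IAU_FHROT=$(echo ${IAUFHRS} | cut -c1)   # pattern used above: first character -> 3
    IAU_FHROT="${IAUFHRS%%,*}"               # alternative: whole first field, safe for 2-digit hours
    echo "${IAU_FHROT}"                      # -> 3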
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
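INCREMENTS_TO_ZERO and INCVARS_ZERO_STRAT earlier in this block carry Fortran namelist fragments, so the inner single quotes are part of the value and must survive into whatever namelist consumes them. A tiny illustration of the quoting; the namelist key name shown is hypothetical:

    # Illustration only: the bash value keeps the inner quotes that the namelist needs.
    INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'"
    echo "incvars_to_zero = ${INCREMENTS_TO_ZERO}"   # hypothetical namelist line
    # -> incvars_to_zero = 'liq_wat_inc','icmr_inc'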
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
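Among the JJOBS parameters above, ERRSCRIPT defaults to 'eval [[ $err = 0 ]]', which turns error checking into a plain success test unless a site overrides it (for example with NCO's err_chk). A minimal, hypothetical sketch of how a job script might consume it; the step name is illustrative only and not taken from this change:

    # Hypothetical job snippet, for illustration only: run a step, record its status,
    # then let ERRSCRIPT decide whether the job continues.
    export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'}
    some_step() { return 0; }      # stand-in for a real executable
    some_step
    export err=$?
    ${ERRSCRIPT} || exit "${err}"  # with the default, this succeeds whenever err is 0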
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
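In the APP case block above, the S2S* branch infers the coupled configuration from the APP string itself: a trailing "A" switches on aerosols and a leading "S2SW" switches on waves. A short sketch with a hypothetical APP value, for illustration only:

    # Illustration only: how the APP suffix/prefix tests behave for a sample value.
    APP="S2SWA"                       # hypothetical value; real experiments fill in @APP@ at setup
    [[ "${APP}" =~ A$ ]]    && echo "trailing A   -> DO_AERO=YES"
    [[ "${APP}" =~ ^S2SW ]] && echo "leading S2SW -> DO_WAVE=YES"
    # S2SWA prints both lines, S2SA only the first, plain S2S neither.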
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.com b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx 
COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
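The note at the top of config.defaults.s2sw ("Empty variables must include a space otherwise they will be overwritten") comes down to how ${VAR:-default} treats null versus non-null values; this is presumably why waveesmfGRD and wavepostGRD just below are set to a single blank rather than an empty string. A quick illustration:

    # Illustration only: a single space is non-null, so it survives ${VAR:-default} expansion,
    # while a truly empty value is replaced by the default.
    wavepostGRD=' '
    echo "[${wavepostGRD:-gnh_10m}]"   # -> [ ]        (kept: the value is a space)
    wavepostGRD=''
    echo "[${wavepostGRD:-gnh_10m}]"   # -> [gnh_10m]  (overwritten: the value is null)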
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.earc b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ecen b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.echgres b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ediag b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.efcs b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.eobs b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.epos b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.esfc b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.eupd b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.fcst b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
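Near the top of config.fcst above, a single argument string for config.ufs is assembled by appending one flag per active component, with the space-separated waveGRD list collapsed to semicolons via ${waveGRD// /;}; the component-sourcing loop right after it tests the same DO_* switches by name. A short sketch with hypothetical switches, for illustration only:

    # Illustration only: assembling the config.ufs argument string.
    CASE="C768"; OCNRES="025"; DO_OCN="YES"; DO_WAVE="YES"
    waveGRD='gnh_10m aoc_9km gsh_15m'
    string="--fv3 $CASE"
    [[ ${DO_OCN} == "YES" ]]  && string="$string --mom6 $OCNRES"
    [[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}"
    echo "$string"   # -> --fv3 C768 --mom6 025 --ww3 gnh_10m;aoc_9km;gsh_15m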
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." 
+ export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." 
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
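+  # Worked example of the restart_interval list built above (hypothetical values):
+  #   restart_interval_gfs=12, FHMAX_GFS=48, DOIAU=YES, IAU_OFFSET=6
+  #   xfh starts at 12 + 6/2 = 15 and increments by 12 while <= 48,
+  #   so restart_interval becomes "15 27 39".
+  # With the default restart_interval_gfs=0, restart_interval is simply "$FHMAX_GFS".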
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.fit2obs b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.gempak b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.getic b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.gldas b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ice b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.init b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.init @@ -0,0 +1,54 @@ +#! 
/usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.landanl b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.landanlinit b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.landanlinit @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.landanlrun b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.metp b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
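+# Example of how the model_file_format template above is resolved by METplus
+# (hypothetical cycle): for a 24-h forecast from the 2021032100 gfs cycle,
+#   pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2
+# becomes pgbf24.gfs.2021032100.grib2.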
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.nsst b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocn b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocnanal b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocnanalbmat b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocnanalbmat new file 
mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocnpost b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.post b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.postsnd b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.prep b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.resources b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components; they are usually placed on the atmosphere tasks. + # However, it is suggested to limit the mediator PETS to 300, as a larger count may slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-${ATMPETS}} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST +
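# Editor's note (illustrative worked example, not part of the committed file): for
# the ATM-only GFS forecast in this experiment, config.ufs later in this diff sets
# layout_x_gfs=12, layout_y_gfs=12, nthreads_fv3_gfs=4, WRITE_GROUP_GFS=2 and
# WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10, so with quilting on the arithmetic
# above works out to:
#   ntasks_fv3_gfs   = 12 * 12 * 6     # = 864 dycore tasks
#   ntasks_quilt_gfs = 2 * (10 * 6)    # = 120 write tasks
#   FV3PETS          = 864 * 4         # = 3456
#   QUILTPETS        = 120 * 4         # = 480
#   ATMPETS          = 3456 + 480      # = 3936 PETs = 246 vjet nodes x 16 cores
# which matches the 246:ppn=16 request for the gfsfcst task in v17_p8.xml.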
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
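# Editor's note (illustrative only, not part of the committed file): with the static
# values above and npe_node_max=128 from the top of this file, eobs works out to
#   480 / 40 = 12 nodes, each using 40 tasks * 3 threads = 120 of the 128 cores.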
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.sfcanl b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ufs b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ufs new file mode 100644 index 0000000000..ad28ce429b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.ufs @@ -0,0 +1,372 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.vrfy b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wafs b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wafsblending b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wave b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wavegempak b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wavegempak @@ -0,0 +1,13 @@ +#! 
/usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.waveinit b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.waveprep b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/runcmds b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/runcmds new file mode 100644 index 0000000000..331bec8558 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8.xml -d v17_p8.db +rocotostat -w v17_p8.xml -d v17_p8.db diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/v17_p8.crontab b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/v17_p8.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/v17_p8.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/v17_p8.xml b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/v17_p8.xml new file mode 100644 index 0000000000..49660efb63 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_vjet_2wg_24h/v17_p8.xml @@ -0,0 +1,151 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211090000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 01:50:00 + vjet + 246:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev 
+ batch + xjet + 01:00:00 + 4:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + vjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/12x12 b/FV3GFSwfm/testing/v17_p8_12x12_xjet/12x12 new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.aero b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.aeroanl b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.anal b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can only be used when bias correction is non-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ "${CDATE}"
-ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.analcalc b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.analdiag b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.arch b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.atmanl b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.atmensanl b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.awips b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.base b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.base new file mode 100644 index 0000000000..eb2586dc7c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.base @@ -0,0 +1,393 @@ +#! 
/usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." 
+export assim_freq=6 +export PSLOT="v17_p8_12x12_xjet" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-168} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-168} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-168} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-168} +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." 
+ export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# Turn on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# scripts default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is 
defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
+ +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. 
+export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
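# For reference, a typical consumer pattern for the ERRSCRIPT/REDOUT/REDERR
# conventions defined earlier in this file (a sketch only; the executable and
# log file names here are placeholders):
#
#   eval "${APRUN:-} ./model_exec.x ${REDOUT}output.log ${REDERR}errfile"
#   export err=$?
#   ${ERRSCRIPT} || exit "${err}"
#
# With the default ERRSCRIPT ('eval [[ $err = 0 ]]') a non-zero exit status
# aborts the step; the commented-out err_chk/startmsg settings above are the
# NCO production equivalents.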
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
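# A worked example of how the GFS cycle and IAU settings above resolve
# (the cycle values are illustrative):
#
#   cyc=06; gfs_cyc=4
#   FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}})  # -> 168 here, taken from FHMAX_GFS_06
#   STEP_GFS=$(( 24 / gfs_cyc ))                 # -> 6, i.e. a gfs forecast every 6 hours
#   IAU_FHROT=$(echo "${IAUFHRS}" | cut -c1)     # -> 3, the first hour of the "3,6,9" IAU window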
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
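# A minimal usage sketch for the archive switches above: a site without HPSS
# access would typically run with
#
#   HPSSARCH="NO"  LOCALARCH="YES"  ATARDIR=/path/to/local/archive   # path here is a placeholder
#
# whereas setting both switches to "YES" trips the guard above and aborts with
# exit code 2.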
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.com b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx 
COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
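# Why these overrides must not be empty strings: config.fcst applies defaults
# of the form ${var:-...}, e.g. "export min_seaice=${min_seaice:-"0.15"}", and
# the :- expansion treats an empty value the same as unset. For example:
#
#   min_seaice=""
#   echo "${min_seaice:-0.15}"     # -> 0.15   (empty override silently lost)
#   min_seaice="1.0e-6"
#   echo "${min_seaice:-0.15}"     # -> 1.0e-6 (override kept)
#
# which is why the note at the top of this file asks for a single space in any
# variable that is intentionally left "empty".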
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.earc b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ecen b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.echgres b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ediag b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.efcs b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.eobs b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.epos b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.esfc b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.eupd b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.fcst b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.fcst new file mode 120000 index 0000000000..582748bd67 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.fcst @@ -0,0 +1 @@ +../config.fcst_new \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
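# For reference, a worked example of the argument string assembled near the top
# of this file: in an S2SW C384 experiment, config.defaults.s2sw sets
# waveGRD='mx025' and the CASE block in config.base gives OCNRES=ICERES=025, so
# config.ufs ends up being sourced as
#
#   source $EXPDIR/config.ufs --fv3 C384 --mom6 025 --cice6 025 --ww3 mx025
#
# The ${waveGRD// /;} substitution only matters when waveGRD lists several
# grids, e.g. 'gnh_10m aoc_9km gsh_15m' becomes 'gnh_10m;aoc_9km;gsh_15m'.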
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." 
+ export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." 
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
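  # A worked example of the restart list assembled above (values illustrative):
  # with restart_interval_gfs=12, FHMAX_GFS=48, DOIAU="YES" and IAU_OFFSET=6,
  # the loop starts at xfh=12+(6/2)=15 and steps by 12, so restart_interval
  # becomes "15 27 39"; with restart_interval_gfs=0 (the default) a single
  # restart is written at FHMAX_GFS.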
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.fit2obs b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.gempak b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.getic b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.gldas b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ice b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.init b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.init @@ -0,0 +1,54 @@ +#! 
/usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.landanl b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.landanlinit b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.landanlrun b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.metp b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.nsst b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocn b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocnanal b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocnanalbmat b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ 
b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocnpost b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.post b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.postsnd b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.prep b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.resources b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi +
+ # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" +
+ # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" +
+ # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" +
+ # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} +
+ # The mediator PETS can overlap with other components; usually they land on the atmosphere tasks. + # However, it is suggested to limit the mediator PETS to 300, since larger counts may cause slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-${ATMPETS}} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" +
+ if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi +
+ if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi +
+ if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi +
+ if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi +
+ echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" +
+ if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi +
+ done +
+ case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac +
+ unset _CDUMP _CDUMP_LIST + 
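# Worked example for the PETS arithmetic above (illustrative comment only, not
+  # evaluated by the script): assuming ntasks_fv3 = layout_x * layout_y * 6 (six
+  # cube faces), as with the other layout-based task counts in this file, the
+  # C768 gdas defaults from config.ufs (layout_x=8, layout_y=12, nthreads_fv3=4)
+  # give ntasks_fv3 = 8 * 12 * 6 = 576 and FV3PETS = 576 * 4 = 2304, before
+  # quilting, wave, ocean, and ice PETS are added to NTASKS_TOT.
+ 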
unset NTASKS_TOT +
+elif [[ ${step} = "ocnpost" ]]; then +
+ export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi +
+elif [[ ${step} = "post" ]]; then +
+ export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True +
+elif [[ ${step} = "wafs" ]]; then +
+ export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" +
+elif [[ ${step} = "wafsgcip" ]]; then +
+ export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" +
+elif [[ ${step} = "wafsgrib2" ]]; then +
+ export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" +
+elif [[ ${step} = "wafsblending" ]]; then +
+ export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" +
+elif [[ ${step} = "wafsgrib20p25" ]]; then +
+ export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" +
+elif [[ ${step} = "wafsblending0p25" ]]; then +
+ export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" +
+elif [[ ${step} = "vrfy" ]]; then +
+ export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True +
+elif [[ "${step}" = "fit2obs" ]]; then +
+ export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi +
+elif [[ "${step}" = "metp" ]]; then +
+ export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True +
+elif [[ ${step} = "echgres" ]]; then +
+ export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi +
+elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.sfcanl b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ufs b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ufs new file mode 100644 index 0000000000..04bb91db0b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.ufs @@ -0,0 +1,372 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=4 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.vrfy b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wafs b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wafsblending b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wave b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wavegempak b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wavegempak @@ -0,0 +1,13 @@ +#! 
/usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.waveinit b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.waveprep b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/config.waveprep @@ -0,0 +1,27 @@ +#! 
/usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/logs/2022110900.log b/FV3GFSwfm/testing/v17_p8_12x12_xjet/logs/2022110900.log new file mode 100644 index 0000000000..b54e56451a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/logs/2022110900.log @@ -0,0 +1,36 @@ +2023-05-30 15:21:02 +0000 :: fe2 :: Submitting gfsfcst +2023-05-30 15:21:02 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:40418 +2023-05-30 15:23:12 +0000 :: fe2 :: Submission status of previously pending gfsfcst is success, jobid=28614915 +2023-05-30 15:23:12 +0000 :: fe2 :: Task gfsfcst, jobid=28614915, in state RUNNING (RUNNING) +2023-05-30 15:23:50 +0000 :: fe2 :: Task gfsfcst, jobid=28614915, in state RUNNING (RUNNING) +2023-05-30 15:24:18 +0000 :: fe2 :: Task gfsfcst, jobid=28614915, in state RUNNING (RUNNING) +2023-05-30 15:31:13 +0000 :: fe2 :: Submitting gfsfcst +2023-05-30 15:31:13 +0000 :: fe2 :: Submitting gfspost_f000-f000 +2023-05-30 15:31:13 +0000 :: fe2 :: Submission of gfsfcst succeeded, jobid=28615213 +2023-05-30 15:31:13 +0000 :: fe2 :: Submission of gfspost_f000-f000 succeeded, jobid=28615214 +2023-05-31 14:31:51 +0000 :: fe6 :: Task gfsfcst, jobid=28614915, in state SUCCEEDED (COMPLETED), ran for 2335.0 seconds, exit status=0, try=1 (of 2) +2023-05-31 14:31:51 +0000 :: fe6 :: Submitting gfspost_f000-f000 +2023-05-31 14:31:51 +0000 :: fe6 :: Submitting gfspost_f006-f006 +2023-05-31 14:31:56 +0000 :: fe6 :: Submitting gfspost_f012-f012 +2023-05-31 14:31:56 +0000 :: fe6 :: Submitting gfspost_f018-f018 +2023-05-31 14:31:57 +0000 :: fe6 :: Submitting gfspost_f024-f024 +2023-05-31 14:31:57 +0000 :: fe6 :: Submission of gfspost_f000-f000 succeeded, jobid=28666939 +2023-05-31 14:31:57 +0000 :: fe6 :: Submission of gfspost_f006-f006 succeeded, jobid=28666940 +2023-05-31 14:31:58 +0000 :: fe6 :: Submission of gfspost_f012-f012 succeeded, jobid=28666941 +2023-05-31 14:31:59 +0000 :: fe6 :: Submission of gfspost_f018-f018 succeeded, jobid=28666942 +2023-05-31 14:31:59 +0000 :: fe6 :: Submission of gfspost_f024-f024 succeeded, jobid=28666943 +2023-05-31 14:33:03 +0000 :: fe6 :: Task gfspost_f000-f000, jobid=28666939, in state RUNNING (RUNNING) +2023-05-31 14:33:05 +0000 :: fe6 :: Task gfspost_f006-f006, jobid=28666940, in state RUNNING (RUNNING) +2023-05-31 14:33:05 +0000 :: fe6 :: Task gfspost_f012-f012, jobid=28666941, in state RUNNING (RUNNING) +2023-05-31 14:33:05 +0000 :: fe6 :: Task gfspost_f018-f018, jobid=28666942, in state RUNNING (RUNNING) +2023-05-31 14:33:05 +0000 :: fe6 :: Task gfspost_f024-f024, jobid=28666943, in state RUNNING (RUNNING) +2023-05-31 14:33:54 +0000 :: fe6 :: Task gfspost_f000-f000, jobid=28666939, in state RUNNING (RUNNING) +2023-05-31 14:33:54 +0000 :: fe6 :: Task gfspost_f006-f006, jobid=28666940, in state RUNNING (RUNNING) +2023-05-31 14:33:54 +0000 :: fe6 :: Task gfspost_f012-f012, jobid=28666941, in state RUNNING (RUNNING) 
+2023-05-31 14:34:00 +0000 :: fe6 :: Task gfspost_f018-f018, jobid=28666942, in state RUNNING (RUNNING) +2023-05-31 14:34:01 +0000 :: fe6 :: Task gfspost_f024-f024, jobid=28666943, in state RUNNING (RUNNING) +2023-05-31 14:35:10 +0000 :: fe6 :: Task gfspost_f000-f000, jobid=28666939, in state RUNNING (RUNNING) +2023-05-31 14:35:10 +0000 :: fe6 :: Task gfspost_f006-f006, jobid=28666940, in state RUNNING (RUNNING) +2023-05-31 14:35:10 +0000 :: fe6 :: Task gfspost_f012-f012, jobid=28666941, in state RUNNING (RUNNING) +2023-05-31 14:35:10 +0000 :: fe6 :: Task gfspost_f018-f018, jobid=28666942, in state RUNNING (RUNNING) +2023-05-31 14:35:10 +0000 :: fe6 :: Task gfspost_f024-f024, jobid=28666943, in state RUNNING (RUNNING) diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/logs/2022111000.log b/FV3GFSwfm/testing/v17_p8_12x12_xjet/logs/2022111000.log new file mode 100644 index 0000000000..8b27a97822 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/logs/2022111000.log @@ -0,0 +1,79 @@ +2023-05-28 01:31:38 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 01:31:38 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:36854 +2023-05-28 01:33:22 +0000 :: fe3 :: Submission status of previously pending gfsfcst is failure! sbatch: error: Batch job submission failed: Invalid qos specification +2023-05-28 01:33:22 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 01:33:23 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28483216 +2023-05-28 01:34:46 +0000 :: fe3 :: Task gfsfcst, jobid=28483216, in state QUEUED (PENDING) +2023-05-28 06:05:19 +0000 :: fe2 :: Submitting gfsfcst +2023-05-28 06:05:22 +0000 :: fe2 :: Submission of gfsfcst succeeded, jobid=28492564 +2023-05-28 06:38:42 +0000 :: fe3 :: Task gfsfcst, jobid=28492564, in state FAILED (FAILED), ran for 219.0 seconds, exit status=11, try=1 (of 2) +2023-05-28 06:38:42 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 06:38:42 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:33495 +2023-05-29 16:48:27 +0000 :: fe2 :: Task gfsfcst, jobid=28566454, in state FAILED (CANCELLED), ran for 20.0 seconds, exit status=15, try=1 (of 2) +2023-05-29 16:48:27 +0000 :: fe2 :: Task gfspost_f000-f000, jobid=28566455, in state FAILED (CANCELLED), ran for 20.0 seconds, exit status=15, try=1 (of 2) +2023-05-29 16:48:27 +0000 :: fe2 :: Task gfspost_f006-f006, jobid=28566457, in state FAILED (CANCELLED), ran for 20.0 seconds, exit status=15, try=1 (of 2) +2023-05-29 16:48:27 +0000 :: fe2 :: Task gfspost_f012-f012, jobid=28566459, in state FAILED (CANCELLED), ran for 20.0 seconds, exit status=15, try=1 (of 2) +2023-05-29 16:48:27 +0000 :: fe2 :: Submitting gfsfcst +2023-05-29 16:48:27 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:32912 +2023-05-29 16:48:50 +0000 :: fe2 :: Submission status of previously pending gfsfcst is success, jobid=28566687 +2023-05-29 16:48:50 +0000 :: fe2 :: Task gfsfcst, jobid=28566687, in state RUNNING (RUNNING) +2023-05-29 16:49:10 +0000 :: fe2 :: Submitting gfsfcst +2023-05-29 16:49:10 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:38302 +2023-05-29 17:17:08 +0000 :: fe2 :: Submission status of previously pending gfsfcst is success, jobid=28566693 +2023-05-29 17:17:08 +0000 :: fe2 :: Task gfsfcst, jobid=28566693, in state FAILED (FAILED), ran for 178.0 seconds, exit status=11, try=1 (of 2) +2023-05-29 17:17:08 +0000 :: fe2 :: Submitting gfsfcst +2023-05-29 17:17:08 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:39108 +2023-05-29 
17:53:18 +0000 :: fe3 :: Submitting gfsfcst +2023-05-29 17:53:19 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28568635 +2023-05-29 18:16:29 +0000 :: fe3 :: Task gfsfcst, jobid=28568635, in state FAILED (FAILED), ran for 219.0 seconds, exit status=9, try=1 (of 2) +2023-05-29 18:16:29 +0000 :: fe3 :: Submitting gfsfcst +2023-05-29 18:16:29 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:45793 +2023-05-29 18:20:37 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28570074 +2023-05-29 18:20:37 +0000 :: fe3 :: Task gfsfcst, jobid=28570074, in state DEAD (FAILED), ran for 40.0 seconds, exit status=35072, try=2 (of 2) +2023-05-29 18:20:57 +0000 :: fe3 :: Forcibly submitting gfsfcst +2023-05-29 18:20:57 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:36324 +2023-05-29 19:06:14 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28570204 +2023-05-29 19:06:15 +0000 :: fe3 :: Task gfsfcst, jobid=28570204, in state DEAD (FAILED), ran for 229.0 seconds, exit status=11, try=4 (of 2) +2023-05-29 19:06:30 +0000 :: fe3 :: Forcibly submitting gfsfcst +2023-05-29 19:06:30 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:37214 +2023-05-30 15:13:05 +0000 :: fe2 :: Submission status of previously pending gfsfcst is success, jobid=28571649 +2023-05-30 15:13:05 +0000 :: fe2 :: Task gfsfcst, jobid=28571649, in state DEAD (TIMEOUT), ran for 2424.0 seconds, exit status=255, try=6 (of 2) +2023-05-30 15:13:05 +0000 :: fe2 :: This cycle is complete: Success +2023-05-30 15:24:18 +0000 :: fe2 :: Forcibly submitting gfsfcst +2023-05-30 15:24:18 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:39556 +2023-05-30 15:31:13 +0000 :: fe2 :: Submitting gfsfcst +2023-05-30 15:31:13 +0000 :: fe2 :: Submitting gfspost_f000-f000 +2023-05-30 15:31:13 +0000 :: fe2 :: Submitting gfspost_f006-f006 +2023-05-30 15:31:13 +0000 :: fe2 :: Submitting gfspost_f012-f012 +2023-05-30 15:31:13 +0000 :: fe2 :: Submitting gfspost_f018-f018 +2023-05-30 15:31:13 +0000 :: fe2 :: Submission of gfsfcst succeeded, jobid=28615215 +2023-05-30 15:31:13 +0000 :: fe2 :: Submission of gfspost_f000-f000 succeeded, jobid=28615216 +2023-05-30 15:31:13 +0000 :: fe2 :: Submission of gfspost_f006-f006 succeeded, jobid=28615217 +2023-05-30 15:31:13 +0000 :: fe2 :: Submission of gfspost_f012-f012 succeeded, jobid=28615218 +2023-05-30 15:31:13 +0000 :: fe2 :: Submission of gfspost_f018-f018 succeeded, jobid=28615219 +2023-05-31 14:31:51 +0000 :: fe6 :: Submission status of previously pending gfsfcst is success, jobid=28615020 +2023-05-31 14:31:51 +0000 :: fe6 :: Task gfsfcst, jobid=28615020, in state DEAD (FAILED), ran for 6422.0 seconds, exit status=11, try=8 (of 2) +2023-05-31 14:31:57 +0000 :: fe6 :: Submitting gfspost_f000-f000 +2023-05-31 14:31:57 +0000 :: fe6 :: Submitting gfspost_f006-f006 +2023-05-31 14:31:57 +0000 :: fe6 :: Submitting gfspost_f012-f012 +2023-05-31 14:31:57 +0000 :: fe6 :: Submitting gfspost_f018-f018 +2023-05-31 14:31:57 +0000 :: fe6 :: Submitting gfspost_f024-f024 +2023-05-31 14:31:59 +0000 :: fe6 :: Submission of gfspost_f000-f000 succeeded, jobid=28666944 +2023-05-31 14:31:59 +0000 :: fe6 :: Submission of gfspost_f006-f006 succeeded, jobid=28666945 +2023-05-31 14:31:59 +0000 :: fe6 :: Submission of gfspost_f012-f012 succeeded, jobid=28666946 +2023-05-31 14:31:59 +0000 :: fe6 :: Submission of gfspost_f018-f018 succeeded, jobid=28666947 +2023-05-31 14:31:59 +0000 :: fe6 :: Submission 
of gfspost_f024-f024 succeeded, jobid=28666948 +2023-05-31 14:33:05 +0000 :: fe6 :: Task gfspost_f000-f000, jobid=28666944, in state RUNNING (RUNNING) +2023-05-31 14:33:05 +0000 :: fe6 :: Task gfspost_f006-f006, jobid=28666945, in state RUNNING (RUNNING) +2023-05-31 14:33:05 +0000 :: fe6 :: Task gfspost_f012-f012, jobid=28666946, in state RUNNING (RUNNING) +2023-05-31 14:33:05 +0000 :: fe6 :: Task gfspost_f018-f018, jobid=28666947, in state RUNNING (RUNNING) +2023-05-31 14:33:05 +0000 :: fe6 :: Task gfspost_f024-f024, jobid=28666948, in state RUNNING (RUNNING) +2023-05-31 14:34:01 +0000 :: fe6 :: Task gfspost_f000-f000, jobid=28666944, in state FAILED (CANCELLED), ran for 101.0 seconds, exit status=15, try=1 (of 2) +2023-05-31 14:34:01 +0000 :: fe6 :: Task gfspost_f006-f006, jobid=28666945, in state FAILED (CANCELLED), ran for 101.0 seconds, exit status=15, try=1 (of 2) +2023-05-31 14:34:01 +0000 :: fe6 :: Task gfspost_f012-f012, jobid=28666946, in state FAILED (CANCELLED), ran for 101.0 seconds, exit status=15, try=1 (of 2) +2023-05-31 14:34:05 +0000 :: fe6 :: Task gfspost_f018-f018, jobid=28666947, in state FAILED (CANCELLED), ran for 101.0 seconds, exit status=15, try=1 (of 2) +2023-05-31 14:34:09 +0000 :: fe6 :: Task gfspost_f024-f024, jobid=28666948, in state FAILED (CANCELLED), ran for 101.0 seconds, exit status=15, try=1 (of 2) +2023-05-31 14:34:09 +0000 :: fe6 :: Forcibly submitting gfsfcst +2023-05-31 14:34:09 +0000 :: fe6 :: Submission status of gfsfcst is pending at druby://fe6:38058 +2023-05-31 14:35:09 +0000 :: fe6 :: Submission status of previously pending gfsfcst is success, jobid=28667013 +2023-05-31 14:35:11 +0000 :: fe6 :: Task gfsfcst, jobid=28667013, in state RUNNING (RUNNING) diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/runcmds b/FV3GFSwfm/testing/v17_p8_12x12_xjet/runcmds new file mode 100644 index 0000000000..331bec8558 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8.xml -d v17_p8.db +rocotostat -w v17_p8.xml -d v17_p8.db diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/v17_p8.crontab b/FV3GFSwfm/testing/v17_p8_12x12_xjet/v17_p8.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/v17_p8.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/v17_p8.db b/FV3GFSwfm/testing/v17_p8_12x12_xjet/v17_p8.db new file mode 100644 index 0000000000..b2e0922a67 Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_12x12_xjet/v17_p8.db differ diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/v17_p8.xml b/FV3GFSwfm/testing/v17_p8_12x12_xjet/v17_p8.xml new file mode 100644 index 0000000000..c5bf7a2d26 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/v17_p8.xml @@ -0,0 +1,152 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211090000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 01:00:00 + xjet + 192:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + 
cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/v17_p8_ALL.xml b/FV3GFSwfm/testing/v17_p8_12x12_xjet/v17_p8_ALL.xml new file mode 100644 index 0000000000..1310a1eab7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet/v17_p8_ALL.xml @@ -0,0 +1,195 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3-dev + batch + vjet + 00:02:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + debug + 00:30:00 + vjet + 232:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + vjet + 01:00:00 + 4:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + 
gsd-fv3-dev + batch + vjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet/v17_p8_lock.db b/FV3GFSwfm/testing/v17_p8_12x12_xjet/v17_p8_lock.db new file mode 100644 index 0000000000..1a888d897f Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_12x12_xjet/v17_p8_lock.db differ diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/12x12x2wg b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/12x12x2wg new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.aero b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.aeroanl b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.anal b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ 
"${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.analcalc b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.analdiag b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.arch b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.atmanl b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.atmanlrun @@ -0,0 +1,11 @@ +#! 
/usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.atmensanl b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.awips b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. 
of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.base b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.base new file mode 100644 index 0000000000..df6c4a7f64 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.base @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." 
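The date windows used throughout these configs (the historical CONVINFO/OZINFO/SATINFO switches in config.anal above, and the DUMP_SUFFIX window a few lines below) compare the cycle date as a fixed-width YYYYMMDDHH value, so plain integer tests order cycles chronologically. A minimal sketch of the idiom, using an illustrative cycle date rather than one taken from this experiment:

    # Illustrative only: CDATE is a 10-digit YYYYMMDDHH cycle date (sample value here).
    CDATE=2019100100
    # Fixed-width digits make numeric comparison equivalent to chronological order,
    # which is why the -ge/-lt/-le window tests work throughout these files.
    if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then
      echo "cycle ${CDATE} falls inside the window"
    fi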
+export assim_freq=6 +export PSLOT="v17_p8_12x12_xjet_2wg" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-168} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-168} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-168} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-168} +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." 
+ export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is 
defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
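The per-cycle forecast length and the GFS cycle spacing set just below both derive one value from another variable's name. A hedged, self-contained sketch with sample values (not the experiment's actual settings) of how the eval indirection and the STEP_GFS arithmetic resolve:

    # Sample values for illustration; in the config these come from the experiment setup.
    cyc=06
    gfs_cyc=1
    FHMAX_GFS_00=120; FHMAX_GFS_06=120; FHMAX_GFS_12=120; FHMAX_GFS_18=120
    # eval expands the constructed name, so this picks up FHMAX_GFS_06 here.
    FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}})
    echo "${FHMAX_GFS}"          # 120
    # bash indirect expansion gives the same result without eval:
    name="FHMAX_GFS_${cyc}"
    echo "${!name}"              # 120
    # gfs_cyc counts GFS cycles per day, so the spacing between them is 24/gfs_cyc hours:
    echo "$(( 24 / gfs_cyc ))"   # 24 (gfs_cyc=2 gives 12, gfs_cyc=4 gives 6)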
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
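Several of the variables set a few lines below (INCREMENTS_TO_ZERO, INCVARS_ZERO_STRAT) wrap single-quoted names inside double quotes; the shell strips only the outer quotes, leaving a comma-separated, quoted list of the kind a Fortran-style namelist expects. A small sketch (the namelist consumption is an assumption for illustration; this file only exports the string):

    # The shell removes the outer double quotes; the inner single quotes are literal.
    INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'"
    echo "${INCREMENTS_TO_ZERO}"   # prints: 'liq_wat_inc','icmr_inc'
    # A value in this form can be dropped into a namelist-style line such as:
    #   incvars_to_zero = 'liq_wat_inc','icmr_inc'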
+ +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. 
+export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
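The resolution block that follows maps the atmospheric cube-sphere CASE to an ocean resolution code; the OCNRES strings appear to encode hundredths of a degree (025 for a quarter-degree grid, 100 for one degree), though that reading is inferred rather than stated in this file. A self-contained sketch of the lookup, using a hypothetical helper name:

    # Hypothetical helper reproducing the CASE -> OCNRES lookup for illustration.
    ocnres_for_case() {
      case "$1" in
        "C48")  echo 500;;
        "C96")  echo 100;;
        "C192") echo 050;;
        "C384") echo 025;;
        "C768") echo 025;;
        *)      echo 025;;
      esac
    }
    ocnres_for_case C384   # prints 025, read here as roughly a 0.25 degree ocean/ice grid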
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
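In the IAU block just above, IAU_FHROT is derived by taking the first character of IAUFHRS, which works because the first increment hour is a single digit. A quick sketch with the value used here:

    IAUFHRS="3,6,9"
    # cut -c1 keeps only the first character of the string, i.e. the first forecast hour.
    IAU_FHROT=$(echo ${IAUFHRS} | cut -c1)
    echo "${IAU_FHROT}"   # 3
    # Assumption worth noting: a two-digit leading hour (e.g. "12,...") would be cut to "1".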
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
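The APP case statement further down turns a single application string into component on/off switches, using suffix and prefix regex matches for the aerosol and wave variants. An illustrative sketch of how a hypothetical APP value would resolve (sample value and stripped-down logic, not the full branch):

    # Hypothetical value for illustration: coupled S2S with waves and aerosols.
    APP="S2SWA"
    DO_AERO="NO"; DO_WAVE="NO"
    if [[ "${APP}" == S2S* ]]; then
      # a trailing "A" switches on aerosols, a leading "S2SW" switches on waves
      [[ "${APP}" =~ A$ ]] && DO_AERO="YES"
      [[ "${APP}" =~ ^S2SW ]] && DO_WAVE="YES"
    fi
    echo "DO_AERO=${DO_AERO} DO_WAVE=${DO_WAVE}"   # DO_AERO=YES DO_WAVE=YES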
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
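The cold-start check later in this file zeroes the IAU offsets whenever increments cannot be applied: the first cycle of a cold-started cycled experiment, any run with DOIAU turned off, or a cold-started forecast-only run. A sketch with hypothetical values showing the forecast-only branch firing:

    # Hypothetical values: a cold-started forecast-only experiment.
    MODE="forecast-only"; EXP_WARM_START=".false."; DOIAU="YES"
    SDATE=2022111000; CDATE=2022111000
    if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] \
       || [[ "${DOIAU}" = "NO" ]] \
       || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]]; then
      IAU_OFFSET=0
      IAU_FHROT=0
    fi
    echo "IAU_OFFSET=${IAU_OFFSET:-unset} IAU_FHROT=${IAU_FHROT:-unset}"   # both 0 here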
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.com b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx 
COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
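The note at the top of config.defaults.s2sw, that empty variables must include a space or they will be overwritten, follows from how ${var:-default} expansion treats empty strings: a truly empty value is replaced just like an unset one, while a single space counts as a value and survives. A short sketch:

    FOO=""
    echo "[${FOO:-fallback}]"   # [fallback]  ':-' substitutes for empty as well as unset
    FOO=" "
    echo "[${FOO:-fallback}]"   # [ ]         a lone space is kept through ':-' defaulting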
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.earc b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ecen b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.echgres b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ediag b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.efcs b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.eobs b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.epos b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.esfc b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.eupd b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.fcst b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.fcst new file mode 120000 index 0000000000..582748bd67 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.fcst @@ -0,0 +1 @@ +../config.fcst_new \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
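+
+# Minimal sketch of how the WAVE_CDUMP case block near the top of this file behaves
+# (example values are assumptions, not taken from this experiment). The case compares
+# ${WAVE_CDUMP} against "both" and against ${CDUMP/enkf}, i.e. CDUMP with the first
+# "enkf" substring removed. Assuming WAVE_CDUMP=gdas:
+#   CDUMP=gdas     -> ${CDUMP/enkf}=gdas matches  -> DO_WAVE left as-is
+#   CDUMP=enkfgdas -> ${CDUMP/enkf}=gdas matches  -> DO_WAVE left as-is
+#   CDUMP=gfs      -> no pattern matches          -> DO_WAVE="NO"
+# WAVE_CDUMP=both keeps waves on for every CDUMP.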
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not used. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." 
+ export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." 
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
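+
+ # Worked example for the restart list built above (numbers are illustrative
+ # assumptions only): with restart_interval_gfs=12, IAU_OFFSET=6 and FHMAX_GFS=48,
+ # the loop starts at xfh=12+(6/2)=15 and steps by 12, so restart_interval="15 27 39".
+ # With restart_interval_gfs<=0 the list collapses to "$FHMAX_GFS", i.e. a single
+ # restart written at the end of the forecast.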
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.fit2obs b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.gempak b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.getic b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.gldas b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ice b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.init b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.init @@ -0,0 +1,54 @@ +#! 
/usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.landanl b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.landanlinit b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.landanlinit @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.landanlrun b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.metp b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
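+
+# Example of how the templated names above resolve (values are assumptions for
+# illustration): with CDUMP=gfs, an init time of 2021032100 and a 24-h lead,
+# model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" yields
+# "pgbf24.gfs.2021032100.grib2". ${PSLOT}/${CDUMP}/${ATARDIR}/${ARCDIR} are expanded
+# by the shell when this file is sourced, while the {lead?...}/{init?...}/{valid?...}
+# keys are left for METplus / EMC_verif-global to fill at run time.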
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.nsst b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocn b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocnanal b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocnanalbmat b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocnanalbmat new file mode 100644 index 
0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocnpost b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.post b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.postsnd b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.prep b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.resources b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + 
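# A worked example of the PETS bookkeeping above, taking the C384 coupled task counts from
# config.ufs in this directory as assumed inputs (ntasks_fv3=288, ntasks_quilt=96, 1 FV3
# thread, MOM6=220, CICE6=120, WW3=80 tasks with 2 threads); the totals are illustrative only.
ntasks_fv3=288; ntasks_quilt=96; nthreads_fv3=1
ntasks_mom6=220; nthreads_mom6=1
ntasks_cice6=120; nthreads_cice6=1
ntasks_ww3=80; nthreads_ww3=2
(( FV3PETS = ntasks_fv3 * nthreads_fv3 ))         # 288
(( QUILTPETS = ntasks_quilt * nthreads_fv3 ))     # 96
(( ATMPETS = FV3PETS + QUILTPETS ))               # 384
(( OCNPETS = ntasks_mom6 * nthreads_mom6 ))       # 220
(( ICEPETS = ntasks_cice6 * nthreads_cice6 ))     # 120
(( WAVPETS = ntasks_ww3 * nthreads_ww3 ))         # 160
(( NTASKS_TOT = ATMPETS + OCNPETS + ICEPETS + WAVPETS ))
echo "Total PETS = ${NTASKS_TOT}"                 # 884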
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requiremtn + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
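# A short sketch of how the eval-based resource assignments above (e.g. for arch/earc/getic)
# expand at runtime, and how a job could read the result back generically; the step name is
# only an example.
step="arch"
eval "export wtime_${step}='06:00:00'"            # equivalent to: export wtime_arch='06:00:00'
varname="wtime_${step}"
echo "${step} walltime: ${!varname}"              # indirect expansion prints 06:00:00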
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.sfcanl b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ufs b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ufs new file mode 100644 index 0000000000..ad28ce429b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.ufs @@ -0,0 +1,372 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
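# Worked example (numbers taken from the C768 gdas settings above, shown only to make the
# write-task arithmetic concrete): per-thread write tasks are the per-tile count times the
# 6 tiles, and the quilt task count is that times the number of write groups.
WRITE_GROUP=2; WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10
layout_x=8; layout_y=12
(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 ))   # 60
(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD ))                  # 120
(( ntasks_fv3 = layout_x * layout_y * 6 ))                                       # 576
echo "C768 gdas: ${ntasks_fv3} forecast tasks + ${ntasks_quilt} write tasks"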
MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.vrfy b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wafs b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wafsblending b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wave b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wavegempak b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wavegempak @@ -0,0 +1,13 @@ +#! 
/usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.waveinit b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.waveprep b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/logs/2022111000.log b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/logs/2022111000.log new file mode 100644 index 0000000000..ee6958aafe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/logs/2022111000.log @@ -0,0 +1,2 @@ +2023-05-31 21:12:44 +0000 :: fe5 :: Submitting gfsfcst +2023-05-31 21:12:44 +0000 :: fe5 :: Submission status of gfsfcst is pending at druby://fe5:45106 diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/runcmds b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/runcmds new file mode 100644 index 0000000000..331bec8558 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8.xml -d v17_p8.db +rocotostat -w v17_p8.xml -d v17_p8.db diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/v17_p8.crontab b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/v17_p8.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/v17_p8.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/v17_p8.db b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/v17_p8.db new file mode 100644 index 0000000000..364a463e06 Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/v17_p8.db differ diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/v17_p8.xml b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/v17_p8.xml new file mode 100644 index 0000000000..0169493221 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/v17_p8.xml @@ -0,0 +1,152 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211090000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 01:00:00 + xjet + 
164:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/v17_p8_ALL.xml b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/v17_p8_ALL.xml new file mode 100644 index 0000000000..1310a1eab7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/v17_p8_ALL.xml @@ -0,0 +1,195 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3-dev + batch + vjet + 00:02:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + debug + 00:30:00 + vjet + 232:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + vjet + 01:00:00 + 4:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + 
DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + vjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/v17_p8_lock.db b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/v17_p8_lock.db new file mode 100644 index 0000000000..3721219aeb Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg/v17_p8_lock.db differ diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/12x12 b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/12x12 new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.aero b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.aeroanl b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.anal b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C 
GNSSRO + if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.analcalc b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.analdiag b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.arch b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.atmanl b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.atmanlrun @@ -0,0 +1,11 @@ +#! 
/usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.atmensanl b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.awips b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. 
of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.base b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.base new file mode 100644 index 0000000000..a6721fa859 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.base @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." 
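A minimal sketch, not part of the committed config, of how the experiment window defined just above is stepped through: SDATE and EDATE bound the experiment, and assim_freq (set immediately below) is the cycle interval in hours. GNU date is assumed here purely for illustration; in practice the cycle list comes from the rocoto cycle definition in the experiment XML rather than a shell loop.

    cdate="${SDATE}"
    while (( cdate <= EDATE )); do
        echo "cycle: ${cdate}"
        # advance by one assimilation window (assim_freq hours)
        cdate=$(date -u -d "${cdate:0:8} ${cdate:8:2}:00 ${assim_freq} hours" +%Y%m%d%H)
    done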
+export assim_freq=6 +export PSLOT="v17_p8_12x12_xjet_2wg_12wt" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-168} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-168} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-168} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-168} +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." 
+ export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is 
defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
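A worked example of the arithmetic applied a few lines below, where the spacing between long GFS forecasts is STEP_GFS = 24 / gfs_cyc (and gfs_cyc=0 is special-cased to STEP_GFS=0, i.e. no GFS cycle):

    # gfs_cyc=1  ->  STEP_GFS=24   (one GFS forecast per day, 00Z only)
    # gfs_cyc=2  ->  STEP_GFS=12   (00Z and 12Z)
    # gfs_cyc=4  ->  STEP_GFS=6    (all four cycles)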
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
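Spelled out for readability, the IAU reset a short way above (the check that zeroes IAU_OFFSET and IAU_FHROT) fires in any of three situations:

    # 1. a cycled experiment on its first cycle (SDATE == CDATE) starting cold (EXP_WARM_START=.false.)
    # 2. IAU switched off explicitly (DOIAU=NO)
    # 3. a forecast-only experiment starting from cold-start initial conditions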
+ +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. 
+export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
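REDOUT="1>" and REDERR="2>" above are stored as plain strings: they only act as redirections once the assembled command line is re-parsed with eval, and ERRSCRIPT follows the same deferred-evaluation pattern. A small self-contained sketch of the idiom, with hypothetical command and log names (illustration only; the scripts that actually consume these variables are not shown in this patch):

# Hypothetical demonstration of the eval/redirection-string idiom.
REDOUT="1>"
REDERR="2>"
err=0
ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'}
# Without eval, "1>" and "2>" would reach the program as literal arguments;
# eval re-parses the line so they become real redirections.
eval "echo demo-output ${REDOUT} demo.out ${REDERR} demo.err"
# ERRSCRIPT is likewise a command kept in a string and evaluated after a step.
${ERRSCRIPT} && echo "step succeeded (err=${err})"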
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
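In the IAU block above, IAU_FHROT is derived from IAUFHRS with cut -c1, i.e. it takes the first character of "3,6,9" and yields 3, which is fine as long as the first increment hour is a single digit. For reference only, a sketch of a first-field extraction that would also survive a two-digit leading hour (an illustrative alternative, not a change being made here):

# Sketch: take everything before the first comma instead of the first character.
IAUFHRS="3,6,9"
IAU_FHROT="${IAUFHRS%%,*}"        # -> 3 (and "10,13,16" -> 10)
# The existing form gives the same answer for single-digit hours:
#   echo "${IAUFHRS}" | cut -c1   # -> 3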
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.com b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx 
COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
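The COM templates in config.com above are kept single-quoted on purpose: substitution is deferred until a job calls the generate_com() helper from ush/preamble.sh. That helper is not part of this patch, so purely as an illustration of the mechanism documented in config.com, here is a hypothetical, simplified stand-in (the -r/-x option handling is omitted and the _sketch name is invented for this example):

# Hypothetical sketch of a generate_com-style expansion (NOT the real helper).
generate_com_sketch() {
  local arg var tmpl
  for arg in "$@"; do
    var="${arg%%:*}"                                   # target variable name
    tmpl="${arg#*:}"
    [[ "${tmpl}" == "${var}" ]] && tmpl="${var}_TMPL"   # default template name
    # The template still holds literal ${RUN}/${YMD}/${HH}/... because it was
    # declared inside single quotes; eval expands it with the values set now.
    eval "declare -gx ${var}=\"${!tmpl}\""
  done
}
# Example (mirrors the usage shown in the config.com comments):
#   YMD=${PDY} HH=${cyc} generate_com_sketch COM_ATMOS_ANALYSIS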
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.earc b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ecen b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.echgres b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ediag b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.efcs b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.eobs b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.epos b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.esfc b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.eupd b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.fcst b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
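Near the top of config.fcst_gsl above, the per-component configs are sourced through an eval echo \$$control indirection plus an awk call to lower-case the component name. Purely for reference, the same logic written with bash-native expansions (an equivalent sketch, not a change to the file):

# Sketch: ${!control} resolves e.g. DO_WAVE, and ${component,,} lower-cases.
for component in WAVE OCN ICE AERO; do
  control="DO_${component}"
  if [[ "${!control}" == "YES" ]]; then
    source "${EXPDIR}/config.${component,,}"
  fi
done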
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." 
+ export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.fcst_link b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.fcst_link new file mode 120000 index 0000000000..582748bd67 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.fcst_link @@ -0,0 +1 @@ +../config.fcst_new \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash
+
+########## config.fcst ##########
+# Forecast specific
+
+echo "BEGIN: config.fcst"
+
+# Turn off waves if not used for this CDUMP
+case $WAVE_CDUMP in
+  both | ${CDUMP/enkf} ) ;; # Don't change
+  *) DO_WAVE="NO" ;; # Turn waves off
+esac
+
+# Source model specific information that is resolution dependent
+string="--fv3 $CASE"
+[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES"
+[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES"
+[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}"
+source $EXPDIR/config.ufs ${string}
+
+# Source component configs if necessary
+for component in WAVE OCN ICE AERO; do
+  control="DO_${component}"
+  if [[ $(eval echo \$$control) == "YES" ]]; then
+    . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }')
+  fi
+done
+
+# Get task specific resources
+. $EXPDIR/config.resources fcst
+export domains_stack_size="16000000"
+
+
+if [[ "$DONST" = "YES" ]]; then
+  . $EXPDIR/config.nsst
+fi
+
+export esmf_profile=".false."
+export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE
+
+
+#######################################################################
+# COUPLING COMPONENTS
+
+# cpl defaults
+export cpl=".false."
+export cplflx=".false."
+export cplice=".false."
+export cplchm=".false."
+export cplwav=".false."
+
+# cpl changes based on APP
+
+if [[ "$DO_COUPLED" = "YES" ]]; then
+  export cpl=".true."
+fi
+if [[ "$DO_AERO" = "YES" ]]; then
+  export cplchm=".true."
+fi
+if [[ "$DO_ICE" = "YES" ]]; then
+  export cplice=".true."
+  export cplflx=".true."
+fi
+if [[ "$DO_OCN" = "YES" ]]; then
+  export cplflx=".true."
+fi
+if [[ "$DO_WAVE" = "YES" ]]; then
+  export cplwav=".true."
+fi
+
+#######################################################################
+
+export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh"
+#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py"  # Temp. while this is worked on
+export FCSTEXECDIR="$HOMEgfs/exec"
+export FCSTEXEC="ufs_model.x"
+
+#######################################################################
+# Model configuration
+export TYPE="nh"
+export MONO="non-mono"
+
+# Use stratosphere h2o physics
+export h2o_phys=".true."
+
+# Options of stratosphere O3 physics reaction coefficients
+export new_o3forc="YES"
+
+export gwd_opt=2
+
+# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc
+# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD)
+# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not used.
+if [[ "$gwd_opt" -eq 1 ]]; then
+  export knob_ugwp_version=0
+  export do_ugwp=".false."
+  export do_tofd=".false."
+  export launch_level=$(echo "$LEVS/2.35" |bc)
+fi
+
+
+# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc
+if [[ "$gwd_opt" -eq 2 ]]; then
+
+  #--used for UFS p7 and p8a
+  #export knob_ugwp_version=1
+  #export do_ugwp=".false."
+  #export do_tofd=".false."
+  #export do_ugwp_v0=".false."
+  #export do_ugwp_v1=".true."
+  #export do_ugwp_v0_orog_only=".false."
+  #export do_ugwp_v0_nst_only=".false."
+  #export do_gsl_drag_ls_bl=".true."
+  #export do_gsl_drag_ss=".true."
+  #export do_gsl_drag_tofd=".true."
+  #export do_ugwp_v1_orog_only=".false."
+
+  #--used for UFS p8
+  export knob_ugwp_version=0
+  export do_ugwp=".false."
+  export do_tofd=".false."
+  export do_ugwp_v0=".true."
+  export do_ugwp_v1=".false."
+  export do_ugwp_v0_orog_only=".false."
+  export do_ugwp_v0_nst_only=".false."
+  export do_gsl_drag_ls_bl=".false."
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
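+
+  # Worked example for the restart_interval logic above (illustrative values,
+  # not settings from this experiment): with restart_interval_gfs=12,
+  # DOIAU=YES, IAU_OFFSET=6, and FHMAX_GFS=48, xfh starts at 12+(6/2)=15 and
+  # then advances in steps of 12, so restart_interval becomes "15 27 39".
+  # With the default restart_interval_gfs=0 it collapses to "$FHMAX_GFS",
+  # i.e. a single set of restart files written at the end of the forecast.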
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.fit2obs b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.gempak b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.getic b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.gldas b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ice b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.init b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.init @@ -0,0 +1,54 @@ +#! 
/usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.landanl b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.landanlinit b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.landanlinit @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.landanlrun b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.metp b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
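+# Worked example of the model_file_format template above (illustrative values,
+# not taken from this experiment): with CDUMP=gfs, an initialization time of
+# 2021032100, and a 24-hour lead, the template expands to
+# "pgbf24.gfs.2021032100.grib2"; the {lead?fmt=%2H} and {init?fmt=%Y%m%d%H}
+# placeholders are filled in by METplus at run time.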
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.nsst b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash
+
+########## config.nsst ##########
+# NSST specific
+
+echo "BEGIN: config.nsst"
+
+# NSST parameters contained within nstf_name
+
+# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled
+export NST_MODEL=2
+
+# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON,
+export NST_SPINUP=0
+if [[ "$CDATE" -lt "2017072000" ]]; then
+  export NST_SPINUP=1
+fi
+
+# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON
+export NST_RESV=0
+
+# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction)
+export ZSEA1=0
+export ZSEA2=0
+
+export NST_GSI=3   # default 0: No NST info at all;
+                   #         1: Input NST info but not used in GSI;
+                   #         2: Input NST info, used in CRTM simulation, no Tr analysis
+                   #         3: Input NST info, used in both CRTM simulation and Tr analysis
+export NSTINFO=0   # number of elements added in obs. data array (default = 0)
+if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi
+
+echo "END: config.nsst"
diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocn b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocn
new file mode 100644
index 0000000000..7d14e3dd52
--- /dev/null
+++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocn
@@ -0,0 +1,23 @@
+#! /usr/bin/env bash
+
+echo "BEGIN: config.ocn"
+
+# MOM_input template to use
+export MOM_INPUT="MOM_input_template_${OCNRES}"
+
+export DO_OCN_SPPT="NO"        # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False)
+export DO_OCN_PERT_EPBL="NO"   # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False)
+
+# Templated variables in MOM_input_template
+export MOM6_USE_LI2016="True"  # set to False for restart reproducibility
+export MOM6_THERMO_SPAN="False"
+export MOM6_ALLOW_LANDMASK_CHANGES="False"
+
+if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then
+  export ODA_INCUPD="True"
+else
+  export ODA_INCUPD="False"
+fi
+export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment
+
+echo "END: config.ocn"
diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocnanal b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocnanal
new file mode 100644
index 0000000000..f5925809fc
--- /dev/null
+++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocnanal
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+########## config.ocnanal ##########
+# configuration common to all ocean analysis tasks
+
+echo "BEGIN: config.ocnanal"
+
+export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config
+export OBS_LIST=
+[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml
+export OBS_YAML=${OBS_LIST}
+export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml
+export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25
+export SOCA_VARS=tocn,socn,ssh
+export SABER_BLOCKS_YAML=
+export SOCA_NINNER=50
+export CASE_ANL=C48
+export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent
+export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin
+
+# R2D2
+export R2D2_OBS_DB=shared
+export R2D2_OBS_DUMP=s2s_v1
+export R2D2_OBS_SRC=gdas_marine
+export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed
+export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2
+
+# NICAS
+export NICAS_RESOL=1
+export NICAS_GRID_SIZE=15000
+
+echo "END: config.ocnanal"
diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocnanalbmat b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocnanalbmat
new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocnpost b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ocnpost @@ -0,0 +1,16 @@ +#! 
/usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.post b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.postsnd b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.prep b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.resources b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs}
+      export layout_y=${layout_y_gfs}
+      export WRITE_GROUP=${WRITE_GROUP_GFS}
+      export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS}
+      ntasks_fv3=${ntasks_fv3_gfs}
+      ntasks_quilt=${ntasks_quilt_gfs}
+      nthreads_fv3=${nthreads_fv3_gfs}
+    fi
+
+    # PETS for the atmosphere dycore
+    (( FV3PETS = ntasks_fv3 * nthreads_fv3 ))
+    echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})"
+
+    # PETS for quilting
+    if [[ "${QUILTING:-}" = ".true." ]]; then
+      (( QUILTPETS = ntasks_quilt * nthreads_fv3 ))
+      (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD ))
+      export WRTTASK_PER_GROUP
+    else
+      QUILTPETS=0
+    fi
+    echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})"
+
+    # Total PETS for the atmosphere component
+    ATMTHREADS=${nthreads_fv3}
+    (( ATMPETS = FV3PETS + QUILTPETS ))
+    export ATMPETS ATMTHREADS
+    echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})"
+
+    # Total PETS for the coupled model (starting w/ the atmosphere)
+    NTASKS_TOT=${ATMPETS}
+
+    # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks.
+    # However, it is suggested to limit the mediator PETS to 300, as a larger value may cause slow performance.
+    # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit
+    # TODO: Update reference when moved to ufs-weather-model RTD
+    MEDTHREADS=${nthreads_mediator:-1}
+    MEDPETS=${MEDPETS:-ATMPETS}
+    [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300
+    export MEDPETS MEDTHREADS
+    echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})"
+
+    if [[ "${DO_AERO}" = "YES" ]]; then
+      # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks).
+      (( CHMTHREADS = ATMTHREADS ))
+      (( CHMPETS = FV3PETS ))
+      # Do not add to NTASKS_TOT
+      export CHMPETS CHMTHREADS
+      echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})"
+    fi
+
+    if [[ "${DO_WAVE}" = "YES" ]]; then
+      (( WAVPETS = ntasks_ww3 * nthreads_ww3 ))
+      (( WAVTHREADS = nthreads_ww3 ))
+      export WAVPETS WAVTHREADS
+      echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})"
+      (( NTASKS_TOT = NTASKS_TOT + WAVPETS ))
+    fi
+
+    if [[ "${DO_OCN}" = "YES" ]]; then
+      (( OCNPETS = ntasks_mom6 * nthreads_mom6 ))
+      (( OCNTHREADS = nthreads_mom6 ))
+      export OCNPETS OCNTHREADS
+      echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})"
+      (( NTASKS_TOT = NTASKS_TOT + OCNPETS ))
+    fi
+
+    if [[ "${DO_ICE}" = "YES" ]]; then
+      (( ICEPETS = ntasks_cice6 * nthreads_cice6 ))
+      (( ICETHREADS = nthreads_cice6 ))
+      export ICEPETS ICETHREADS
+      echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})"
+      (( NTASKS_TOT = NTASKS_TOT + ICEPETS ))
+    fi
+
+    echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}"
+
+    if [[ "${_CDUMP}" =~ "gfs" ]]; then
+      declare -x "npe_${step}_gfs"="${NTASKS_TOT}"
+      declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model
+      declare -x "npe_node_${step}_gfs"="${npe_node_max}"
+    else
+      declare -x "npe_${step}"="${NTASKS_TOT}"
+      declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model
+      declare -x "npe_node_${step}"="${npe_node_max}"
+    fi
+
+  done
+
+  case "${CASE}" in
+    "C48" | "C96" | "C192")
+      declare -x "wtime_${step}"="00:30:00"
+      declare -x "wtime_${step}_gfs"="03:00:00"
+      ;;
+    "C384" | "C768" | "C1152")
+      declare -x "wtime_${step}"="01:00:00"
+      declare -x "wtime_${step}_gfs"="06:00:00"
+      ;;
+    *)
+      echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}"
+      exit 1
+      ;;
+  esac
+
+  unset _CDUMP _CDUMP_LIST
+  unset NTASKS_TOT
+
+elif [[ ${step} = "ocnpost" ]]; then
+
+  export wtime_ocnpost="00:30:00"
+  export npe_ocnpost=1
+  export npe_node_ocnpost=1
+  export nth_ocnpost=1
+  export memory_ocnpost="96G"
+  if [[ ${machine} == "JET" ]]; then
+    # JET only has 88GB of requestable memory per node
+    # so a second node is required to meet the requirement
+    npe_ocnpost=2
+  fi
+
+elif [[ ${step} = "post" ]]; then
+
+  export wtime_post="00:12:00"
+  export wtime_post_gfs="01:00:00"
+  export npe_post=126
+  res=$(echo "${CASE}" | cut -c2-)
+  if (( npe_post > res )); then
+    export npe_post=${res}
+  fi
+  export nth_post=1
+  export npe_node_post=${npe_post}
+  export npe_node_post_gfs=${npe_post}
+  export npe_node_dwn=${npe_node_max}
+  if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi
+  if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi
+  export is_exclusive=True
+
+elif [[ ${step} = "wafs" ]]; then
+
+  export wtime_wafs="00:30:00"
+  export npe_wafs=1
+  export npe_node_wafs=${npe_wafs}
+  export nth_wafs=1
+  export memory_wafs="1GB"
+
+elif [[ ${step} = "wafsgcip" ]]; then
+
+  export wtime_wafsgcip="00:30:00"
+  export npe_wafsgcip=2
+  export nth_wafsgcip=1
+  export npe_node_wafsgcip=1
+  export memory_wafsgcip="50GB"
+
+elif [[ ${step} = "wafsgrib2" ]]; then
+
+  export wtime_wafsgrib2="00:30:00"
+  export npe_wafsgrib2=18
+  export nth_wafsgrib2=1
+  npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc)
+  export npe_node_wafsgrib2
+  export memory_wafsgrib2="80GB"
+
+elif [[ ${step} = "wafsblending" ]]; then
+
+  export wtime_wafsblending="00:30:00"
+  export npe_wafsblending=1
+  export nth_wafsblending=1
+  npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc)
+  export npe_node_wafsblending
+  export memory_wafsblending="15GB"
+
+elif [[ ${step} = "wafsgrib20p25" ]]; then
+
+  export wtime_wafsgrib20p25="00:30:00"
+  export npe_wafsgrib20p25=11
+  export nth_wafsgrib20p25=1
+  npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc)
+  export npe_node_wafsgrib20p25
+  export memory_wafsgrib20p25="80GB"
+
+elif [[ ${step} = "wafsblending0p25" ]]; then
+
+  export wtime_wafsblending0p25="00:30:00"
+  export npe_wafsblending0p25=1
+  export nth_wafsblending0p25=1
+  npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc)
+  export npe_node_wafsblending0p25
+  export memory_wafsblending0p25="15GB"
+
+elif [[ ${step} = "vrfy" ]]; then
+
+  export wtime_vrfy="03:00:00"
+  export wtime_vrfy_gfs="06:00:00"
+  export npe_vrfy=3
+  export nth_vrfy=1
+  export npe_node_vrfy=1
+  export npe_vrfy_gfs=1
+  export npe_node_vrfy_gfs=1
+  if [[ ${machine} == "HERA" ]]; then
+    export memory_vrfy="16384M"
+  fi
+  export is_exclusive=True
+
+elif [[ "${step}" = "fit2obs" ]]; then
+
+  export wtime_fit2obs="00:20:00"
+  export npe_fit2obs=3
+  export nth_fit2obs=1
+  export npe_node_fit2obs=1
+  export memory_fit2obs="20G"
+  if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi
+
+elif [[ "${step}" = "metp" ]]; then
+
+  export nth_metp=1
+  export wtime_metp="03:00:00"
+  export npe_metp=4
+  export npe_node_metp=4
+  export wtime_metp_gfs="06:00:00"
+  export npe_metp_gfs=4
+  export npe_node_metp_gfs=4
+  export is_exclusive=True
+
+elif [[ ${step} = "echgres" ]]; then
+
+  export wtime_echgres="00:10:00"
+  export npe_echgres=3
+  export nth_echgres=${npe_node_max}
+  export npe_node_echgres=1
+  if [[ "${machine}" = "WCOSS2" ]]; then
+    export memory_echgres="200GB"
+  fi
+
+elif
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
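+  # Rough node-count check for the eobs/eomg settings above (illustrative only,
+  # assuming the scheduler packs npe_node_eobs tasks per node):
+  #   nodes needed   = npe_eobs / npe_node_eobs = 480 / 40 = 12
+  #   cores per node = npe_node_eobs * nth_eobs = 40 * 3   = 120  (<= npe_node_max=128)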
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.sfcanl b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ufs b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ufs new file mode 100644 index 0000000000..6c48881832 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.ufs @@ -0,0 +1,373 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.vrfy b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wafs b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wafsblending b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wave b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wavegempak b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wavegempak @@ -0,0 +1,13 @@ +#! 
/usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.waveinit b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.waveprep b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/runcmds b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/runcmds new file mode 100644 index 0000000000..4600055097 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/runcmds @@ -0,0 +1,6 @@ + +rocotorun -w v17_p8.xml -d v17_p8.db +rocotostat -w v17_p8.xml -d v17_p8.db + +rocotorun -w v17_p8_post.xml -d v17_p8.db +rocotorun -w v17_p8_post.xml -d v17_p8.db diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/v17_p8.crontab b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/v17_p8.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/v17_p8.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/v17_p8.xml b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/v17_p8.xml new file mode 100644 index 0000000000..7c67a29770 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/v17_p8.xml @@ -0,0 +1,154 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 01:00:00 + xjet + 168:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 
f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/v17_p8_ALL.xml b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/v17_p8_ALL.xml new file mode 100644 index 0000000000..1310a1eab7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/v17_p8_ALL.xml @@ -0,0 +1,195 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3-dev + batch + vjet + 00:02:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + debug + 00:30:00 + vjet + 232:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + vjet + 01:00:00 + 4:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + vjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + 
DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/v17_p8_post.xml b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/v17_p8_post.xml new file mode 100644 index 0000000000..15dc7662f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_2wg_12wt/v17_p8_post.xml @@ -0,0 +1,111 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 00:10:00 + 2:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/12x12 b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/12x12 new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.aero b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.aeroanl b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.anal b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ 
"${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.analcalc b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.analdiag b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.arch b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.atmanl b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.atmanlrun @@ -0,0 +1,11 @@ +#! 
/usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.atmensanl b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.awips b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. 
of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.base b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.base new file mode 100644 index 0000000000..d889641a0d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.base @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." 
+export assim_freq=6 +export PSLOT="v17_p8_12x12_xjet_NEW" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is 
defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
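As an aside, a minimal sketch of the pattern used just below in config.base, where gfs_cyc sets the spacing between long (gfs) forecasts and the forecast length is looked up per cycle hour through indirect expansion; the demo_* names here are hypothetical and only the logic mirrors the real STEP_GFS and FHMAX_GFS lines:
demo_gfs_cyc=4                                              # 4 long forecasts per day, as in the gfs_cyc comment above
demo_cyc=06                                                 # cycle hour, normally supplied by the job card as ${cyc}
demo_step=$(( 24 / demo_gfs_cyc ))                          # -> 6, the analogue of STEP_GFS
demo_fhmax_gfs_06=120                                       # stand-in for FHMAX_GFS_06
demo_fhmax=$(eval echo "\${demo_fhmax_gfs_${demo_cyc}}")    # -> 120, same indirect lookup as FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}})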
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
+ +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. 
+export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
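A note on the @KEY@ tokens used throughout the config.base.emc.dyn_* templates: they are placeholders that the experiment setup step fills in to produce a concrete config.base like the Jet one earlier in this change (machine="JET", SDATE=2022111000, and so on). The tool that performs that substitution is not part of this patch; a minimal, hypothetical stand-in for the same substitution could look like:
# hypothetical values shown for illustration; the real ones come from the experiment setup, not this patch
sed -e 's|@MACHINE@|HERA|g' \
    -e 's|@ACCOUNT@|my_account|g' \
    -e 's|@SDATE@|2022111000|g' \
    -e 's|@PSLOT@|my_experiment|g' \
    config.base.emc.dyn_hera > config.base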
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.com b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx 
COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
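The COM_*_TMPL variables in config.com above are deliberately single-quoted so that ${ROTDIR}, ${RUN}, ${YMD}, ${HH}, and ${MEMDIR} survive until runtime; generate_com() in ush/preamble.sh (not shown in this diff) performs the substitution. A rough illustration of that deferred expansion with placeholder values (generate_com() itself may differ in detail):

ROTDIR="/path/to/comrot"; RUN="gfs"; YMD="20210323"; HH="12"; MEMDIR="mem001"   # placeholders
COM_ATMOS_ANALYSIS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}/analysis/atmos'
eval "COM_ATMOS_ANALYSIS=${COM_ATMOS_ANALYSIS_TMPL}"   # expand the template now
echo "${COM_ATMOS_ANALYSIS}"   # -> /path/to/comrot/gfs.20210323/12/mem001/analysis/atmos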
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.earc b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ecen b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.echgres b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ediag b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.efcs b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.eobs b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.epos b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.esfc b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.eupd b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.fcst b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
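config.ufs is sourced above with an argument string assembled from the active components, and each component config is sourced by lower-casing the component name. A stand-alone sketch of both patterns, with illustrative values; ${!control} is the bash indirect-expansion equivalent of the eval used above:

CASE="C768"; OCNRES="025"; waveGRD="gnh_10m aoc_9km gsh_15m"   # illustrative values
DO_OCN="YES"; DO_ICE="NO"; DO_WAVE="YES"; DO_AERO="NO"
string="--fv3 ${CASE}"
[[ ${DO_OCN} == "YES" ]]  && string="${string} --mom6 ${OCNRES}"
[[ ${DO_WAVE} == "YES" ]] && string="${string} --ww3 ${waveGRD// /;}"   # spaces -> semicolons
echo "${string}"   # --fv3 C768 --mom6 025 --ww3 gnh_10m;aoc_9km;gsh_15m
for component in WAVE OCN ICE AERO; do
  control="DO_${component}"
  if [[ "${!control:-NO}" == "YES" ]]; then
    echo "would source config.$(echo "${component}" | awk '{ print tolower($1) }')"
  fi
done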
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." 
+ export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." 
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
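The while loop above builds the GFS restart hour list by stepping restart_interval_gfs hours at a time, starting half an IAU offset past the first interval. A worked example with illustrative values:

restart_interval_gfs=12; IAU_OFFSET=6; FHMAX_GFS=48     # illustrative values
rst_list=""
xfh=$((restart_interval_gfs + (IAU_OFFSET / 2)))        # first restart hour -> 15
while [ "${xfh}" -le "${FHMAX_GFS}" ]; do
  rst_list="${rst_list} ${xfh}"
  xfh=$((xfh + restart_interval_gfs))
done
echo "restart_interval=${rst_list}"                     # -> restart_interval= 15 27 39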
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.fit2obs b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.gempak b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.getic b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.gldas b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ice b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.init b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.init @@ -0,0 +1,54 @@ +#! 
/usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.landanl b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.landanlinit b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.landanlinit @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.landanlrun b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.metp b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
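model_file_format above is a METplus filename template; {lead?fmt=%2H} is expected to resolve to the two-digit forecast lead in hours and {init?fmt=%Y%m%d%H} to the initialization date-time. A rough bash approximation of the filename it should produce, using illustrative values:

CDUMP="gfs"; init="2021032312"; lead_h=24               # illustrative values
printf 'pgbf%02d.%s.%s.grib2\n' "${lead_h}" "${CDUMP}" "${init}"   # -> pgbf24.gfs.2021032312.grib2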
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.nsst b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocn b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocnanal b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocnanalbmat b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocnanalbmat new file mode 100644 index 
0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocnpost b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.post b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.postsnd b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.prep b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.resources b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + 
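+  # Reference example (added comment; assumes the C768 values set by config.ufs in
+  # this experiment: layout_x_gfs=12, layout_y_gfs=12, nthreads_fv3_gfs=4,
+  # WRITE_GROUP_GFS=4, WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10). With quilting
+  # enabled, the arithmetic above then gives for the gfs forecast:
+  #   FV3PETS   = (12 * 12 * 6) * 4 = 3456
+  #   QUILTPETS = (4 * 10 * 6) * 4  =  960
+  #   ATMPETS   = 3456 + 960        = 4416  (atmosphere-only total)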
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
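+  # Added reference note: with nth_eobs=3 and npe_node_eobs=40, each node packs
+  # 40 tasks x 3 threads = 120 cores, which fits within npe_node_max=128 set above.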
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.sfcanl b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ufs b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ufs new file mode 100644 index 0000000000..0d5fd5da86 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.ufs @@ -0,0 +1,371 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + 
MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.vrfy b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wafs b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wafsblending b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wave b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wavegempak b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wavegempak @@ -0,0 +1,13 @@ +#! 
/usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.waveinit b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.waveprep b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/logs/2022110900.log b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/logs/2022110900.log new file mode 100644 index 0000000000..124b42c164 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/logs/2022110900.log @@ -0,0 +1 @@ +2023-05-29 22:06:39 +0000 :: fe3 :: This cycle is complete: Success diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/logs/2022111000.log b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/logs/2022111000.log new file mode 100644 index 0000000000..b06ae07011 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/logs/2022111000.log @@ -0,0 +1,121 @@ +2023-05-28 01:31:38 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 01:31:38 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:36854 +2023-05-28 01:33:22 +0000 :: fe3 :: Submission status of previously pending gfsfcst is failure! 
sbatch: error: Batch job submission failed: Invalid qos specification +2023-05-28 01:33:22 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 01:33:23 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28483216 +2023-05-28 01:34:46 +0000 :: fe3 :: Task gfsfcst, jobid=28483216, in state QUEUED (PENDING) +2023-05-28 06:05:19 +0000 :: fe2 :: Submitting gfsfcst +2023-05-28 06:05:22 +0000 :: fe2 :: Submission of gfsfcst succeeded, jobid=28492564 +2023-05-28 06:38:42 +0000 :: fe3 :: Task gfsfcst, jobid=28492564, in state FAILED (FAILED), ran for 219.0 seconds, exit status=11, try=1 (of 2) +2023-05-28 06:38:42 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 06:38:42 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:33495 +2023-05-29 16:48:27 +0000 :: fe2 :: Task gfsfcst, jobid=28566454, in state FAILED (CANCELLED), ran for 20.0 seconds, exit status=15, try=1 (of 2) +2023-05-29 16:48:27 +0000 :: fe2 :: Task gfspost_f000-f000, jobid=28566455, in state FAILED (CANCELLED), ran for 20.0 seconds, exit status=15, try=1 (of 2) +2023-05-29 16:48:27 +0000 :: fe2 :: Task gfspost_f006-f006, jobid=28566457, in state FAILED (CANCELLED), ran for 20.0 seconds, exit status=15, try=1 (of 2) +2023-05-29 16:48:27 +0000 :: fe2 :: Task gfspost_f012-f012, jobid=28566459, in state FAILED (CANCELLED), ran for 20.0 seconds, exit status=15, try=1 (of 2) +2023-05-29 16:48:27 +0000 :: fe2 :: Submitting gfsfcst +2023-05-29 16:48:27 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:32912 +2023-05-29 16:48:50 +0000 :: fe2 :: Submission status of previously pending gfsfcst is success, jobid=28566687 +2023-05-29 16:48:50 +0000 :: fe2 :: Task gfsfcst, jobid=28566687, in state RUNNING (RUNNING) +2023-05-29 16:49:10 +0000 :: fe2 :: Submitting gfsfcst +2023-05-29 16:49:10 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:38302 +2023-05-29 17:17:08 +0000 :: fe2 :: Submission status of previously pending gfsfcst is success, jobid=28566693 +2023-05-29 17:17:08 +0000 :: fe2 :: Task gfsfcst, jobid=28566693, in state FAILED (FAILED), ran for 178.0 seconds, exit status=11, try=1 (of 2) +2023-05-29 17:17:08 +0000 :: fe2 :: Submitting gfsfcst +2023-05-29 17:17:08 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:39108 +2023-05-29 17:53:18 +0000 :: fe3 :: Submitting gfsfcst +2023-05-29 17:53:19 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28568635 +2023-05-29 18:16:29 +0000 :: fe3 :: Task gfsfcst, jobid=28568635, in state FAILED (FAILED), ran for 219.0 seconds, exit status=9, try=1 (of 2) +2023-05-29 18:16:29 +0000 :: fe3 :: Submitting gfsfcst +2023-05-29 18:16:29 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:45793 +2023-05-29 18:20:37 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28570074 +2023-05-29 18:20:37 +0000 :: fe3 :: Task gfsfcst, jobid=28570074, in state DEAD (FAILED), ran for 40.0 seconds, exit status=35072, try=2 (of 2) +2023-05-29 18:20:57 +0000 :: fe3 :: Forcibly submitting gfsfcst +2023-05-29 18:20:57 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:36324 +2023-05-29 19:06:14 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28570204 +2023-05-29 19:06:15 +0000 :: fe3 :: Task gfsfcst, jobid=28570204, in state DEAD (FAILED), ran for 229.0 seconds, exit status=11, try=4 (of 2) +2023-05-29 19:06:30 +0000 :: fe3 :: Forcibly submitting gfsfcst +2023-05-29 19:06:30 +0000 :: fe3 :: Submission status of gfsfcst is pending at 
druby://fe3:37214 +2023-05-29 22:06:39 +0000 :: fe3 :: Submitting gfsfcst +2023-05-29 22:06:39 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28577723 +2023-05-29 22:25:06 +0000 :: fe1 :: Task gfsfcst, jobid=28577723, in state RUNNING (RUNNING) +2023-05-29 22:25:06 +0000 :: fe1 :: Submitting gfspost_f000-f006 +2023-05-29 22:25:06 +0000 :: fe1 :: Submission status of gfspost_f000-f006 is pending at druby://fe1:44763 +2023-05-29 22:30:09 +0000 :: fe1 :: Submission status of previously pending gfspost_f000-f006 is success, jobid=28578330 +2023-05-29 22:30:09 +0000 :: fe1 :: Task gfsfcst, jobid=28577723, in state RUNNING (RUNNING) +2023-05-29 22:30:09 +0000 :: fe1 :: Task gfspost_f000-f006, jobid=28578330, in state RUNNING (RUNNING) +2023-05-29 22:35:07 +0000 :: fe1 :: Task gfsfcst, jobid=28577723, in state RUNNING (RUNNING) +2023-05-29 22:35:07 +0000 :: fe1 :: Task gfspost_f000-f006, jobid=28578330, in state SUCCEEDED (COMPLETED), ran for 366.0 seconds, exit status=0, try=1 (of 2) +2023-05-29 22:40:04 +0000 :: fe1 :: Task gfsfcst, jobid=28577723, in state RUNNING (RUNNING) +2023-05-29 22:40:04 +0000 :: fe1 :: Submitting gfspost_f012-f018 +2023-05-29 22:40:04 +0000 :: fe1 :: Submission status of gfspost_f012-f018 is pending at druby://fe1:38353 +2023-05-29 22:45:11 +0000 :: fe1 :: Submission status of previously pending gfspost_f012-f018 is success, jobid=28578654 +2023-05-29 22:45:11 +0000 :: fe1 :: Task gfsfcst, jobid=28577723, in state RUNNING (RUNNING) +2023-05-29 22:45:11 +0000 :: fe1 :: Task gfspost_f012-f018, jobid=28578654, in state QUEUED (PENDING) +2023-05-29 22:50:11 +0000 :: fe1 :: Task gfsfcst, jobid=28577723, in state RUNNING (RUNNING) +2023-05-29 22:50:11 +0000 :: fe1 :: Task gfspost_f012-f018, jobid=28578654, in state QUEUED (PENDING) +2023-05-29 22:55:04 +0000 :: fe1 :: Task gfsfcst, jobid=28577723, in state RUNNING (RUNNING) +2023-05-29 22:55:04 +0000 :: fe1 :: Task gfspost_f012-f018, jobid=28578654, in state RUNNING (RUNNING) +2023-05-29 23:00:06 +0000 :: fe1 :: Task gfsfcst, jobid=28577723, in state RUNNING (RUNNING) +2023-05-29 23:00:06 +0000 :: fe1 :: Task gfspost_f012-f018, jobid=28578654, in state SUCCEEDED (COMPLETED), ran for 299.0 seconds, exit status=0, try=1 (of 2) +2023-05-29 23:00:06 +0000 :: fe1 :: Submitting gfspost_f024-f030 +2023-05-29 23:00:06 +0000 :: fe1 :: Submission status of gfspost_f024-f030 is pending at druby://fe1:45650 +2023-05-29 23:05:09 +0000 :: fe1 :: Submission status of previously pending gfspost_f024-f030 is success, jobid=28579246 +2023-05-29 23:05:09 +0000 :: fe1 :: Task gfsfcst, jobid=28577723, in state RUNNING (RUNNING) +2023-05-29 23:05:09 +0000 :: fe1 :: Task gfspost_f024-f030, jobid=28579246, in state RUNNING (RUNNING) +2023-05-29 23:10:13 +0000 :: fe1 :: Task gfsfcst, jobid=28577723, in state RUNNING (RUNNING) +2023-05-29 23:10:14 +0000 :: fe1 :: Task gfspost_f024-f030, jobid=28579246, in state SUCCEEDED (COMPLETED), ran for 302.0 seconds, exit status=0, try=1 (of 2) +2023-05-29 23:15:05 +0000 :: fe1 :: Task gfsfcst, jobid=28577723, in state RUNNING (RUNNING) +2023-05-29 23:15:05 +0000 :: fe1 :: Submitting gfspost_f036-f042 +2023-05-29 23:15:05 +0000 :: fe1 :: Submission of gfspost_f036-f042 succeeded, jobid=28580161 +2023-05-29 23:20:09 +0000 :: fe1 :: Task gfsfcst, jobid=28577723, in state RUNNING (RUNNING) +2023-05-29 23:20:09 +0000 :: fe1 :: Task gfspost_f036-f042, jobid=28580161, in state RUNNING (RUNNING) +2023-05-29 23:25:12 +0000 :: fe1 :: Task gfsfcst, jobid=28577723, in state RUNNING (RUNNING) +2023-05-29 23:25:12 
+0000 :: fe1 :: Task gfspost_f036-f042, jobid=28580161, in state SUCCEEDED (COMPLETED), ran for 308.0 seconds, exit status=0, try=1 (of 2) +2023-05-29 23:30:11 +0000 :: fe1 :: Task gfsfcst, jobid=28577723, in state RUNNING (RUNNING) +2023-05-29 23:30:11 +0000 :: fe1 :: Submitting gfspost_f048-f054 +2023-05-29 23:30:11 +0000 :: fe1 :: Submission status of gfspost_f048-f054 is pending at druby://fe1:38455 +2023-05-29 23:35:03 +0000 :: fe1 :: Submission status of previously pending gfspost_f048-f054 is success, jobid=28580459 +2023-05-29 23:35:03 +0000 :: fe1 :: Task gfsfcst, jobid=28577723, in state RUNNING (RUNNING) +2023-05-29 23:35:03 +0000 :: fe1 :: Task gfspost_f048-f054, jobid=28580459, in state QUEUED (PENDING) +2023-05-29 23:40:11 +0000 :: fe1 :: Task gfsfcst, jobid=28577723, in state RUNNING (RUNNING) +2023-05-29 23:40:12 +0000 :: fe1 :: Task gfspost_f048-f054, jobid=28580459, in state QUEUED (PENDING) +2023-05-29 23:45:13 +0000 :: fe1 :: Task gfsfcst, jobid=28577723, in state RUNNING (RUNNING) +2023-05-29 23:45:13 +0000 :: fe1 :: Task gfspost_f048-f054, jobid=28580459, in state QUEUED (PENDING) +2023-05-29 23:45:13 +0000 :: fe1 :: Submitting gfspost_f060-f066 +2023-05-29 23:45:13 +0000 :: fe1 :: Submission status of gfspost_f060-f066 is pending at druby://fe1:37133 +2023-05-29 23:50:12 +0000 :: fe1 :: Submission status of previously pending gfspost_f060-f066 is success, jobid=28580856 +2023-05-29 23:50:12 +0000 :: fe1 :: Task gfsfcst, jobid=28577723, in state FAILED (FAILED), ran for 6064.0 seconds, exit status=35584, try=1 (of 2) +2023-05-29 23:50:12 +0000 :: fe1 :: Task gfspost_f048-f054, jobid=28580459, in state RUNNING (RUNNING) +2023-05-29 23:50:12 +0000 :: fe1 :: Task gfspost_f060-f066, jobid=28580856, in state RUNNING (RUNNING) +2023-05-29 23:50:12 +0000 :: fe1 :: Submitting gfsfcst +2023-05-29 23:50:12 +0000 :: fe1 :: Submission status of gfsfcst is pending at druby://fe1:43254 +2023-05-29 23:55:10 +0000 :: fe1 :: Submission status of previously pending gfsfcst is success, jobid=28580948 +2023-05-29 23:55:10 +0000 :: fe1 :: Task gfsfcst, jobid=28580948, in state RUNNING (RUNNING) +2023-05-29 23:55:10 +0000 :: fe1 :: Task gfspost_f048-f054, jobid=28580459, in state SUCCEEDED (COMPLETED), ran for 373.0 seconds, exit status=0, try=1 (of 2) +2023-05-29 23:55:10 +0000 :: fe1 :: Task gfspost_f060-f066, jobid=28580856, in state FAILED (FAILED), ran for 200.0 seconds, exit status=256, try=1 (of 2) +2023-05-29 23:55:10 +0000 :: fe1 :: Submitting gfspost_f060-f066 +2023-05-29 23:55:10 +0000 :: fe1 :: Submission status of gfspost_f060-f066 is pending at druby://fe1:44415 +2023-05-30 00:00:09 +0000 :: fe1 :: Submission status of previously pending gfspost_f060-f066 is success, jobid=28581025 +2023-05-30 00:00:12 +0000 :: fe1 :: Task gfsfcst, jobid=28580948, in state RUNNING (RUNNING) +2023-05-30 00:00:13 +0000 :: fe1 :: Task gfspost_f060-f066, jobid=28581025, in state RUNNING (RUNNING) +2023-05-30 00:05:10 +0000 :: fe1 :: Task gfsfcst, jobid=28580948, in state RUNNING (RUNNING) +2023-05-30 00:05:10 +0000 :: fe1 :: Task gfspost_f060-f066, jobid=28581025, in state SUCCEEDED (COMPLETED), ran for 341.0 seconds, exit status=0, try=2 (of 2) +2023-05-30 00:10:04 +0000 :: fe1 :: Task gfsfcst, jobid=28580948, in state RUNNING (RUNNING) +2023-05-30 00:15:08 +0000 :: fe1 :: Task gfsfcst, jobid=28580948, in state RUNNING (RUNNING) +2023-05-30 00:20:22 +0000 :: fe1 :: Task gfsfcst, jobid=28580948, in state RUNNING (RUNNING) +2023-05-30 00:25:06 +0000 :: fe1 :: Task gfsfcst, jobid=28580948, in state 
RUNNING (RUNNING) +2023-05-30 00:30:09 +0000 :: fe1 :: Task gfsfcst, jobid=28580948, in state RUNNING (RUNNING) +2023-05-30 00:35:08 +0000 :: fe1 :: Task gfsfcst, jobid=28580948, in state RUNNING (RUNNING) +2023-05-30 00:40:09 +0000 :: fe1 :: Task gfsfcst, jobid=28580948, in state RUNNING (RUNNING) +2023-05-30 00:45:06 +0000 :: fe1 :: Task gfsfcst, jobid=28580948, in state RUNNING (RUNNING) +2023-05-30 00:50:07 +0000 :: fe1 :: Task gfsfcst, jobid=28580948, in state RUNNING (RUNNING) +2023-05-30 00:55:03 +0000 :: fe1 :: Task gfsfcst, jobid=28580948, in state RUNNING (RUNNING) +2023-05-30 01:00:09 +0000 :: fe1 :: Task gfsfcst, jobid=28580948, in state RUNNING (RUNNING) +2023-05-30 01:05:10 +0000 :: fe1 :: Task gfsfcst, jobid=28580948, in state RUNNING (RUNNING) +2023-05-30 01:10:06 +0000 :: fe1 :: Task gfsfcst, jobid=28580948, in state RUNNING (RUNNING) +2023-05-30 01:15:04 +0000 :: fe1 :: Task gfsfcst, jobid=28580948, in state RUNNING (RUNNING) +2023-05-30 01:20:11 +0000 :: fe1 :: Task gfsfcst, jobid=28580948, in state RUNNING (RUNNING) +2023-05-30 01:25:13 +0000 :: fe1 :: Task gfsfcst, jobid=28580948, in state RUNNING (RUNNING) +2023-05-30 01:30:10 +0000 :: fe1 :: Task gfsfcst, jobid=28580948, in state RUNNING (RUNNING) +2023-05-30 01:35:12 +0000 :: fe1 :: Task gfsfcst, jobid=28580948, in state DEAD (FAILED), ran for 6176.0 seconds, exit status=35584, try=2 (of 2) diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/runcmds b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/runcmds new file mode 100644 index 0000000000..331bec8558 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8.xml -d v17_p8.db +rocotostat -w v17_p8.xml -d v17_p8.db diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/v17_p8.crontab b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/v17_p8.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/v17_p8.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/v17_p8.db b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/v17_p8.db new file mode 100644 index 0000000000..0d060efda9 Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/v17_p8.db differ diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/v17_p8.xml b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/v17_p8.xml new file mode 100644 index 0000000000..a3a3b1bdda --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/v17_p8.xml @@ -0,0 +1,151 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 05:00:00 + xjet + 184:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f006 _f012-f018 
_f024-f030 _f036-f042 _f048-f054 _f060-f066 _f072-f078 _f084-f090 _f096-f102 _f108-f114 _f120-f126 _f132-f138 _f144-f150 _f156-f162 _f168-f168 + f006 f018 f030 f042 f054 f066 f078 f090 f102 f114 f126 f138 f150 f162 f168 + f000_f006 f012_f018 f024_f030 f036_f042 f048_f054 f060_f066 f072_f078 f084_f090 f096_f102 f108_f114 f120_f126 f132_f138 f144_f150 f156_f162 f168_f168 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/v17_p8_ALL.xml b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/v17_p8_ALL.xml new file mode 100644 index 0000000000..1310a1eab7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/v17_p8_ALL.xml @@ -0,0 +1,195 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3-dev + batch + vjet + 00:02:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + debug + 00:30:00 + vjet + 232:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + vjet + 01:00:00 + 4:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + 
gsd-fv3-dev + batch + vjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/v17_p8_lock.db b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/v17_p8_lock.db new file mode 100644 index 0000000000..8035bcf422 Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_12x12_xjet_NEW/v17_p8_lock.db differ diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/12x16 b/FV3GFSwfm/testing/v17_p8_12x16_xjet/12x16 new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.aero b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.aeroanl b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.anal b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ "${CDATE}" 
-ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.analcalc b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.analdiag b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.arch b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.atmanl b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.atmensanl b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.awips b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.base b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.base new file mode 100644 index 0000000000..546be159b1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.base @@ -0,0 +1,389 @@ +#! 
/usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." 
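Each of the per-task config files in this patch (config.arch, config.atmanl*, config.awips, and so on) starts by sourcing config.resources with its own task name, so the walltime/PE/thread variables for that task exist before anything else runs. Below is a stripped-down sketch of that dispatch pattern; the variable names and values are illustrative only, and the repository's real config.resources covers every task.

# Sketch of the ". ${EXPDIR}/config.resources <task>" dispatch used by the
# per-task configs above. Values here are hypothetical placeholders.
step=${1:-""}
case "${step}" in
  "eobs")
    export wtime_eobs="00:45:00"   # hypothetical walltime
    export npe_eobs=480            # hypothetical PE count; config.eobs later sets npe_gsi=$npe_eobs
    export nth_eobs=3
    ;;
  "arch")
    export wtime_arch="06:00:00"
    export npe_arch=1
    export nth_arch=1
    ;;
  *)
    echo "config.resources sketch: no entry for task '${step}'"
    ;;
esac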
+export assim_freq=6 +export PSLOT="v17_p8_12x16_xjet" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is 
defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
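gfs_cyc controls how many long (GFS) cycles run per day, and the STEP_GFS computation a few lines below converts that count into the spacing in hours between those cycles. A small worked illustration of the arithmetic for the four supported settings:

# Spacing between GFS cycles implied by gfs_cyc (mirrors the STEP_GFS line below):
#   gfs_cyc=1 -> STEP_GFS=24 (00Z only)
#   gfs_cyc=2 -> STEP_GFS=12 (00Z and 12Z)
#   gfs_cyc=4 -> STEP_GFS=6  (all four cycles)
#   gfs_cyc=0 -> STEP_GFS=0  (no GFS cycle)
for gfs_cyc in 0 1 2 4; do
  if (( gfs_cyc != 0 )); then
    echo "gfs_cyc=${gfs_cyc} -> STEP_GFS=$(( 24 / gfs_cyc ))"
  else
    echo "gfs_cyc=0 -> STEP_GFS=0"
  fi
done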
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
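A few lines above, FHMAX_GFS is picked from the per-cycle FHMAX_GFS_00/06/12/18 variables with an eval on the current ${cyc}. The same lookup can be written with bash indirect expansion; this is only a minimal sketch of the equivalent mechanism, not a change to how this file does it.

# Equivalent per-cycle lookup using ${!var} indirection instead of eval.
cyc="12"                       # normally provided by the job card / rocoto
FHMAX_GFS_12=120               # example value matching the defaults above
varname="FHMAX_GFS_${cyc}"
export FHMAX_GFS="${!varname}"
echo "FHMAX_GFS=${FHMAX_GFS}"  # -> 120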
+ +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. 
+export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
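IAU_FHROT just above is taken as the first character of IAUFHRS with cut -c1, which works while the leading increment hour is a single digit (here 3,6,9). A field-based extraction that also handles multi-digit leading hours, shown only as a sketch:

# Extract the first forecast hour from a comma-separated list without
# assuming it is a single digit.
IAUFHRS="3,6,9"
IAU_FHROT="${IAUFHRS%%,*}"     # everything before the first comma
echo "IAU_FHROT=${IAU_FHROT}"  # -> 3
# cut -d, -f1 <<< "${IAUFHRS}" gives the same result.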
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
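The config.base.emc.dyn_* variants in this patch keep @TOKEN@ placeholders (@MACHINE@, @ACCOUNT@, @SDATE@, @HPSSARCH@, and others) that are meant to be filled in when an experiment is generated, whereas the plain config.base above already carries the filled-in values for this xjet run (a few tokens such as @CASEENS@ and @DOHYBVAR@ remain unfilled there). The sed call below is only a hypothetical illustration of that substitution, not the repository's actual setup tooling.

# Illustrative token substitution for a template config; values are the ones
# visible in the concrete config.base earlier in this patch.
sed -e "s|@MACHINE@|JET|g" \
    -e "s|@ACCOUNT@|gsd-fv3-dev|g" \
    -e "s|@SDATE@|2022111000|g" \
    config.base.emc.dyn_jet > config.base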
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
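The APP case blocks in these config.base variants use two bash regex tests to layer components onto the coupled (S2S*) configurations: a trailing A turns on aerosols and a leading S2SW turns on waves, so APP=S2SWA enables both. A tiny standalone illustration of those two tests:

# How the S2S* branch interprets the APP suffix/prefix (illustration only).
for APP in S2S S2SW S2SA S2SWA; do
  DO_AERO="NO"; DO_WAVE="NO"
  [[ "${APP}" =~ A$ ]]    && DO_AERO="YES"   # trailing "A"   -> aerosols
  [[ "${APP}" =~ ^S2SW ]] && DO_WAVE="YES"   # leading "S2SW" -> waves
  echo "${APP}: DO_AERO=${DO_AERO} DO_WAVE=${DO_WAVE}"
done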
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.com b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx 
COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
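config.com above deliberately single-quotes every COM_*_TMPL so that ${ROTDIR}, ${RUN}, ${YMD} and the other fields survive unexpanded until a job fills them in; the helper generate_com() in ush/preamble.sh performs that substitution. The snippet below is only a toy demonstration of the deferred-expansion idea with a simplified template, not the real generate_com() implementation.

# Toy illustration of expanding a single-quoted COM template at runtime.
COM_ATMOS_ANALYSIS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/analysis/atmos'   # simplified (no ${MEMDIR})
ROTDIR="/path/to/comrot/myexp" RUN="gfs" YMD="20221110" HH="00"          # example values
eval "COM_ATMOS_ANALYSIS=${COM_ATMOS_ANALYSIS_TMPL}"
echo "${COM_ATMOS_ANALYSIS}"   # -> /path/to/comrot/myexp/gfs.20221110/00/analysis/atmos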
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.earc b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ecen b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.echgres b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ediag b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.efcs b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.eobs b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.epos b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.esfc b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.eupd b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.fcst b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
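+
+# Illustrative note for the coupling block above: with an S2SW-type app
+# (DO_COUPLED=DO_OCN=DO_ICE=DO_WAVE="YES", DO_AERO="NO") it resolves to
+# cpl=cplflx=cplice=cplwav=".true." and cplchm=".false."; only an
+# aerosol-enabled app (DO_AERO="YES") turns cplchm on.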
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not used. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd=".true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true."
+ export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.fcst_link b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.fcst_link new file mode 120000 index 0000000000..582748bd67 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.fcst_link @@ -0,0 +1 @@ +../config.fcst_new \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not used. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false."
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
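+
+ # Worked example for the restart_interval list built above (illustrative
+ # numbers): restart_interval_gfs=12, DOIAU="YES", IAU_OFFSET=6 and FHMAX_GFS=48
+ # give xfh=12+(6/2)=15 and restart hours 15, 27 and 39.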
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.fit2obs b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.gempak b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.getic b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.gldas b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ice b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.init b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.init @@ -0,0 +1,54 @@ +#! 
/usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.landanl b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.landanlinit b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.landanlrun b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.metp b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.nsst b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocn b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocnanal b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocnanalbmat b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++
b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocnpost b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.post b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.postsnd b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.prep b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.resources b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components; they usually land on the atmosphere tasks. + # However, it is suggested to limit mediator PETS to 300, as a larger count may cause slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + 
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.sfcanl b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ufs b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ufs new file mode 100644 index 0000000000..6d0493af2c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.ufs @@ -0,0 +1,372 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + 
export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=4 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=14 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.vrfy b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wafs b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wafsblending b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wave b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wavegempak b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wavegempak @@ -0,0 +1,13 @@ +#! 
/usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.waveinit b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.waveprep b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/config.waveprep @@ -0,0 +1,27 @@ +#! 
/usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/logs/2022110900.log b/FV3GFSwfm/testing/v17_p8_12x16_xjet/logs/2022110900.log new file mode 100644 index 0000000000..71e59c623d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/logs/2022110900.log @@ -0,0 +1,67 @@ +2023-05-30 15:21:02 +0000 :: fe2 :: Submitting gfsfcst +2023-05-30 15:21:02 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:40418 +2023-05-30 15:23:12 +0000 :: fe2 :: Submission status of previously pending gfsfcst is success, jobid=28614915 +2023-05-30 15:23:12 +0000 :: fe2 :: Task gfsfcst, jobid=28614915, in state RUNNING (RUNNING) +2023-05-30 15:23:50 +0000 :: fe2 :: Task gfsfcst, jobid=28614915, in state RUNNING (RUNNING) +2023-05-30 15:24:18 +0000 :: fe2 :: Task gfsfcst, jobid=28614915, in state RUNNING (RUNNING) +2023-05-30 15:32:13 +0000 :: fe2 :: Submitting gfsfcst +2023-05-30 15:32:14 +0000 :: fe2 :: Submission of gfsfcst succeeded, jobid=28615242 +2023-05-30 20:48:57 +0000 :: fe3 :: Task gfsfcst, jobid=28615242, in state FAILED (TIMEOUT), ran for 1828.0 seconds, exit status=255, try=1 (of 2) +2023-05-30 20:48:57 +0000 :: fe3 :: Submitting gfsfcst +2023-05-30 20:48:57 +0000 :: fe3 :: Submitting gfspost_f000-f000 +2023-05-30 20:48:57 +0000 :: fe3 :: Submitting gfspost_f006-f006 +2023-05-30 20:48:57 +0000 :: fe3 :: Submitting gfspost_f012-f012 +2023-05-30 20:48:57 +0000 :: fe3 :: Submitting gfspost_f018-f018 +2023-05-30 20:48:57 +0000 :: fe3 :: Submission of gfsfcst failed! sbatch: error: QOSMaxWallDurationPerJobLimit +sbatch: error: Batch job submission failed: Job violates accounting/QOS policy (job submit limit, user's size and/or time limits) +2023-05-30 20:48:57 +0000 :: fe3 :: Submission of gfspost_f000-f000 succeeded, jobid=28628921 +2023-05-30 20:48:57 +0000 :: fe3 :: Submission of gfspost_f006-f006 succeeded, jobid=28628922 +2023-05-30 20:48:57 +0000 :: fe3 :: Submission of gfspost_f012-f012 succeeded, jobid=28628923 +2023-05-30 20:48:57 +0000 :: fe3 :: Submission of gfspost_f018-f018 succeeded, jobid=28628924 +2023-05-30 20:49:24 +0000 :: fe3 :: Task gfspost_f000-f000, jobid=28628921, in state RUNNING (RUNNING) +2023-05-30 20:49:24 +0000 :: fe3 :: Task gfspost_f006-f006, jobid=28628922, in state RUNNING (RUNNING) +2023-05-30 20:49:24 +0000 :: fe3 :: Task gfspost_f012-f012, jobid=28628923, in state RUNNING (RUNNING) +2023-05-30 20:49:24 +0000 :: fe3 :: Task gfspost_f018-f018, jobid=28628924, in state QUEUED (PENDING) +2023-05-30 20:49:24 +0000 :: fe3 :: Submitting gfsfcst +2023-05-30 20:49:24 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:36311 +2023-05-30 20:49:29 +0000 :: fe3 :: Submission status of previously pending gfsfcst is failure! 
sbatch: error: Batch job submission failed: Invalid qos specification +2023-05-30 20:49:29 +0000 :: fe3 :: Task gfspost_f000-f000, jobid=28628921, in state RUNNING (RUNNING) +2023-05-30 20:49:29 +0000 :: fe3 :: Task gfspost_f006-f006, jobid=28628922, in state RUNNING (RUNNING) +2023-05-30 20:49:29 +0000 :: fe3 :: Task gfspost_f012-f012, jobid=28628923, in state RUNNING (RUNNING) +2023-05-30 20:49:29 +0000 :: fe3 :: Task gfspost_f018-f018, jobid=28628924, in state QUEUED (PENDING) +2023-05-30 20:49:29 +0000 :: fe3 :: Submitting gfsfcst +2023-05-30 20:49:29 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:43324 +2023-05-30 20:49:44 +0000 :: fe3 :: Submission status of previously pending gfsfcst is failure! sbatch: error: Batch job submission failed: Invalid qos specification +2023-05-30 20:49:45 +0000 :: fe3 :: Task gfspost_f000-f000, jobid=28628921, in state RUNNING (RUNNING) +2023-05-30 20:49:45 +0000 :: fe3 :: Task gfspost_f006-f006, jobid=28628922, in state RUNNING (RUNNING) +2023-05-30 20:49:45 +0000 :: fe3 :: Task gfspost_f012-f012, jobid=28628923, in state RUNNING (RUNNING) +2023-05-30 20:49:45 +0000 :: fe3 :: Task gfspost_f018-f018, jobid=28628924, in state QUEUED (PENDING) +2023-05-30 20:49:45 +0000 :: fe3 :: Submitting gfsfcst +2023-05-30 20:49:45 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:41303 +2023-05-30 20:49:51 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28628931 +2023-05-30 20:49:52 +0000 :: fe3 :: Task gfsfcst, jobid=28628931, in state QUEUED (PENDING) +2023-05-30 20:49:52 +0000 :: fe3 :: Task gfspost_f000-f000, jobid=28628921, in state RUNNING (RUNNING) +2023-05-30 20:49:52 +0000 :: fe3 :: Task gfspost_f006-f006, jobid=28628922, in state RUNNING (RUNNING) +2023-05-30 20:49:52 +0000 :: fe3 :: Task gfspost_f012-f012, jobid=28628923, in state RUNNING (RUNNING) +2023-05-30 20:49:52 +0000 :: fe3 :: Task gfspost_f018-f018, jobid=28628924, in state QUEUED (PENDING) +2023-05-31 08:07:47 +0000 :: fe3 :: Task gfsfcst, jobid=28628931, in state FAILED (FAILED), ran for 9254.0 seconds, exit status=11, try=1 (of 2) +2023-05-31 08:07:47 +0000 :: fe3 :: Task gfspost_f000-f000, jobid=28628921, in state SUCCEEDED (COMPLETED), ran for 119.0 seconds, exit status=0, try=1 (of 2) +2023-05-31 08:07:47 +0000 :: fe3 :: Task gfspost_f006-f006, jobid=28628922, in state SUCCEEDED (COMPLETED), ran for 133.0 seconds, exit status=0, try=1 (of 2) +2023-05-31 08:07:47 +0000 :: fe3 :: Task gfspost_f012-f012, jobid=28628923, in state SUCCEEDED (COMPLETED), ran for 133.0 seconds, exit status=0, try=1 (of 2) +2023-05-31 08:07:47 +0000 :: fe3 :: Task gfspost_f018-f018, jobid=28628924, in state SUCCEEDED (COMPLETED), ran for 136.0 seconds, exit status=0, try=1 (of 2) +2023-05-31 08:07:47 +0000 :: fe3 :: Submitting gfsfcst +2023-05-31 08:07:47 +0000 :: fe3 :: Submitting gfspost_f024-f024 +2023-05-31 08:07:47 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28650280 +2023-05-31 08:07:47 +0000 :: fe3 :: Submission status of gfspost_f024-f024 is pending at druby://fe3:34381 +2023-05-31 14:37:33 +0000 :: fe6 :: Submission status of previously pending gfspost_f024-f024 is success, jobid=28650281 +2023-05-31 14:37:34 +0000 :: fe6 :: Task gfsfcst, jobid=28650280, in state UNKNOWN (CANCELLED by 1254) +2023-05-31 14:37:34 +0000 :: fe6 :: Task gfspost_f024-f024, jobid=28650281, in state SUCCEEDED (COMPLETED), ran for 134.0 seconds, exit status=0, try=1 (of 2) +2023-05-31 14:37:34 +0000 :: fe6 :: Submitting gfsvrfy +2023-05-31 
14:37:34 +0000 :: fe6 :: Submission status of gfsvrfy is pending at druby://fe6:45938 +2023-05-31 14:39:26 +0000 :: fe6 :: Submission status of previously pending gfsvrfy is success, jobid=28667049 +2023-05-31 14:39:27 +0000 :: fe6 :: Task gfsfcst, jobid=28650280, in state UNKNOWN (CANCELLED by 1254) +2023-05-31 14:39:27 +0000 :: fe6 :: Task gfsvrfy, jobid=28667049, in state FAILED (FAILED), ran for 5.0 seconds, exit status=256, try=1 (of 2) +2023-05-31 14:39:27 +0000 :: fe6 :: Submitting gfsvrfy +2023-05-31 14:39:27 +0000 :: fe6 :: Submission of gfsvrfy succeeded, jobid=28667069 +2023-05-31 14:54:56 +0000 :: fe6 :: Task gfsfcst, jobid=28650280, in state DEAD (CANCELLED by 1254), giving up because job state could not be determined 3 consecutive times, try=2 (of 2) +2023-05-31 14:54:57 +0000 :: fe6 :: Task gfsvrfy, jobid=28667069, in state DEAD (FAILED), ran for 4.0 seconds, exit status=256, try=2 (of 2) diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/logs/2022111000.log b/FV3GFSwfm/testing/v17_p8_12x16_xjet/logs/2022111000.log new file mode 100644 index 0000000000..130040f2fe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/logs/2022111000.log @@ -0,0 +1,57 @@ +2023-05-28 01:31:38 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 01:31:38 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:36854 +2023-05-28 01:33:22 +0000 :: fe3 :: Submission status of previously pending gfsfcst is failure! sbatch: error: Batch job submission failed: Invalid qos specification +2023-05-28 01:33:22 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 01:33:23 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28483216 +2023-05-28 01:34:46 +0000 :: fe3 :: Task gfsfcst, jobid=28483216, in state QUEUED (PENDING) +2023-05-28 06:05:19 +0000 :: fe2 :: Submitting gfsfcst +2023-05-28 06:05:22 +0000 :: fe2 :: Submission of gfsfcst succeeded, jobid=28492564 +2023-05-28 06:38:42 +0000 :: fe3 :: Task gfsfcst, jobid=28492564, in state FAILED (FAILED), ran for 219.0 seconds, exit status=11, try=1 (of 2) +2023-05-28 06:38:42 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 06:38:42 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:33495 +2023-05-29 16:48:27 +0000 :: fe2 :: Task gfsfcst, jobid=28566454, in state FAILED (CANCELLED), ran for 20.0 seconds, exit status=15, try=1 (of 2) +2023-05-29 16:48:27 +0000 :: fe2 :: Task gfspost_f000-f000, jobid=28566455, in state FAILED (CANCELLED), ran for 20.0 seconds, exit status=15, try=1 (of 2) +2023-05-29 16:48:27 +0000 :: fe2 :: Task gfspost_f006-f006, jobid=28566457, in state FAILED (CANCELLED), ran for 20.0 seconds, exit status=15, try=1 (of 2) +2023-05-29 16:48:27 +0000 :: fe2 :: Task gfspost_f012-f012, jobid=28566459, in state FAILED (CANCELLED), ran for 20.0 seconds, exit status=15, try=1 (of 2) +2023-05-29 16:48:27 +0000 :: fe2 :: Submitting gfsfcst +2023-05-29 16:48:27 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:32912 +2023-05-29 16:48:50 +0000 :: fe2 :: Submission status of previously pending gfsfcst is success, jobid=28566687 +2023-05-29 16:48:50 +0000 :: fe2 :: Task gfsfcst, jobid=28566687, in state RUNNING (RUNNING) +2023-05-29 16:49:10 +0000 :: fe2 :: Submitting gfsfcst +2023-05-29 16:49:10 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:38302 +2023-05-29 17:17:08 +0000 :: fe2 :: Submission status of previously pending gfsfcst is success, jobid=28566693 +2023-05-29 17:17:08 +0000 :: fe2 :: Task gfsfcst, jobid=28566693, in state FAILED (FAILED), ran for 178.0 seconds, exit 
status=11, try=1 (of 2) +2023-05-29 17:17:08 +0000 :: fe2 :: Submitting gfsfcst +2023-05-29 17:17:08 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:39108 +2023-05-29 17:53:18 +0000 :: fe3 :: Submitting gfsfcst +2023-05-29 17:53:19 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28568635 +2023-05-29 18:16:29 +0000 :: fe3 :: Task gfsfcst, jobid=28568635, in state FAILED (FAILED), ran for 219.0 seconds, exit status=9, try=1 (of 2) +2023-05-29 18:16:29 +0000 :: fe3 :: Submitting gfsfcst +2023-05-29 18:16:29 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:45793 +2023-05-29 18:20:37 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28570074 +2023-05-29 18:20:37 +0000 :: fe3 :: Task gfsfcst, jobid=28570074, in state DEAD (FAILED), ran for 40.0 seconds, exit status=35072, try=2 (of 2) +2023-05-29 18:20:57 +0000 :: fe3 :: Forcibly submitting gfsfcst +2023-05-29 18:20:57 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:36324 +2023-05-29 19:06:14 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28570204 +2023-05-29 19:06:15 +0000 :: fe3 :: Task gfsfcst, jobid=28570204, in state DEAD (FAILED), ran for 229.0 seconds, exit status=11, try=4 (of 2) +2023-05-29 19:06:30 +0000 :: fe3 :: Forcibly submitting gfsfcst +2023-05-29 19:06:30 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:37214 +2023-05-30 15:13:05 +0000 :: fe2 :: Submission status of previously pending gfsfcst is success, jobid=28571649 +2023-05-30 15:13:05 +0000 :: fe2 :: Task gfsfcst, jobid=28571649, in state DEAD (TIMEOUT), ran for 2424.0 seconds, exit status=255, try=6 (of 2) +2023-05-30 15:13:05 +0000 :: fe2 :: This cycle is complete: Success +2023-05-30 15:24:18 +0000 :: fe2 :: Forcibly submitting gfsfcst +2023-05-30 15:24:18 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:39556 +2023-05-31 14:39:27 +0000 :: fe6 :: Submitting gfsfcst +2023-05-31 14:39:28 +0000 :: fe6 :: Submission of gfsfcst succeeded, jobid=28667070 +2023-05-31 14:54:57 +0000 :: fe6 :: Task gfsfcst, jobid=28667070, in state FAILED (FAILED), ran for 254.0 seconds, exit status=35584, try=1 (of 2) +2023-05-31 14:54:57 +0000 :: fe6 :: Submitting gfsfcst +2023-05-31 14:54:57 +0000 :: fe6 :: Submission status of gfsfcst is pending at druby://fe6:41444 +2023-05-31 15:15:09 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28667481 +2023-05-31 15:15:09 +0000 :: fe3 :: Task gfsfcst, jobid=28667481, in state DEAD (FAILED), ran for 165.0 seconds, exit status=11, try=2 (of 2) +2023-05-31 15:15:21 +0000 :: fe3 :: Forcibly submitting gfsfcst +2023-05-31 15:15:21 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:40243 +2023-05-31 15:17:08 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28668905 +2023-05-31 15:17:08 +0000 :: fe3 :: Task gfsfcst, jobid=28668905, in state QUEUED (PENDING) +2023-05-31 15:17:27 +0000 :: fe3 :: Task gfsfcst, jobid=28668905, in state DEAD (CANCELLED), ran for 0.0 seconds, exit status=255, try=4 (of 2) +2023-05-31 15:17:27 +0000 :: fe3 :: Forcibly submitting gfsfcst +2023-05-31 15:17:27 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:32792 diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/runcmds b/FV3GFSwfm/testing/v17_p8_12x16_xjet/runcmds new file mode 100644 index 0000000000..331bec8558 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/runcmds @@ -0,0 +1,3 @@ + 
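The runcmds file whose lines follow, together with the crontab entry further below, drives this suite with rocoto; the cycle logs above are rocoto's output from those invocations. A minimal sketch of manually stepping and inspecting the workflow, assuming the same xml/db pair and borrowing the 2022110900 cycle and gfsfcst task from the log purely as examples:

    rocotorun    -w v17_p8.xml -d v17_p8.db                               # advance the workflow one iteration
    rocotostat   -w v17_p8.xml -d v17_p8.db                               # summarize task states per cycle
    rocotocheck  -w v17_p8.xml -d v17_p8.db -c 202211090000 -t gfsfcst    # explain why one task has not run or failed
    rocotorewind -w v17_p8.xml -d v17_p8.db -c 202211090000 -t gfsfcst    # reset a failed task so it can be resubmitted

rocotocheck and rocotorewind are standard rocoto utilities rather than part of this change; the flags shown are the usual -w/-d/-c/-t options and the cycle/task values are illustrative.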
+rocotorun -w v17_p8.xml -d v17_p8.db +rocotostat -w v17_p8.xml -d v17_p8.db diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/v17_p8.crontab b/FV3GFSwfm/testing/v17_p8_12x16_xjet/v17_p8.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/v17_p8.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/v17_p8.db b/FV3GFSwfm/testing/v17_p8_12x16_xjet/v17_p8.db new file mode 100644 index 0000000000..1e1ef54d12 Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_12x16_xjet/v17_p8.db differ diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/v17_p8.xml b/FV3GFSwfm/testing/v17_p8_12x16_xjet/v17_p8.xml new file mode 100644 index 0000000000..c4f371581c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/v17_p8.xml @@ -0,0 +1,156 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211090000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 00:30:00 + 244:ppn=24:tpp=1 + xjet + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/v17_p8_ALL.xml b/FV3GFSwfm/testing/v17_p8_12x16_xjet/v17_p8_ALL.xml new file mode 100644 index 0000000000..1310a1eab7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_12x16_xjet/v17_p8_ALL.xml @@ -0,0 +1,195 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3-dev + batch + vjet + 00:02:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; 
+ NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + debug + 00:30:00 + vjet + 232:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + vjet + 01:00:00 + 4:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + vjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_12x16_xjet/v17_p8_lock.db b/FV3GFSwfm/testing/v17_p8_12x16_xjet/v17_p8_lock.db new file mode 100644 index 0000000000..bdadb9edc0 Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_12x16_xjet/v17_p8_lock.db differ diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/8x8x2wg b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/8x8x2wg new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.aero b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.aeroanl b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.anal b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ 
"${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.analcalc b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.analdiag b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.arch b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.atmanl b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.atmanlrun @@ -0,0 +1,11 @@ +#! 
/usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.atmensanl b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.awips b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. 
of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.base b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.base new file mode 100644 index 0000000000..8c3a56cb21 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.base @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="vjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." 
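For orientation, every config.<task> file earlier in this change follows the same shape: it announces BEGIN/END, sources the shared config.resources file for its own task name, and exports step-specific variables, while config.base here carries the experiment-wide settings they all inherit. A minimal sketch of how a task environment is assembled, with the path and the fcst task used purely as illustrations and the sourcing order simplified:

    EXPDIR=/path/to/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg   # illustrative experiment directory
    source "${EXPDIR}/config.base"     # experiment-wide defaults (machine, paths, dates, APP, ...)
    source "${EXPDIR}/config.fcst"     # hypothetical task config; like the ones above it begins with
                                       #   . ${EXPDIR}/config.resources fcst
                                       # to pick up that task's PE layout and walltime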
+export assim_freq=6 +export PSLOT="v17_p8_8x8_vjet_2wg" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-168} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-168} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-168} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-168} +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." 
+ export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
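This config.base.emc.dyn_EMC is the unresolved template counterpart of the config.base above: the @...@ tokens are placeholders that the experiment setup step presumably fills in when an experiment directory is generated, producing concrete values like those in the config.base above. An illustrative before/after for the account setting just below, with the rendered value taken from that concrete config.base rather than from the template:

    template: export ACCOUNT="@ACCOUNT@"
    rendered: export ACCOUNT="gsd-fv3-dev"

The same substitution applies to @MACHINE@, @QUEUE@, @HOMEgfs@, @SDATE@, and the other tokens in this file.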
+export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is 
defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
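+# Illustrative sketch of how gfs_cyc translates into GFS forecast spacing via the
+# STEP_GFS=$(( 24 / gfs_cyc )) computation below: gfs_cyc=1 -> one forecast per day
+# (STEP_GFS=24), gfs_cyc=2 -> every 12 h, gfs_cyc=4 -> every 6 h; gfs_cyc=0 turns
+# the GFS cycle off (STEP_GFS=0).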
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
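+# Clarifying note (a sketch of intent, inferred from the comments above): with
+# DONST=YES the NSST analysis supplies the SST, so FNTSFA is blanked to a single
+# space and the RTG SST climatology file is not read; nst_anl=.true. additionally
+# applies the SST elevation correction in the analysis.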
+ +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. 
+export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
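+# Illustrative sketch of how these defaults are flipped by the APP case block
+# below:
+#   APP=ATM   -> atmosphere-only, all coupling switches stay "NO"
+#   APP=ATMW  -> DO_COUPLED=YES, DO_WAVE=YES, WAVE_CDUMP="both"
+#   APP=S2SW  -> DO_COUPLED=DO_OCN=DO_ICE=DO_WAVE=YES and cplwav2atm=".true."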
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
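+# Worked example of the IAU settings above (a sketch): with IAUFHRS="3,6,9" the
+# "cut -c1" keeps only the first character, so IAU_FHROT=3 (the warm start is
+# taken at forecast hour 3, the start of the IAU window) and the three increments
+# are applied over the IAU_DELTHRS=6 h window. Note that cut -c1 only handles a
+# single-digit first hour.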
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
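+# Valid archiving combinations (sketch): HPSSARCH=YES with LOCALARCH=NO (tape),
+# HPSSARCH=NO with LOCALARCH=YES (local disk), or both NO (no archiving); setting
+# both to YES trips the "exit 2" guard above.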
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.com b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx 
COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
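+# Note on the "empty variables must include a space" rule above (an assumption
+# about the downstream configs): values such as waveesmfGRD=' ' below remain
+# non-null, so ${var:-default} expansions elsewhere keep the intentionally blank
+# value instead of substituting their own defaults.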
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.earc b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ecen b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.echgres b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ediag b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.efcs b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.eobs b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.epos b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.esfc b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.eupd b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.fcst b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." 
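+  # Hedged note: satmedmf=".true." together with isatmedmf=1 (set just below)
+  # selects the scale-aware TKE-based moist EDMF PBL scheme used by the standard
+  # GFS suites; the MYNN suites handled above disable it in favor of MYNN-EDMF.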
+ export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." 
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
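+
+ # Example (assuming restart_interval_gfs=12, IAU_OFFSET=6, FHMAX_GFS=48):
+ # the restart_interval loop above sets restart_interval="15 27 39",
+ # i.e. GFS restart files are written at f015, f027 and f039.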
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.fit2obs b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.gempak b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.getic b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.gldas b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ice b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.init b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.init @@ -0,0 +1,54 @@ +#! 
/usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.landanl b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.landanlinit b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.landanlrun b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.metp b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.nsst b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash
+
+########## config.nsst ##########
+# NSST specific
+
+echo "BEGIN: config.nsst"
+
+# NSST parameters contained within nstf_name
+
+# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled
+export NST_MODEL=2
+
+# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON,
+export NST_SPINUP=0
+if [[ "$CDATE" -lt "2017072000" ]]; then
+ export NST_SPINUP=1
+fi
+
+# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON
+export NST_RESV=0
+
+# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction)
+export ZSEA1=0
+export ZSEA2=0
+
+export NST_GSI=3 # default 0: No NST info at all;
+ # 1: Input NST info but not used in GSI;
+ # 2: Input NST info, used in CRTM simulation, no Tr analysis
+ # 3: Input NST info, used in both CRTM simulation and Tr analysis
+export NSTINFO=0 # number of elements added in obs. data array (default = 0)
+if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi
+
+echo "END: config.nsst"
diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocn b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocn
new file mode 100644
index 0000000000..7d14e3dd52
--- /dev/null
+++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocn
@@ -0,0 +1,23 @@
+#! /usr/bin/env bash
+
+echo "BEGIN: config.ocn"
+
+# MOM_input template to use
+export MOM_INPUT="MOM_input_template_${OCNRES}"
+
+export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False)
+export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False)
+
+# Templated variables in MOM_input_template
+export MOM6_USE_LI2016="True" # set to False for restart reproducibility
+export MOM6_THERMO_SPAN="False"
+export MOM6_ALLOW_LANDMASK_CHANGES="False"
+
+if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then
+ export ODA_INCUPD="True"
+else
+ export ODA_INCUPD="False"
+fi
+export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment
+
+echo "END: config.ocn"
diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocnanal b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocnanal
new file mode 100644
index 0000000000..f5925809fc
--- /dev/null
+++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocnanal
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+########## config.ocnanal ##########
+# configuration common to all ocean analysis tasks
+
+echo "BEGIN: config.ocnanal"
+
+export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config
+export OBS_LIST=
+[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml
+export OBS_YAML=${OBS_LIST}
+export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml
+export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25
+export SOCA_VARS=tocn,socn,ssh
+export SABER_BLOCKS_YAML=
+export SOCA_NINNER=50
+export CASE_ANL=C48
+export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent
+export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin
+
+# R2D2
+export R2D2_OBS_DB=shared
+export R2D2_OBS_DUMP=s2s_v1
+export R2D2_OBS_SRC=gdas_marine
+export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed
+export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2
+
+# NICAS
+export NICAS_RESOL=1
+export NICAS_GRID_SIZE=15000
+
+echo "END: config.ocnanal"
diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocnanalbmat b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocnanalbmat
new file mode 100644
index 0000000000..024da5f51b
---
/dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocnpost b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.post b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.postsnd b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.prep b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.resources b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + 
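# Editor's note -- a minimal worked example with assumed values (these are NOT the
# defaults set elsewhere in this file; the *_example names are placeholders) showing
# how the per-component PETS bookkeeping above accumulates into the coupled total:
layout_x_example=8; layout_y_example=8; nthreads_fv3_example=2
(( ntasks_fv3_example = layout_x_example * layout_y_example * 6 ))   # 384 forecast tasks (6 cube faces)
(( FV3PETS_example = ntasks_fv3_example * nthreads_fv3_example ))    # 768
(( QUILTPETS_example = 48 * nthreads_fv3_example ))                  # 96, assuming 48 write-component tasks
(( ATMPETS_example = FV3PETS_example + QUILTPETS_example ))          # 864
(( NTASKS_TOT_example = ATMPETS_example + 240 + 220 + 120 ))         # plus assumed WW3/MOM6/CICE6 PETS = 1444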
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requiremtn + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
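# Editor's note -- illustrative only, not part of the original file: the
# npe_*/nth_*/npe_node_* triplets in this static table translate into a node request
# of roughly ceil(npe / npe_node) nodes with nth threads per task. A hypothetical
# check for the eobs settings just above:
nodes_eobs_example=$(( (480 + 40 - 1) / 40 ))   # = 12 nodes at 40 tasks/node, with nth_eobs=3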
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.sfcanl b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ufs b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ufs new file mode 100644 index 0000000000..a04f6ba9d9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.ufs @@ -0,0 +1,372 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=8 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=8 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + 
export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.vrfy b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wafs b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wafsblending b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wave b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wavegempak b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wavegempak @@ -0,0 +1,13 @@ +#! 
/usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.waveinit b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.waveprep b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/runcmds b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/runcmds new file mode 100644 index 0000000000..331bec8558 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8.xml -d v17_p8.db +rocotostat -w v17_p8.xml -d v17_p8.db diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/v17_p8.crontab b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/v17_p8.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/v17_p8.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/v17_p8.xml b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/v17_p8.xml new file mode 100644 index 0000000000..d3d8588fa3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_vjet_2wg/v17_p8.xml @@ -0,0 +1,152 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211090000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 01:20:00 + vjet + 126:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + vjet + 01:00:00 + 4:ppn=16:tpp=1 + --export=NONE + + 
&ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + vjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/8x8x2wgx12wt b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/8x8x2wgx12wt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.aero b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.aeroanl b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.anal b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO 
+ if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.analcalc b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.analdiag b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.arch b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.atmanl b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.atmanlrun @@ -0,0 +1,11 @@ +#! 
/usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.atmensanl b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.awips b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. 
of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.base b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.base new file mode 100644 index 0000000000..09b1d086e9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.base @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." 
+export assim_freq=6 +export PSLOT="v17_p8_8x8_xjet_2wg_12wt" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-168} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-168} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-168} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-168} +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." 
+ export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is 
defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
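# --- Editor's note: illustrative sketch, not part of the patch -----------------
# The block that follows derives the interval between GFS cycles from gfs_cyc and
# picks the per-cycle forecast length by indirect expansion of FHMAX_GFS_${cyc}.
# A minimal standalone reproduction (all values below are assumed purely for
# demonstration, not taken from this configuration):
gfs_cyc_demo=2                                          # 00Z and 12Z cycles
cyc_demo=12                                             # current cycle hour
FHMAX_GFS_12=120
FHMAX_GFS_demo=$(eval echo \${FHMAX_GFS_${cyc_demo}})   # expands ${FHMAX_GFS_12} -> 120
if (( gfs_cyc_demo != 0 )); then STEP_GFS_demo=$(( 24 / gfs_cyc_demo )); else STEP_GFS_demo=0; fi
echo "GFS every ${STEP_GFS_demo}h, out to f${FHMAX_GFS_demo}"   # "GFS every 12h, out to f120"
# The same resolution could be written with bash indirect expansion instead of eval,
# e.g. var="FHMAX_GFS_${cyc_demo}"; FHMAX_GFS_demo=${!var}
# -------------------------------------------------------------------------------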
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
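# --- Editor's note: illustrative sketch, not part of the patch -----------------
# The cold-start test a few lines above ORs three cases that all force
# IAU_OFFSET=0 and IAU_FHROT=0. An equivalent, more readable decomposition,
# assuming the same variables are already set, might look like:
cold_start_cycled()    { [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && "${EXP_WARM_START}" = ".false." ]]; }
cold_start_fcst_only() { [[ "${MODE}" = "forecast-only" && "${EXP_WARM_START}" = ".false." ]]; }
iau_turned_off()       { [[ "${DOIAU}" = "NO" ]]; }
if cold_start_cycled || iau_turned_off || cold_start_fcst_only; then
  export IAU_OFFSET=0
  export IAU_FHROT=0
fi
# -------------------------------------------------------------------------------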
+ +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. 
+export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
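# --- Editor's note: summary of the APP case block below, not part of the patch --
# The case "${APP}" statement further down toggles the component switches set in
# the Defaults block above, roughly as follows:
#   ATM       -> atmosphere only (defaults unchanged)
#   ATMA      -> ATM plus DO_AERO=YES
#   ATMW      -> ATM plus DO_COUPLED=YES and DO_WAVE=YES (WAVE_CDUMP="both")
#   NG-GODAS  -> DO_ATM=NO, DO_OCN=YES, DO_ICE=YES
#   S2S*      -> DO_COUPLED=YES, DO_OCN=YES, DO_ICE=YES with the coupled CCPP suite;
#                a trailing "A" (e.g. S2SWA) also sets DO_AERO=YES, and a leading
#                "S2SW" also sets DO_WAVE=YES with cplwav2atm=".true."
# -------------------------------------------------------------------------------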
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
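# --- Editor's note: illustrative sketch, not part of the patch -----------------
# In the IAU block above, IAU_FHROT is taken as the first character of the
# comma-separated IAUFHRS list, so IAUFHRS="3,6,9" yields IAU_FHROT=3:
IAUFHRS_demo="3,6,9"
echo "${IAUFHRS_demo}" | cut -c1        # prints: 3
# Note that cut -c1 keeps only one character; a first-field extraction such as
# ${IAUFHRS_demo%%,*} (or cut -d, -f1) would also accommodate two-digit hours.
# -------------------------------------------------------------------------------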
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
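# --- Editor's note: illustrative sketch, not part of the patch -----------------
# The "@...@" tokens in this config.base.emc.dyn_* template (e.g. @MACHINE@,
# @SDATE@, @PSLOT@) are placeholders filled in at experiment setup time to
# produce a concrete config.base like the JET one shown earlier in this change.
# Purely for illustration, and assuming a simple sed-based fill (the actual
# mechanism in the workflow's setup scripts may differ), one substitution could
# look like the following; the values and output path here are hypothetical:
sed -e "s|@MACHINE@|HERA|" \
    -e "s|@SDATE@|2022111000|" \
    -e "s|@PSLOT@|my_experiment|" \
    config.base.emc.dyn_hera > config.base
# -------------------------------------------------------------------------------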
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling: when DO_GLDAS is set, FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# Turn on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# scripts default to binary diagnostic files. Set diagnostic file +# variables here since they are used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh.
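+ +# Illustrative reading of the archive settings above: with ARCH_CYC=00, ARCH_WARMICFREQ=4, +# and ARCH_FCSTICFREQ=1, warm-start ICs are archived from the 00z cycle every fourth day +# while forecast-only ICs are archived daily.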
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.com b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx 
COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
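+ +# Note (illustrative): the single-space values used below (e.g. waveesmfGRD=' ') follow the +# "empty variables must include a space" rule stated at the top of this file; presumably a +# truly empty value would otherwise be overwritten by defaults elsewhere in the workflow.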
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.earc b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ecen b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.echgres b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ediag b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.efcs b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.eobs b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.epos b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.esfc b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.eupd b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.fcst b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
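+ +# Worked example (illustrative): for a fully coupled S2SW run at CASE=C768 the string built +# near the top of this file expands to "--fv3 C768 --mom6 025 --cice6 025 --ww3 mx025" before +# config.ufs is sourced; ${waveGRD// /;} simply swaps spaces for ';' when several wave grids are listed.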
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not used. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true."
+ export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." 
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.fit2obs b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.gempak b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.getic b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.gldas b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ice b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.init b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.init @@ -0,0 +1,54 @@ +#! 
/usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.landanl b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.landanlinit b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.landanlinit @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.landanlrun b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.metp b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.nsst b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocn b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocnanal b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocnanalbmat b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocnanalbmat new file mode
100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocnpost b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.post b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.postsnd b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.prep b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.resources b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components; they usually land on the atmosphere tasks. + # However, it is suggested to limit the mediator PETS to 300, as a larger count may slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + 
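# For illustration only, a worked example of the PETS arithmetic above, assuming
# QUILTING=".true.", an atmosphere-only (uncoupled) run, and the C768 gfs settings from
# this experiment's config.ufs (layout 8x8, nthreads_fv3_gfs=4, WRITE_GROUP_GFS=2,
# 12 write tasks per group per thread per tile):
#   ntasks_fv3_gfs   = 8 * 8 * 6      = 384
#   FV3PETS          = 384 * 4        = 1536
#   ntasks_quilt_gfs = 2 * (12 * 6)   = 144
#   QUILTPETS        = 144 * 4        = 576
#   ATMPETS = NTASKS_TOT = npe_fcst_gfs = 1536 + 576 = 2112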
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
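# For illustration only: the eval-based assignments above build variable names from the
# step name at run time. For example, with step=arch,
#   eval "export wtime_$step='06:00:00'"
# expands to
#   export wtime_arch='06:00:00'
# The non-static config.resources in this same directory uses declare -x "wtime_${step}"=...
# in places for the same effect.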
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.sfcanl b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ufs b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ufs new file mode 100644 index 0000000000..88ca9385ab --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.ufs @@ -0,0 +1,373 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=8 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=8 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + 
export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.vrfy b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wafs b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wafsblending b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wave b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wavegempak b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wavegempak @@ -0,0 +1,13 @@ +#! 
/usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.waveinit b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.waveprep b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/runcmds b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/runcmds new file mode 100644 index 0000000000..331bec8558 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8.xml -d v17_p8.db +rocotostat -w v17_p8.xml -d v17_p8.db diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/v17_p8.crontab b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/v17_p8.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/v17_p8.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/v17_p8.xml b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/v17_p8.xml new file mode 100644 index 0000000000..3665627c8b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/v17_p8.xml @@ -0,0 +1,153 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211090000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 01:30:00 + xjet + 88:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + 
xjet + 01:00:00 + 4:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/v17_p8_ALL.xml b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/v17_p8_ALL.xml new file mode 100644 index 0000000000..1310a1eab7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt/v17_p8_ALL.xml @@ -0,0 +1,195 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3-dev + batch + vjet + 00:02:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + debug + 00:30:00 + vjet + 232:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + vjet + 01:00:00 + 4:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + vjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/8x8x2wgx12wt 
b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/8x8x2wgx12wt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.aero b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.aeroanl b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. 
$EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.aerosol_init @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.anal b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. 
in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. 
+ # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.analcalc b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.analdiag b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. $EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.arch b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. 
$EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.atmanl b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.atmensanl b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.awips b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. 
of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.base b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.base new file mode 100644 index 0000000000..e75ba47902 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.base @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." 
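+# Minimal illustration (commented out, not executed by the workflow): SDATE/EDATE
+# above follow the YYYYMMDDHH convention used throughout these configs, so the
+# date and cycle pieces can be recovered with plain bash substring expansion, e.g.
+#   PDY=${SDATE:0:8}   # -> 20221110
+#   cyc=${SDATE:8:2}   # -> 00
+# In the running experiment, PDY and cyc are instead passed to each task by
+# Rocoto (the v17_p8.xml tasks set PDY=@Y@m@d and cyc=@H).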
+export assim_freq=6 +export PSLOT="v17_p8_8x8_xjet_2wg_12wt_168h" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-168} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-168} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-168} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-168} +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." 
+ export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is 
defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
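The `@...@` tokens in this `config.base.emc.dyn` variant (for example `@MACHINE@`, `@gfs_cyc@`, `@CASECTL@`) are placeholders: the file is a template that gets resolved into a concrete `config.base` when the experiment directory is generated. A minimal sketch of that substitution, assuming a simple sed-based fill; the real workflow uses its Python setup scripts, and the values shown are purely illustrative:

    # illustrative only: resolve a few @KEY@ placeholders into a concrete config.base
    sed -e "s|@MACHINE@|HERA|g" \
        -e "s|@ACCOUNT@|fv3-cpu|g" \
        -e "s|@gfs_cyc@|1|g" \
        -e "s|@CASECTL@|C768|g" \
        config.base.emc.dyn_EMC > config.base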
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
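The per-cycle forecast length earlier in this file is resolved with `FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}})`, i.e. the two-digit cycle hour selects one of `FHMAX_GFS_00/06/12/18`. A small sketch of the same lookup, plus an equivalent that avoids `eval` via bash indirect expansion (illustrative only):

    cyc=06                                        # two-digit cycle hour, normally set by the job
    export FHMAX_GFS_06=120
    FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}})   # -> 120 (pattern used in config.base)
    var="FHMAX_GFS_${cyc}"
    FHMAX_GFS=${!var}                             # same result without eval
    echo "${FHMAX_GFS}"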
+ +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. 
+export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
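Several of the settings above (`envir`, `RUN`, the `SEND*` switches, `DBNROOT`) rely on bash default expansion so that a value already exported by the job card wins over the config default. A minimal sketch of how the nested form `RUN=${RUN:-${CDUMP:-"gfs"}}` resolves:

    unset RUN CDUMP
    echo "${RUN:-${CDUMP:-gfs}}"   # -> gfs      (neither set: innermost default)
    CDUMP="gdas"
    echo "${RUN:-${CDUMP:-gfs}}"   # -> gdas     (CDUMP acts as the RUN proxy)
    RUN="enkfgdas"
    echo "${RUN:-${CDUMP:-gfs}}"   # -> enkfgdas (an explicit RUN always wins)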
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
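`IAU_FHROT=$(echo ${IAUFHRS} | cut -c1)` above simply takes the first character of the comma-separated increment list, i.e. the first IAU forecast hour (3 for "3,6,9"). A sketch, with a parameter-expansion equivalent that would also handle a two-digit first hour (illustrative only):

    IAUFHRS="3,6,9"
    IAU_FHROT=$(echo ${IAUFHRS} | cut -c1)   # -> 3 (first character, as in config.base)
    IAU_FHROT=${IAUFHRS%%,*}                 # -> 3 (first field; safe for "12,15,18" too)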
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
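Flags such as `l4densvar`, `lwrite4danl`, and `lobsdiag_forenkf` above are held as the literal strings ".true."/".false." because they are ultimately written into Fortran namelists rather than tested as bash booleans. A minimal sketch of how such values might be templated into a namelist fragment; the file name and namelist group here are illustrative, not the workflow's actual namelist assembly:

    l4densvar=".true."
    lobsdiag_forenkf=".true."
    # write a small &setup fragment carrying the Fortran logicals as-is
    printf '&setup\n  l4densvar = %s,\n  lobsdiag_forenkf = %s,\n/\n' \
        "${l4densvar}" "${lobsdiag_forenkf}" > setup.nml.fragment
    cat setup.nml.fragment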
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
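The `NCP`/`NMV`/`NLN` variables above are convenience wrappers used throughout the workflow scripts so the copy/move/link behavior can be adjusted in one place. A short sketch of the intended usage pattern; the file and directory names are illustrative, not specific paths from this experiment:

    export NCP="/bin/cp -p"
    export NLN="/bin/ln -sf"
    ${NCP} "${FIXgfs}/some_fix_file" "${DATA}/"            # copy, preserving timestamps/permissions
    ${NLN} "${ROTDIR}/gfs.20221110/00/some_output.nc" .    # force-refresh a symlink in the run dir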
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
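`STEP_GFS=$(( 24 / gfs_cyc ))` above converts the number of GFS cycles per day into the spacing in hours between GFS forecast cycles. A quick sketch of the mapping:

    for gfs_cyc in 1 2 4; do
      echo "gfs_cyc=${gfs_cyc} -> STEP_GFS=$(( 24 / gfs_cyc ))h"   # 24h, 12h, 6h
    done
    # gfs_cyc=0 is special-cased to STEP_GFS=0 (no GFS forecast cycles)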
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
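When `HPSSARCH` resolves to YES (and `LOCALARCH` to NO), the archive jobs push tarballs to the HPSS path in `ATARDIR`. A minimal sketch of what such a push could look like, assuming `htar` is available on the platform; the tarball name and member list are illustrative, not the workflow's actual archive layout:

    CDATE=2022111000
    htar -P -cvf "${ATARDIR}/${CDATE}/gfsa.tar" \
        "gfs.20221110/00/model_data/atmos/history/gfs.t00z.atmf006.nc"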
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.base_168h b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.base_168h new file mode 100644 index 0000000000..12ead56030 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.base_168h @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." 
+export assim_freq=6 +export PSLOT="v17_p8_8x8_xjet_2wg_12wt_168h" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-24} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-24} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-24} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." 
+ export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.com b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. 
+# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx 
COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." + +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.earc b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. 
$EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ecen b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.echgres b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ediag b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.efcs b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.efcs @@ -0,0 +1,97 @@ +#! /usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? 
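+# Illustrative note (editor's comment, not part of the original file): OCNRES appears to
+# encode the MOM6/CICE6 grid spacing in hundredths of a degree (e.g. 100 = 1.0 deg,
+# 025 = 0.25 deg). With CASE_ENKF=C384 and DO_OCN=DO_ICE=YES, the config.ufs call
+# assembled below would resolve to, for example:
+#   source $EXPDIR/config.ufs --fv3 C384 --mom6 025 --cice6 025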
+ +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.eobs b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. $EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.epos b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.epos @@ -0,0 +1,20 @@ +#! 
/usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.esfc b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.eupd b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." # Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.fcst b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.fcst_gsl @@ -0,0 +1,481 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." 
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. 
+elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. ### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." 
+ export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." 
+ fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.fcst_orig @@ -0,0 +1,364 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. 
while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." 
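+# Worked example (editor's comment, not part of the original file): the ${tbf}${tbp}
+# suffixes set above select the field-table variant in the branches below. With
+# satmedmf=".true." and progsigma=".true.", the Zhao-Carr case resolves to
+#   FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr_satmedmf_progsigma"
+# while the Thompson case (imp_physics=8) ignores ${tbf} and uses
+#   field_table_thompson_noaero_tke_progsigma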
+ +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." 
+ +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.fit2obs b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.gempak b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. 
$EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.getic b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. $EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.gldas b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. 
$EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ice b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.init b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.init @@ -0,0 +1,54 @@ +#! /usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.landanl b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.landanl @@ -0,0 +1,23 @@ +#! 
/usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.landanlinit b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.landanlrun b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.metp b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. 
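+# Example (editor's comment, not part of the original file): METplus expands the
+# model_file_format template above once per forecast hour; with CDUMP=gfs, an
+# initialization time of 2021032100, and a 24-hour lead it should resolve to
+#   pgbf24.gfs.2021032100.grib2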
+export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. +export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.nsst b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocn b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable is determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is time interval for applying increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocnanal b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size reolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocnanalbmat 
b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocnpost b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ocnpost @@ -0,0 +1,16 @@ +#! 
/usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.post b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.postsnd b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.prep b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.resources b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components; they usually land on the atmosphere tasks. + # However, it is suggested to limit the mediator PETS to 300, as a larger count may degrade performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + 
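# Worked example of the PETS arithmetic above (illustrative only, not part of the diff;
# assumes an ATM-only, uncoupled run with QUILTING=.true.). Using the C768 gfs settings
# from this experiment's config.ufs (layout_x_gfs=8, layout_y_gfs=8, nthreads_fv3_gfs=4,
# WRITE_GROUP_GFS=2, WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12):
#   ntasks_fv3_gfs   = 8 * 8 * 6       = 384
#   FV3PETS          = 384 * 4         = 1536
#   ntasks_quilt_gfs = 2 * (12 * 6)    = 144
#   QUILTPETS        = 144 * 4         = 576
#   ATMPETS = NTASKS_TOT = 1536 + 576  = 2112   # becomes npe_fcst_gfs for this case
# Wave, ocean, and ice PETS would be added to NTASKS_TOT when DO_WAVE/DO_OCN/DO_ICE are "YES".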
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
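# Note on the tasks-per-node arithmetic used throughout these resource files (an
# illustration, assuming the WCOSS2 value npe_node_max=128 set at the top of this
# static config): bc performs integer division, so the result is truncated. For
# example, for the eupd step below with nth_eupd=14:
#   npe_node_eupd=$(echo "128 / 14" | bc)   # -> 9 tasks per node (9 x 14 = 126 of 128 cores)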
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.sfcanl b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ufs b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ufs new file mode 100644 index 0000000000..88ca9385ab --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.ufs @@ -0,0 +1,373 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=8 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=8 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + 
export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.vrfy b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wafs b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wafsblending b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wave b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. 
$EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wavegempak b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.waveinit b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.waveprep b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/runcmds b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/runcmds new file mode 100644 index 0000000000..b4afe06fc0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/runcmds @@ -0,0 +1,6 @@ + +rocotorun -w v17_p8.xml -d v17_p8.db +rocotostat -w v17_p8.xml -d v17_p8.db + +rocotorun -w v17_p8_24h.xml -d v17_p8_24h.db +rocotostat -w v17_p8_24h.xml -d v17_p8_24h.db diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/v17_p8.crontab b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/v17_p8.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/v17_p8.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/v17_p8.xml_168h b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/v17_p8.xml_168h new file mode 100644 index 0000000000..a6e8742a08 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/v17_p8.xml_168h @@ -0,0 +1,153 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211090000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 07:40:00 + xjet + 88:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 
_f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/v17_p8_24h.xml b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/v17_p8_24h.xml new file mode 100644 index 0000000000..41b6b62002 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_8x8_xjet_2wg_12wt_168h/v17_p8_24h.xml @@ -0,0 +1,153 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211090000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + debug + 00:30:00 + xjet + 88:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/12x12x2wgx12wt b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/12x12x2wgx12wt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/2nodes_post b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/2nodes_post new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.aero b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null 
+++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.aeroanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. 
$EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.aerosol_init @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.anal b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. 
in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. 
+ # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.analcalc b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.analdiag b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. $EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.arch b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. 
$EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.atmanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.atmensanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.atmensanl @@ -0,0 +1,22 @@ +#! 
/usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.awips b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.base b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.base new file mode 100644 index 0000000000..7e6d00308b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.base @@ -0,0 +1,383 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="HERA" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="gsd-fv3" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="hera" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT="fim" + +# Directories relative to installation areas: +export HOMEgfs=/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/scratch1/NCEPDEV/global/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/scratch1/NCEPDEV/global/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/scratch1/NCEPDEV/global/glopara/dump" + +# USER specific paths +export HOMEDIR="/scratch1/BMC/gsd-fv3-dev/NCEPDEV/global/${USER}" +export STMP="${ROTDIR}/.." +export PTMP="${ROTDIR}/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/scratch1/NCEPDEV/global/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022110900 +export EDATE=2022110900 +export EXP_WARM_START=".false." 
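# --- Illustrative note (not part of the patch) -----------------------------
# The NCP/NMV/NLN convenience variables defined above are what downstream
# workflow scripts expand when staging files.  A minimal usage sketch, with
# hypothetical paths and assuming DATA is the job work directory:
#   ${NCP} "${FIXgfs}/some_fix_file.txt" "${DATA}/"          # cp -p: keep mode/times
#   ${NLN} "${ROTDIR}/gfs.${PDY}/${cyc}/atmos" "${DATA}/COM" # ln -sf: force symlink
# ----------------------------------------------------------------------------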
+export assim_freq=6 +export PSLOT="v17_p8_c3_12x12_2wg_12wt" +export EXPDIR="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/${PSLOT}" +export ROTDIR="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_c3" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='mx025' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#JKHexport FHMAX_GFS_00=${FHMAX_GFS_00:-24} +#JKHexport FHMAX_GFS_06=${FHMAX_GFS_06:-24} +#JKHexport FHMAX_GFS_12=${FHMAX_GFS_12:-24} +#JKHexport FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ### JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="NO" +export DO_JEDIATMENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is 
defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
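# --- Illustrative note (not part of the patch) -----------------------------
# gfs_cyc sets how many long (GFS) forecast cycles run per day; a few lines
# below it is converted into STEP_GFS, the stride in hours between GFS cycles.
# A quick sketch of that arithmetic:
#   for gfs_cyc in 1 2 4; do
#     echo "gfs_cyc=${gfs_cyc} -> STEP_GFS=$(( 24 / gfs_cyc ))h"   # 24h, 12h, 6h
#   done
# gfs_cyc=0 is handled separately below and simply sets STEP_GFS="0".
# ----------------------------------------------------------------------------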
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
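# --- Illustrative note (not part of the patch) -----------------------------
# In the IAU block above, IAU_FHROT is taken as the first character of the
# comma-separated IAUFHRS list, which works only while the first increment
# hour is a single digit.  A quick sketch of both cases:
#   echo "3,6,9" | cut -c1    # -> 3   (intended behaviour)
#   echo "12,15" | cut -c1    # -> 1   (a two-digit first hour would be truncated)
# ----------------------------------------------------------------------------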
+ +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. 
+export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
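# Note on the DUMP_SUFFIX window above (illustrative, not part of the config): CDATE is a
# fixed-width YYYYMMDDHH stamp, so the numeric -ge/-le tests order dates chronologically.
CDATE=2019100118
if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then
  echo "within the NCO GFS v15.3 parallel window -> DUMP_SUFFIX=p"
fi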
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
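# Illustration of the IAU_FHROT derivation just above: "cut -c1" keeps only the first character
# of IAUFHRS, so "3,6,9" gives a forecast restart offset of 3 h. (This assumes the first
# increment hour is a single digit; a two-digit leading hour would be truncated.)
echo "3,6,9" | cut -c1    # -> 3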
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
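# Reading the CASE -> OCNRES table above: OCNRES appears to encode the MOM6/CICE grid spacing in
# hundredths of a degree (500 = 5.0 deg, 100 = 1.0 deg, 050 = 0.5 deg, 025 = 0.25 deg), which is
# why ICERES simply mirrors it. A quick conversion under that assumption:
OCNRES=025
awk -v r="${OCNRES}" 'BEGIN {printf "OCNRES=%s -> %.2f deg\n", r, r/100}'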
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.com b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx 
COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
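# Why the "empty" overrides in config.defaults.s2sw are written as a single space (see the note at
# the top of that file): presumably because downstream settings apply ${var:-default}-style
# defaults, and ":-" substitutes the default when the variable is unset OR null. A lone space is
# non-null and survives, while a truly empty string would be replaced. Minimal illustration
# (hypothetical variable name):
unset x;  echo "[${x:-default}]"   # -> [default]
x="";     echo "[${x:-default}]"   # -> [default]  (null, still replaced)
x=" ";    echo "[${x:-default}]"   # -> [ ]        (a space is kept)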
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.earc b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ecen b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.echgres b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ediag b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.efcs b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.eobs b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.epos b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.esfc b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.eupd b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.fcst b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
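# Sketch (assumed example values, not part of the config) of how the config.ufs argument string
# above is assembled for a fully coupled case; "${waveGRD// /;}" rewrites the space-separated wave
# grid list as a single semicolon-separated argument.
CASE=C768 OCNRES=025 ICERES=025 waveGRD='gnh_10m aoc_9km gsh_15m'
DO_OCN=YES DO_ICE=YES DO_WAVE=YES
string="--fv3 $CASE"
[[ ${DO_OCN} == "YES" ]]  && string="$string --mom6 $OCNRES"
[[ ${DO_ICE} == "YES" ]]  && string="$string --cice6 $ICERES"
[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}"
echo "$string"   # -> --fv3 C768 --mom6 025 --cice6 025 --ww3 gnh_10m;aoc_9km;gsh_15m
# The component loop that follows resolves DO_WAVE/DO_OCN/DO_ICE/DO_AERO by name via
# "eval echo \$$control"; bash's ${!control} indirect expansion would return the same value.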
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." 
+ export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not used. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false."
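As an aside (illustrative only, not part of the patch itself): the launch_level expression used in the gwd_opt=1 branch above, and again at the end of this gwd_opt=2 branch, relies on bc truncating to an integer at its default scale. A minimal sketch with a hypothetical LEVS value:

LEVS=128                                   # hypothetical number of model levels
launch_level=$(echo "${LEVS}/2.35" | bc)   # bc default scale truncates: 128/2.35 -> 54
echo "launch_level=${launch_level}"        # prints 54 (LEVS=127 would also give 54)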
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
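For reference, a minimal standalone sketch of the GFS restart-list construction above, using hypothetical values (restart_interval_gfs=12, FHMAX_GFS=48, IAU_OFFSET=6); in the workflow these come from the experiment configuration:

restart_interval_gfs=12                            # hypothetical write cadence (hours)
FHMAX_GFS=48                                       # hypothetical forecast length (hours)
IAU_OFFSET=6                                       # hypothetical IAU window offset (hours)
rst_list=""
xfh=$((restart_interval_gfs + (IAU_OFFSET / 2)))   # first restart at 12 + 3 = 15 h
while [ "${xfh}" -le "${FHMAX_GFS}" ]; do
  rst_list="${rst_list} ${xfh}"                    # accumulates " 15 27 39"
  xfh=$((xfh + restart_interval_gfs))
done
echo "restart_interval=${rst_list}"                # -> restart_interval= 15 27 39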
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.fit2obs b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.gempak b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.getic b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.gldas b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ice b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.init b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.init @@ -0,0 +1,54 @@ +#! 
/usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.landanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.landanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.landanlinit @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.landanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.metp b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
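The model_file_format and *_truth_file_format strings above use METplus filename templating ({init?fmt=...}, {lead?fmt=...}, {valid?fmt=...}); the substitution is performed by METplus/EMC_verif-global, but as a rough illustration with a hypothetical cycle, the pgbf template is expected to resolve to names like:

CDUMP="gfs"
init="2021032100"                          # hypothetical initialization time, YYYYMMDDHH
for lead in 00 24 48; do                   # two-digit lead hours, as in {lead?fmt=%2H}
  echo "pgbf${lead}.${CDUMP}.${init}.grib2"
done
# -> pgbf00.gfs.2021032100.grib2  pgbf24.gfs.2021032100.grib2  pgbf48.gfs.2021032100.grib2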
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.nsst b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocn b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocnanal b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocnanalbmat b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocnanalbmat new file mode 
100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocnpost b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.post b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.postsnd b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.prep b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.resources b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.resources new file mode 100644 index 0000000000..4f2f89ab6a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.resources @@ -0,0 +1,974 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
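As a side note (illustrative only, hypothetical values): the JEDI-based analysis run steps above (e.g. landanlrun, aeroanlrun) size their MPI task count as the per-tile layout_x by layout_y decomposition times the six cubed-sphere tiles, and tasks-per-node as the machine limit divided by threads per task:

layout_x=5; layout_y=5                    # hypothetical C384 layout from the case blocks above
nth=1                                     # threads per task for the run step
npe_node_max=40                           # e.g. a 40-core Hera node
npe=$(( layout_x * layout_y * 6 ))        # 5 * 5 * 6 = 150 MPI tasks
npe_node=$(( npe_node_max / nth ))        # 40 tasks per node
echo "npe=${npe} npe_node=${npe_node}"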
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components; they usually land on the atmosphere tasks. + # However, it is suggested to limit the mediator PETS to 300, as a larger count may cause slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + #JKHdeclare -x "wtime_${step}_gfs"="06:00:00" + declare -x "wtime_${step}_gfs"="04:00:00" ## JKH - make walltime smaller + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} 
not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="00:20:00" ## JKH - make walltime smaller + #JKH export wtime_post_gfs="01:00:00" + #JKH export npe_post=126 + export npe_post=${npe_node_max} ## JKH - change to use 1 node for post + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = 
"echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif [[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = 
"WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
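+ # For reference (derived from the settings above, with npe_node_max=128 as set in this file): + # 480 eobs tasks at 40 tasks per node span 12 nodes, and 40 tasks x 3 threads use 120 of the 128 cores on each node.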
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.sfcanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ufs b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ufs new file mode 100644 index 0000000000..6c48881832 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.ufs @@ -0,0 +1,373 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
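+ # For reference: NX_GLB x NY_GLB = 1440 x 1080 above corresponds to the 0.25-degree global ocean grid; + # the runoff and chlorophyll climatology filenames in this block are tagged with those dimensions.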
CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.vrfy b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wafs b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wafsblending b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wave b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wavegempak b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wavegempak @@ -0,0 +1,13 @@ +#! 
/usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.waveinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.waveprep b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/runcmds b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/runcmds new file mode 100644 index 0000000000..714bc3036c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_c3.xml -d v17_p8_c3.db +rocotostat -w v17_p8_c3.xml -d v17_p8_c3.db diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/v17_p8_c3.crontab b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/v17_p8_c3.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/v17_p8_c3.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/v17_p8_c3.xml b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/v17_p8_c3.xml new file mode 100644 index 0000000000..9fdf768dbb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_2wg_12wt/v17_p8_c3.xml @@ -0,0 +1,165 @@ + + + + + + + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3 + batch + hera + 00:02:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + 
+ + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3 + batch + hera + 04:00:00 + + 101:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/RUNDIRS/&PSLOT;/ + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3 + batch + hera + 00:20:00 + 1:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/12x12x1wgx14wt b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/12x12x1wgx14wt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.aero b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.aeroanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.anal b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # 
Assimilate Metop-C GNSSRO + if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.analcalc b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.analdiag b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.arch b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.atmanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.atmensanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.awips b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.awips @@ -0,0 +1,17 @@ +#! 
/usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.base b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.base new file mode 100644 index 0000000000..65ec5df02f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.base @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="vjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." +export assim_freq=6 +export PSLOT="v17_p8_c3_12x12_vjet_1wg_14wt_24h" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_c3" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
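# A short illustration of the pattern used by ERRSCRIPT and the SEND* switches above: the
# "${VAR:-default}" form keeps any value already exported by the calling job card and only
# falls back to the literal written here. The default ERRSCRIPT relies on eval so that $err
# is read at check time; a minimal sketch, assuming the calling ex-script records the last
# exit status in err before expanding it:
#   err=$?
#   $ERRSCRIPT    # expands to: eval [[ $err = 0 ]]  and succeeds only when err is 0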
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-168} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-168} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-168} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-168} +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
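# Worked example of the two derived values above, assuming this experiment's settings
# (gfs_cyc=1) and a 00Z cycle, i.e. cyc=00 at run time:
#   FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}})   # resolves to ${FHMAX_GFS_00}, i.e. 24 here
#   STEP_GFS=$(( 24 / gfs_cyc ))                  # 24 / 1 = 24, one gfs cycle per day
# With gfs_cyc=4 the same arithmetic would give STEP_GFS=6, a gfs cycle every 6 hours.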
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
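# Worked example of the cold-start/IAU check earlier in this file for this experiment's
# settings: MODE="forecast-only" and EXP_WARM_START=".false." make the third branch of the
# compound test true, so even though DOIAU="YES" the cold-start values take effect:
#   IAU_OFFSET=0
#   IAU_FHROT=0
# In a warm-started cycled run none of the branches fire and the 6-hour offset and
# IAU_FHROT=3 set earlier are kept.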
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
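# The "@NAME@" tokens in this template (for example @MACHINE@, @SDATE@, @CASECTL@) are
# placeholders that the experiment setup step is expected to replace with concrete values
# when it writes the experiment's config.base; the resolved config.base earlier in this
# directory shows one such result. A minimal sketch of that substitution, purely
# illustrative since the real mechanism lives in the workflow's setup scripts:
#   sed -e "s|@MACHINE@|JET|" -e "s|@SDATE@|2022111000|" config.base.emc.dyn_EMC > config.base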
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
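# Worked example of the EnKF output-frequency block above, assuming the @DOHYBVAR@
# placeholder resolves to "YES" at setup time: l4densvar holds the literal Fortran-style
# string ".true.", so the bracket test is a plain string comparison and the hourly settings
# win, i.e.
#   FHOUT=1
#   FHOUT_ENKF=1
# whereas l4densvar=".false." would leave FHOUT at 3 and set FHOUT_ENKF=3.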
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.com b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx 
COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
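Editor's note: the "empty variables must include a space" rule at the top of this file follows from bash ${var:-default} semantics: an unset or empty variable is replaced by the fallback, while a single space is non-null and therefore survives. A minimal sketch (the variable name is only an example):

unset wavepostGRD
echo "[${wavepostGRD:-fallback}]"   # -> [fallback]  (unset: fallback used)
wavepostGRD=""
echo "[${wavepostGRD:-fallback}]"   # -> [fallback]  (empty/null: fallback used)
wavepostGRD=' '
echo "[${wavepostGRD:-fallback}]"   # -> [ ]         (a space is non-null: value kept)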
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.earc b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ecen b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.echgres b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ediag b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.efcs b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.eobs b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.epos b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.esfc b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.eupd b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.fcst b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
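Editor's note: the component-sourcing loop earlier in this file uses eval and awk for variable indirection and lower-casing. A minimal sketch of the same lookup with bash 4+ built-ins, shown only to illustrate what the loop computes (not a proposed change to the patch):

DO_OCN="YES"
component="OCN"
control="DO_${component}"
echo "${!control}"             # indirect expansion -> YES
echo "config.${component,,}"   # lower-cased suffix -> config.ocn (the file that would be sourced)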
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." 
+ export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." 
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
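Editor's note: a worked example (illustrative values, not part of the patch) of the restart list built by the loop above. With restart_interval_gfs=12, FHMAX_GFS=48, DOIAU=YES and IAU_OFFSET=6, the loop starts at xfh=12+(6/2)=15 and steps by 12; with restart_interval_gfs<=0 it instead collapses to a single restart at FHMAX_GFS.

restart_interval_gfs=12; FHMAX_GFS=48; IAU_OFFSET=6
rst_list=""; xfh=$((restart_interval_gfs + (IAU_OFFSET / 2)))
while [ ${xfh} -le ${FHMAX_GFS} ]; do
  rst_list="${rst_list} ${xfh}"
  xfh=$((xfh + restart_interval_gfs))
done
echo "restart_interval=${rst_list}"   # prints: restart_interval= 15 27 39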
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.fit2obs b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.gempak b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.getic b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.gldas b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ice b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ice @@ -0,0 +1,5 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.init b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.init @@ -0,0 +1,54 @@ +#! /usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.landanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.landanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.landanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.metp b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.nsst b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 : the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocn b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocnanal b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocnanalbmat
b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocnpost b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ocnpost @@ -0,0 +1,16 @@ +#! 
/usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.post b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.postsnd b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.prep b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.resources b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components; usually they land on the atmosphere tasks. + # However, it is suggested to limit the mediator PETS to 300, as a larger count may degrade performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST +
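A worked example of the PETS arithmetic above, recomputed with the C768 GFS settings from this experiment's config.ufs (12x12 layout, 4 FV3 threads, 1 write group of 14 write tasks per tile per thread); it assumes an atmosphere-only forecast with quilting enabled, so no WAV/OCN/ICE PETS are added.

# Illustration only: reproduce the fcst PETS math for this experiment's GFS forecast
layout_x_gfs=12; layout_y_gfs=12; nthreads_fv3_gfs=4
WRITE_GROUP_GFS=1; WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=14
(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 ))                                   # 864 FV3 tasks
(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 ))   # 84
(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS ))              # 84 write tasks
(( FV3PETS = ntasks_fv3_gfs * nthreads_fv3_gfs ))                                        # 3456
(( QUILTPETS = ntasks_quilt_gfs * nthreads_fv3_gfs ))                                    # 336
(( ATMPETS = FV3PETS + QUILTPETS ))                                                      # 3792
echo "npe_fcst_gfs would be ${ATMPETS}"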
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
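The eval lines above build resource variable names from the step name at run time. Below is a minimal sketch of what they expand to for step=arch, alongside the declare -x form used by the non-static config.resources (treated here as an interchangeable idiom), plus the integer division that the bc pipelines perform.

# Illustration only: dynamic variable names via eval and declare -x
step="arch"
eval "export wtime_$step='06:00:00'"    # runs: export wtime_arch='06:00:00'
eval "export npe_$step=1"               # runs: export npe_arch=1
declare -x "memory_${step}"="50GB"      # creates and exports memory_arch=50GB without eval
echo "${wtime_arch} ${npe_arch} ${memory_arch}"
# The bc pipelines used for per-node task counts do integer division, e.g.:
echo "128 / 14" | bc                    # prints 9, same as $(( 128 / 14 ))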
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.sfcanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ufs b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ufs new file mode 100644 index 0000000000..f3d23d6c7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.ufs @@ -0,0 +1,373 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=1 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=14 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.vrfy b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wafs b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wafsblending b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. 
$EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wave b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wave @@ -0,0 +1,159 @@ +#! /usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single 
restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! 
/usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wavegempak b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.waveinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.wavepostsbs @@ -0,0 +1,24 @@ +#! 
/usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.waveprep b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/runcmds b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/runcmds new file mode 100644 index 0000000000..714bc3036c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_c3.xml -d v17_p8_c3.db +rocotostat -w v17_p8_c3.xml -d v17_p8_c3.db diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/v17_p8_c3.crontab b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/v17_p8_c3.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/v17_p8_c3.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/v17_p8_c3.xml b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/v17_p8_c3.xml new file mode 100644 index 0000000000..d64b8ecdfc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_1wg_14wt_24h/v17_p8_c3.xml @@ -0,0 +1,151 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211090000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 01:00:00 + vjet + 237:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + 
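<!-- Illustrative sketch only, not part of the original workflow file: each
     name/value pair above is a rocoto envar entry which, in full markup, takes
     the assumed form
       <envar><name>RUN_ENVIR</name><value>emc</value></envar>
     and exports that variable into the batch job's environment. -->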
&ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + vjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/12x12x2wgx12wt b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/12x12x2wgx12wt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.aero b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.aeroanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.anal b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # 
Assimilate Metop-C GNSSRO + if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.analcalc b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.analdiag b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.arch b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.atmanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.atmensanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.awips b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.awips @@ -0,0 +1,17 @@ +#! 
/usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.base b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.base new file mode 100644 index 0000000000..2a3d664519 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.base @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="vjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." +export assim_freq=6 +export PSLOT="v17_p8_c3_12x12_vjet_2wg_12wt_24h" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_c3" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
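# Illustrative only, not part of the original file: the APP case block below
# promotes these defaults for coupled configurations, and downstream job scripts
# typically guard component work on the resulting toggles, e.g.
#   if [[ "${DO_WAVE}" == "YES" ]]; then echo "wave component enabled"; fi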
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-168} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-168} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-168} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-168} +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
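# Worked example, illustrative only and not part of the original file: with
# IAUFHRS="3,6,9" the "cut -c1" above picks the first increment hour, so
# IAU_FHROT=3; with gfs_cyc=1 and cyc=00, FHMAX_GFS resolves to FHMAX_GFS_00=24
# and STEP_GFS=$(( 24 / gfs_cyc ))=24, i.e. one 24-hour GFS forecast per day.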
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
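# Illustrative only, not part of the original file: with HPSSARCH="YES" the
# archive jobs target the ATARDIR defined above, which for this experiment
# expands to
#   /BMC/fim/1year/${USER}/JET/scratch/v17_p8_c3_12x12_vjet_2wg_12wt_24h
# while LOCALARCH="NO" leaves the local-disk archive path unused.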
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
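Because CDATE is a 10-digit YYYYMMDDHH stamp, the numeric -ge/-le comparison in the DUMP_SUFFIX block above brackets the NCO GFS v15.3 parallel dump window directly; chronological order and numeric order coincide. A small sketch with example dates (the dates passed in below are illustrative only):

check_dump_suffix() {
  local CDATE="$1" DUMP_SUFFIX=""
  if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then
    DUMP_SUFFIX="p"   # use dumps from the NCO GFS v15.3 parallel
  fi
  echo "CDATE=${CDATE} -> DUMP_SUFFIX='${DUMP_SUFFIX}'"
}
check_dump_suffix "2019100100"   # inside the window  -> 'p'
check_dump_suffix "2021032312"   # outside the window -> ''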
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
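Two of the settings above resolve indirectly at run time: FHMAX_GFS picks the per-cycle value via eval, and STEP_GFS is derived from gfs_cyc. A short sketch with assumed values (cyc=12 and gfs_cyc=4 are examples, not taken from this experiment); bash's ${!name} indirection would be an equivalent spelling of the eval:

cyc="12"; gfs_cyc=4
FHMAX_GFS_12=120
FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}})   # expands ${FHMAX_GFS_12} -> 120
if (( gfs_cyc != 0 )); then
  STEP_GFS=$(( 24 / gfs_cyc ))                # 4 gfs cycles per day -> one every 6 h
else
  STEP_GFS="0"
fi
echo "FHMAX_GFS=${FHMAX_GFS} STEP_GFS=${STEP_GFS}"   # -> FHMAX_GFS=120 STEP_GFS=6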
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
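One detail of the IAU settings above: IAU_FHROT is taken as the first character of IAUFHRS, which works while the first increment hour is a single digit. A quick sketch (the second IAUFHRS value is purely hypothetical, shown only to illustrate the limitation):

IAUFHRS="3,6,9"
echo "$(echo ${IAUFHRS} | cut -c1)"         # -> 3 (first character, as used above)
IAUFHRS="12,15,18"
echo "$(echo ${IAUFHRS} | cut -c1)"         # -> 1, although the first field is what is wanted
echo "$(echo ${IAUFHRS} | cut -d',' -f1)"   # -> 12 (a field-based cut would be the safer form)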
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
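The APP case statement above relies on a glob pattern plus two regex tests to switch the coupled components on. A compact sketch of how an example app name is classified (APP="S2SWA" here is only an illustration):

APP="S2SWA"
case "${APP}" in
  S2S*)
    echo "S2S* match   -> coupled ocean/ice (DO_OCN=DO_ICE=YES)"
    [[ "${APP}" =~ A$ ]]    && echo "trailing A  -> aerosols on (DO_AERO=YES)"
    [[ "${APP}" =~ ^S2SW ]] && echo "S2SW prefix -> waves on (DO_WAVE=YES, WAVE_CDUMP=both)"
    ;;
esac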
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.com b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx 
COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
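The note at the top of this defaults file, that empty variables must include a space, exists because the consuming configs apply ${var:-default} expansions (for example ${min_seaice:-"0.15"} in config.fcst): an unset or truly empty value is replaced, while a lone space is not. A two-line illustration (x and y are throwaway names used only for this sketch):

unset x; y=" "
echo "[${x:-default}]"   # -> [default]  (an empty/unset value is overwritten)
echo "[${y:-default}]"   # -> [ ]        (a single space survives the :- substitution)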
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.earc b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ecen b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.echgres b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ediag b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.efcs b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.eobs b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.epos b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.esfc b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.eupd b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.fcst b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
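The component for-loop near the top of this forecast config sources config.wave, config.ocn, config.ice, and config.aero only when the matching DO_* toggle is "YES", resolving the toggle name indirectly. A self-contained sketch with example toggle values (the echo stands in for the actual sourcing):

DO_WAVE="YES"; DO_OCN="YES"; DO_ICE="YES"; DO_AERO="NO"
for component in WAVE OCN ICE AERO; do
  control="DO_${component}"
  if [[ "${!control}" == "YES" ]]; then   # ${!control} is bash's indirect form of the eval used above
    echo "would source config.$(echo "${component}" | awk '{ print tolower($1) }')"
  fi
done
# -> config.wave, config.ocn, config.ice are sourced; config.aero is skipped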
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." 
+ export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." 
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
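# Editor's note (illustrative only, assumed values): with the restart-list
# construction above, e.g. restart_interval_gfs=12, FHMAX_GFS=120, DOIAU=YES
# and IAU_OFFSET=6, the first restart hour is 12 + 6/2 = 15, so
#   restart_interval="15 27 39 51 63 75 87 99 111"
# i.e. restart times are shifted by half the IAU window and then spaced every
# restart_interval_gfs hours out to FHMAX_GFS.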
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.fit2obs b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.gempak b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.getic b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.gldas b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ice b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ice @@ -0,0 +1,5 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.init b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.init @@ -0,0 +1,54 @@ +#! /usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.landanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.landanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.landanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.metp b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.nsst b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocn b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable is determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is time interval for applying increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocnanal b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size reolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocnanalbmat 
b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocnpost b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ocnpost @@ -0,0 +1,16 @@ +#! 
/usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.post b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.postsnd b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.prep b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.resources b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs}
+      export layout_y=${layout_y_gfs}
+      export WRITE_GROUP=${WRITE_GROUP_GFS}
+      export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS}
+      ntasks_fv3=${ntasks_fv3_gfs}
+      ntasks_quilt=${ntasks_quilt_gfs}
+      nthreads_fv3=${nthreads_fv3_gfs}
+    fi
+
+    # PETS for the atmosphere dycore
+    (( FV3PETS = ntasks_fv3 * nthreads_fv3 ))
+    echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})"
+
+    # PETS for quilting
+    if [[ "${QUILTING:-}" = ".true." ]]; then
+      (( QUILTPETS = ntasks_quilt * nthreads_fv3 ))
+      (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD ))
+      export WRTTASK_PER_GROUP
+    else
+      QUILTPETS=0
+    fi
+    echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})"
+
+    # Total PETS for the atmosphere component
+    ATMTHREADS=${nthreads_fv3}
+    (( ATMPETS = FV3PETS + QUILTPETS ))
+    export ATMPETS ATMTHREADS
+    echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})"
+
+    # Total PETS for the coupled model (starting w/ the atmosphere)
+    NTASKS_TOT=${ATMPETS}
+
+    # The mediator PETS can overlap with other components; they usually land on the atmosphere tasks.
+    # However, it is suggested to limit the mediator PETS to 300, since larger counts may degrade performance.
+    # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit
+    # TODO: Update reference when moved to ufs-weather-model RTD
+    MEDTHREADS=${nthreads_mediator:-1}
+    MEDPETS=${MEDPETS:-${ATMPETS}}
+    [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300
+    export MEDPETS MEDTHREADS
+    echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})"
+
+    if [[ "${DO_AERO}" = "YES" ]]; then
+      # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks).
+      (( CHMTHREADS = ATMTHREADS ))
+      (( CHMPETS = FV3PETS ))
+      # Do not add to NTASKS_TOT
+      export CHMPETS CHMTHREADS
+      echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})"
+    fi
+
+    if [[ "${DO_WAVE}" = "YES" ]]; then
+      (( WAVPETS = ntasks_ww3 * nthreads_ww3 ))
+      (( WAVTHREADS = nthreads_ww3 ))
+      export WAVPETS WAVTHREADS
+      echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})"
+      (( NTASKS_TOT = NTASKS_TOT + WAVPETS ))
+    fi
+
+    if [[ "${DO_OCN}" = "YES" ]]; then
+      (( OCNPETS = ntasks_mom6 * nthreads_mom6 ))
+      (( OCNTHREADS = nthreads_mom6 ))
+      export OCNPETS OCNTHREADS
+      echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})"
+      (( NTASKS_TOT = NTASKS_TOT + OCNPETS ))
+    fi
+
+    if [[ "${DO_ICE}" = "YES" ]]; then
+      (( ICEPETS = ntasks_cice6 * nthreads_cice6 ))
+      (( ICETHREADS = nthreads_cice6 ))
+      export ICEPETS ICETHREADS
+      echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})"
+      (( NTASKS_TOT = NTASKS_TOT + ICEPETS ))
+    fi
+
+    echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}"
+
+    if [[ "${_CDUMP}" =~ "gfs" ]]; then
+      declare -x "npe_${step}_gfs"="${NTASKS_TOT}"
+      declare -x "nth_${step}_gfs"=1  # ESMF handles threading for the UFS-weather-model
+      declare -x "npe_node_${step}_gfs"="${npe_node_max}"
+    else
+      declare -x "npe_${step}"="${NTASKS_TOT}"
+      declare -x "nth_${step}"=1  # ESMF handles threading for the UFS-weather-model
+      declare -x "npe_node_${step}"="${npe_node_max}"
+    fi
+
+  done
+
+  case "${CASE}" in
+    "C48" | "C96" | "C192")
+      declare -x "wtime_${step}"="00:30:00"
+      declare -x "wtime_${step}_gfs"="03:00:00"
+      ;;
+    "C384" | "C768" | "C1152")
+      declare -x "wtime_${step}"="01:00:00"
+      declare -x "wtime_${step}_gfs"="06:00:00"
+      ;;
+    *)
+      echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}"
+      exit 1
+      ;;
+  esac
+
+  unset _CDUMP _CDUMP_LIST
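+  # Worked example (comments only, assuming the C768 GFS settings from this
+  # experiment's config.ufs: layout_x_gfs=12, layout_y_gfs=12, nthreads_fv3_gfs=4,
+  # WRITE_GROUP_GFS=2, WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12).  For an
+  # atmosphere-only gfs forecast the accounting above gives:
+  #   ntasks_fv3   = 12 * 12 * 6  = 864
+  #   ntasks_quilt = 2 * (12 * 6) = 144
+  #   FV3PETS      = 864 * 4      = 3456
+  #   QUILTPETS    = 144 * 4      = 576
+  #   ATMPETS = NTASKS_TOT = 3456 + 576 = 4032   # becomes npe_fcst_gfs when no other components run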
+  unset NTASKS_TOT
+
+elif [[ ${step} = "ocnpost" ]]; then
+
+  export wtime_ocnpost="00:30:00"
+  export npe_ocnpost=1
+  export npe_node_ocnpost=1
+  export nth_ocnpost=1
+  export memory_ocnpost="96G"
+  if [[ ${machine} == "JET" ]]; then
+    # JET only has 88GB of requestable memory per node,
+    # so a second node is required to meet the requirement
+    npe_ocnpost=2
+  fi
+
+elif [[ ${step} = "post" ]]; then
+
+  export wtime_post="00:12:00"
+  export wtime_post_gfs="01:00:00"
+  export npe_post=126
+  res=$(echo "${CASE}" | cut -c2-)
+  if (( npe_post > res )); then
+    export npe_post=${res}
+  fi
+  export nth_post=1
+  export npe_node_post=${npe_post}
+  export npe_node_post_gfs=${npe_post}
+  export npe_node_dwn=${npe_node_max}
+  if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi
+  if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi
+  export is_exclusive=True
+
+elif [[ ${step} = "wafs" ]]; then
+
+  export wtime_wafs="00:30:00"
+  export npe_wafs=1
+  export npe_node_wafs=${npe_wafs}
+  export nth_wafs=1
+  export memory_wafs="1GB"
+
+elif [[ ${step} = "wafsgcip" ]]; then
+
+  export wtime_wafsgcip="00:30:00"
+  export npe_wafsgcip=2
+  export nth_wafsgcip=1
+  export npe_node_wafsgcip=1
+  export memory_wafsgcip="50GB"
+
+elif [[ ${step} = "wafsgrib2" ]]; then
+
+  export wtime_wafsgrib2="00:30:00"
+  export npe_wafsgrib2=18
+  export nth_wafsgrib2=1
+  npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc)
+  export npe_node_wafsgrib2
+  export memory_wafsgrib2="80GB"
+
+elif [[ ${step} = "wafsblending" ]]; then
+
+  export wtime_wafsblending="00:30:00"
+  export npe_wafsblending=1
+  export nth_wafsblending=1
+  npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc)
+  export npe_node_wafsblending
+  export memory_wafsblending="15GB"
+
+elif [[ ${step} = "wafsgrib20p25" ]]; then
+
+  export wtime_wafsgrib20p25="00:30:00"
+  export npe_wafsgrib20p25=11
+  export nth_wafsgrib20p25=1
+  npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc)
+  export npe_node_wafsgrib20p25
+  export memory_wafsgrib20p25="80GB"
+
+elif [[ ${step} = "wafsblending0p25" ]]; then
+
+  export wtime_wafsblending0p25="00:30:00"
+  export npe_wafsblending0p25=1
+  export nth_wafsblending0p25=1
+  npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc)
+  export npe_node_wafsblending0p25
+  export memory_wafsblending0p25="15GB"
+
+elif [[ ${step} = "vrfy" ]]; then
+
+  export wtime_vrfy="03:00:00"
+  export wtime_vrfy_gfs="06:00:00"
+  export npe_vrfy=3
+  export nth_vrfy=1
+  export npe_node_vrfy=1
+  export npe_vrfy_gfs=1
+  export npe_node_vrfy_gfs=1
+  if [[ ${machine} == "HERA" ]]; then
+    export memory_vrfy="16384M"
+  fi
+  export is_exclusive=True
+
+elif [[ "${step}" = "fit2obs" ]]; then
+
+  export wtime_fit2obs="00:20:00"
+  export npe_fit2obs=3
+  export nth_fit2obs=1
+  export npe_node_fit2obs=1
+  export memory_fit2obs="20G"
+  if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi
+
+elif [[ "${step}" = "metp" ]]; then
+
+  export nth_metp=1
+  export wtime_metp="03:00:00"
+  export npe_metp=4
+  export npe_node_metp=4
+  export wtime_metp_gfs="06:00:00"
+  export npe_metp_gfs=4
+  export npe_node_metp_gfs=4
+  export is_exclusive=True
+
+elif [[ ${step} = "echgres" ]]; then
+
+  export wtime_echgres="00:10:00"
+  export npe_echgres=3
+  export nth_echgres=${npe_node_max}
+  export npe_node_echgres=1
+  if [[ "${machine}" = "WCOSS2" ]]; then
+    export memory_echgres="200GB"
+  fi
+
+elif
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
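+  # Node-packing sketch (comments only): with npe_node_max=128 in this file,
+  # 40 tasks/node * 3 threads/task = 120 of the 128 cores are used per node,
+  # and 480 tasks / 40 tasks per node = 12 nodes for the eobs job.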
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.sfcanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ufs b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ufs new file mode 100644 index 0000000000..6c48881832 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.ufs @@ -0,0 +1,373 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
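+    # Layout sketch (comments only): with the per-tile layouts above and the
+    # ntasks_fv3 = layout_x * layout_y * 6 computation further below, the FV3
+    # task counts are gdas: 8 * 12 * 6 = 576 and gfs: 12 * 12 * 6 = 864.
+    # (The #JKH lines record the default gfs layout_y of 16 reduced to 12 here.)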
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
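+      # Expansion note (comments only): with NX_GLB=1440 and NY_GLB=1080 set above,
+      # the climatology file names in this case resolve to
+      #   runoff.daitren.clim.1440x1080.v20180328.nc
+      #   seawifs-clim-1997-2010.1440x1080.v20180328.nc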
CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.vrfy b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wafs b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wafsblending b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. 
$EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wave b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wave @@ -0,0 +1,159 @@ +#! /usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single 
restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! 
/usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wavegempak b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.waveinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.wavepostsbs @@ -0,0 +1,24 @@ +#! 
/usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.waveprep b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/runcmds b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/runcmds new file mode 100644 index 0000000000..714bc3036c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_c3.xml -d v17_p8_c3.db +rocotostat -w v17_p8_c3.xml -d v17_p8_c3.db diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/v17_p8_c3.crontab b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/v17_p8_c3.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/v17_p8_c3.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/v17_p8_c3.xml b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/v17_p8_c3.xml new file mode 100644 index 0000000000..abc6c7e6a7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_12wt_24h/v17_p8_c3.xml @@ -0,0 +1,151 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211090000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 01:00:00 + vjet + 252:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + 
&ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + vjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/12x12x2wg b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/12x12x2wg new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.aero b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.aeroanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.anal b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate 
Metop-C GNSSRO + if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.analcalc b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.analdiag b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.arch b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.atmanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.atmensanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.awips b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.awips @@ -0,0 +1,17 @@ +#! 
/usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.base b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.base new file mode 100644 index 0000000000..1a3f74593d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.base @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="vjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." +export assim_freq=6 +export PSLOT="v17_p8_c3_12x12_vjet_2wg_24h" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_c3" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
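As an aside on the RUN/CDUMP defaulting earlier in this file: the nested ${RUN:-${CDUMP:-"gfs"}} form falls back to CDUMP when RUN is unset, and to the literal "gfs" when both are unset. A minimal sketch of that expansion, with illustrative values only (gdas/enkfgdas are examples, not settings from this change):

    unset RUN CDUMP
    echo "${RUN:-${CDUMP:-gfs}}"    # -> gfs      (both unset)
    CDUMP=gdas
    echo "${RUN:-${CDUMP:-gfs}}"    # -> gdas     (RUN unset, CDUMP set)
    RUN=enkfgdas
    echo "${RUN:-${CDUMP:-gfs}}"    # -> enkfgdas (RUN wins once set)
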
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-168} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-168} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-168} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-168} +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
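The per-cycle forecast length above is resolved by building a variable name from the cycle hour and expanding it indirectly, and STEP_GFS then spaces the GFS cycles across the day. A minimal sketch of both patterns, assuming cyc is supplied by the job card and using the 24-hour values set in this file (fhmax_var is an illustrative helper name):

    cyc=00
    export FHMAX_GFS_00=24 FHMAX_GFS_06=24 FHMAX_GFS_12=24 FHMAX_GFS_18=24
    # eval-based form used in config.base
    FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}})
    # equivalent native indirect expansion, shown for comparison only
    fhmax_var="FHMAX_GFS_${cyc}"; echo "${!fhmax_var}"    # -> 24
    gfs_cyc=1
    echo $(( 24 / gfs_cyc ))                              # STEP_GFS -> 24 (one GFS cycle per day)
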
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
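For context on how these settings are consumed: a job sources config.base first, then its task config, and every task config added in this change in turn sources config.resources with its task name. A minimal sketch of that chain, using the wave init task from this change as the example (the exact call site in the job scripts is assumed here, not shown in this diff):

    export EXPDIR=${HOMEgfs}/FV3GFSwfm/${PSLOT}   # as composed above
    source "${EXPDIR}/config.base"                # common settings (this file)
    source "${EXPDIR}/config.waveinit"            # task settings; internally runs ". ${EXPDIR}/config.resources waveinit"
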
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
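The config.base.emc.dyn_* variants in this change keep @TOKEN@ placeholders (@MACHINE@, @SDATE@, @HPSSARCH@, and so on) that experiment setup fills in when it writes the experiment's config.base. Purely as an illustration of that substitution, and not the project's actual setup tooling, a hypothetical sed-based sketch with example values:

    # hypothetical one-off substitution; the real setup scripts manage these tokens
    sed -e "s|@MACHINE@|JET|" \
        -e "s|@SDATE@|2022111000|" \
        -e "s|@HPSSARCH@|YES|" \
        config.base.emc.dyn_jet > config.base
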
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
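A note on the APP handling above: the S2S* branch further inspects the APP string with bash regex matches, so a trailing A enables aerosols and a leading S2SW enables waves. A small sketch of those tests with an illustrative value (S2SWA is only an example APP setting):

    APP=S2SWA
    [[ "${APP}" =~ A$ ]]    && echo "aerosols on"    # matches: APP ends in A
    [[ "${APP}" =~ ^S2SW ]] && echo "waves on"       # matches: APP starts with S2SW
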
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
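For context, a small sketch of the DUMP_SUFFIX window defined earlier in this file (the date is hypothetical; YYYYMMDDHH stamps can be compared with -ge/-le because they are fixed-width integers):

CDATE="2019100112"   # hypothetical cycle inside the NCO GFS v15.3 parallel window
DUMP_SUFFIX=""
if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then
  DUMP_SUFFIX="p"    # the obs dump directory name gains the suffix (e.g. gdasp) via COM_OBSDMP_TMPL in config.com
fi
echo "${DUMP_SUFFIX}"   # prints "p" for this hypothetical date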
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
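A short sketch (hypothetical cycle, not part of the original file) of how the per-cycle forecast length and GFS cadence above resolve:

# Hypothetical: four GFS cycles per day, current cycle 12Z
gfs_cyc=4; cyc=12
FHMAX_GFS_12=168
FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}})   # indirect lookup of FHMAX_GFS_12 -> 168
if (( gfs_cyc != 0 )); then
  STEP_GFS=$(( 24 / gfs_cyc ))                # -> 6: a GFS forecast is launched every 6 hours
else
  STEP_GFS="0"
fi
echo "${FHMAX_GFS} ${STEP_GFS}"               # prints "168 6"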
+
+# run GLDAS to spin up land ICs
+export DO_GLDAS="NO"
+export gldas_cyc=00
+
+# When DO_GLDAS is set to YES, FHOUT must be 1
+if [[ ${DO_GLDAS} = "YES" ]]; then
+ export FHOUT=1
+fi
+
+# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA
+# export DO_WAVE="NO"
+# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE"
+# fi
+
+# Microphysics Options: 99-ZhaoCarr, 8-Thompson, 6-WSM6, 10-MG, 11-GFDL
+export imp_physics=@IMP_PHYSICS@
+
+# Shared parameters
+# DA engine
+export DO_JEDIVAR="NO"
+export DO_JEDIENS="NO"
+export DO_JEDIOCNVAR="NO"
+export DO_JEDILANDDA="NO"
+export DO_MERGENSST="NO"
+
+# Hybrid related
+export DOHYBVAR="@DOHYBVAR@"
+export NMEM_ENKF=@NMEM_ENKF@
+export NMEM_EFCS=30
+export SMOOTH_ENKF="NO"
+export l4densvar=".true."
+export lwrite4danl=".true."
+
+# EnKF output frequency
+if [[ ${DOHYBVAR} = "YES" ]]; then
+ export FHMIN_ENKF=3
+ export FHMAX_ENKF=9
+ export FHMAX_ENKF_GFS=120
+ export FHOUT_ENKF_GFS=3
+ if [ $l4densvar = ".true." ]; then
+ export FHOUT=1
+ export FHOUT_ENKF=1
+ else
+ export FHOUT_ENKF=3
+ fi
+fi
+
+# If running 3DVAR (no hybrid) with IAU
+if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then
+ export IAUFHRS="6"
+ export IAU_FHROT="3"
+ export IAU_FILTER_INCREMENTS=".true."
+ export IAUFHRS_ENKF="6"
+fi
+
+# Check if cycle is cold starting, DOIAU off, or free-forecast mode
+if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then
+ export IAU_OFFSET=0
+ export IAU_FHROT=0
+fi
+
+# Turn on NSST in anal and/or fcst steps, and turn off rtgsst
+export DONST="YES"
+if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi
+
+# The switch to apply SST elevation correction or not
+export nst_anl=.true.
+
+# Make the nsstbufr file on the fly or use the GDA version
+export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@"
+
+# Make the aircraft prepbufr file on the fly or use the GDA version
+export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@"
+
+# Analysis increments to zero in CALCINCEXEC
+export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'"
+
+# Write analysis files for early cycle EnKF
+export DO_CALC_INCREMENT_ENKF_GFS="YES"
+
+# Stratospheric increments to zero
+export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'"
+export INCVARS_EFOLD="5"
+
+# Switch to generate netcdf or binary diagnostic files. If not specified,
+# the scripts default to binary diagnostic files. Set diagnostic file
+# variables here since they are used in both DA and vrfy jobs
+export netcdf_diag=".true."
+export binary_diag=".false."
+
+# Verification options
+export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp
+export DO_FIT2OBS="NO" # Run fit to observations package
+
+# Archiving options
+export HPSSARCH="@HPSSARCH@" # save data to HPSS archive
+export LOCALARCH="@LOCALARCH@" # save data to local archive
+if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then
+ echo "Both HPSS and local archiving selected. Please choose one or the other."
+ exit 2
+fi
+export ARCH_CYC=00 # Archive data at this cycle for warm_start capability
+export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability
+export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability
+
+export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh.
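A minimal sketch (hypothetical first cycle, not part of the original file) of how the cold-start/IAU check earlier in this file evaluates when a cycled experiment begins from a cold start:

MODE="cycled"; SDATE="2021032100"; CDATE="2021032100"
EXP_WARM_START=".false."; DOIAU="YES"
if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] \
   || [[ "${DOIAU}" = "NO" ]] \
   || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]]; then
  echo "cold start: IAU disabled for this cycle (IAU_OFFSET=0, IAU_FHROT=0)"
fi
# Later cycles have CDATE != SDATE, so IAU_OFFSET=6 and IAU_FHROT=3 are kept.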
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.com b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare 
-rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.earc b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ecen b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.echgres b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ediag b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.efcs b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.eobs b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.epos b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.esfc b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.eupd b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." 
+ export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." 
]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. ### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. 
+ export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." 
+ +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.fcst_orig @@ -0,0 +1,364 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." 
+export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." 
]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.fit2obs b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.gempak b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.getic b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.gldas b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ice b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.init b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.init @@ -0,0 +1,54 @@ +#! 
/usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.landanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.landanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.landanlinit @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.landanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.metp b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
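+# Illustrative example (dates assumed): with CDUMP=gfs and an initialization time of 2021122500, a 24-hour lead file under model_file_format above would be expected to resolve to pgbf24.gfs.2021122500.grib2; the {init?fmt=...} and {lead?fmt=...} tokens are METplus-style templates filled in at run time by the verif-global package referenced above.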
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.nsst b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocn b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocnanal b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocnanalbmat 
b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocnpost b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ocnpost @@ -0,0 +1,16 @@ +#! 
/usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.post b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.postsnd b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.prep b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.resources b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components; they usually land on the atmosphere tasks. + # However, it is suggested to limit the mediator PETS to 300, as a larger count may cause slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + unset NTASKS_TOT
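+ # Worked example (illustrative; ntasks_quilt=48 is an assumed value, not one set in this file): + # for the gfs dump at C768 with the layout used in this experiment (layout_x_gfs=12, layout_y_gfs=12, nthreads_fv3_gfs=4), + # ntasks_fv3 = 12 * 12 * 6 tiles = 864, so FV3PETS = 864 * 4 = 3456. + # With the assumed ntasks_quilt=48, QUILTPETS = 48 * 4 = 192 and ATMPETS = 3456 + 192 = 3648, + # and MEDPETS would default to ATMPETS and then be capped at 300 by the limit above.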
+ +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.sfcanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ufs b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ufs new file mode 100644 index 0000000000..ad28ce429b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.ufs @@ -0,0 +1,372 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
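+ # NOTE (illustrative): layout_x * layout_y is the MPI decomposition of one
+ # cubed-sphere face, so the forecast itself uses layout_x * layout_y * 6
+ # compute tasks (derived further below as ntasks_fv3).  With the 12x12 GFS
+ # layout set above:
+ #   (( demo_tasks = 12 * 12 * 6 ))   # -> 864 compute tasks for the gfs run
+ # The write (quilt) tasks come from the WRITE_GROUP* settings just below.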
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.vrfy b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wafs b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wafsblending b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wave b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. 
$EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wavegempak b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.waveinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.waveprep b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/runcmds b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/runcmds new file mode 100644 index 0000000000..714bc3036c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_c3.xml -d v17_p8_c3.db +rocotostat -w v17_p8_c3.xml -d v17_p8_c3.db diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/v17_p8_c3.crontab b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/v17_p8_c3.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/v17_p8_c3.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/v17_p8_c3.xml b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/v17_p8_c3.xml new file mode 100644 index 0000000000..1f7fdf1173 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_vjet_2wg_24h/v17_p8_c3.xml @@ -0,0 +1,151 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211090000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 02:00:00 + vjet + 246:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + 
+ + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + vjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/12x12 b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/12x12 new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.aero b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.aeroanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.anal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ 
"${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.analcalc b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.analdiag b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.arch b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.atmanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.atmanlrun @@ -0,0 +1,11 @@ +#! 
/usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.atmensanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.awips b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. 
of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.base b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.base new file mode 100644 index 0000000000..369d8d7e4e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.base @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." 
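+# NOTE (illustrative): SDATE and EDATE are the first and last cycles of the
+# experiment in YYYYMMDDHH form; with SDATE equal to EDATE, MODE set to
+# "forecast-only" above, and EXP_WARM_START=".false.", this experiment runs a
+# single cold-started forecast cycle at 2022111000.  assim_freq (just below)
+# is the cycling interval in hours.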
+export assim_freq=6 +export PSLOT="v17_p8_c3_12x12_xjet" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_c3" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
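+ # NOTE (illustrative): the APP string is interpreted by suffix -- the "A$"
+ # match above switches on aerosols and the "^S2SW" match adds the wave
+ # component, so e.g. APP=S2SWA would leave DO_OCN, DO_ICE, DO_WAVE and
+ # DO_AERO all set to "YES".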
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is 
defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
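A quick aside on the RUN default a few lines above: export RUN=${RUN:-${CDUMP:-"gfs"}} chains two ${var:-default} expansions, so an explicit RUN wins, otherwise CDUMP stands in as the proxy, otherwise "gfs" is used. A minimal sketch with made-up values (not part of config.base):

# Illustration only: how the nested defaults in RUN=${RUN:-${CDUMP:-"gfs"}} resolve.
unset RUN CDUMP
echo "${RUN:-${CDUMP:-gfs}}"   # -> gfs       (neither variable is set)
CDUMP="gdas"
echo "${RUN:-${CDUMP:-gfs}}"   # -> gdas      (CDUMP acts as the RUN proxy)
RUN="enkfgdas"
echo "${RUN:-${CDUMP:-gfs}}"   # -> enkfgdas  (an explicit RUN always wins)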
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
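For reference, the per-cycle forecast length set above with export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) is an indirect lookup: the variable name FHMAX_GFS_${cyc} is assembled first and only then expanded. A minimal sketch with assumed values; bash's ${!name} indirection gives the same result without eval:

# Illustration only: the per-cycle FHMAX_GFS lookup, with assumed values.
FHMAX_GFS_00=120; FHMAX_GFS_06=120; FHMAX_GFS_12=120; FHMAX_GFS_18=120
cyc=12
via_eval=$(eval echo "\${FHMAX_GFS_${cyc}}")   # what config.base does
name="FHMAX_GFS_${cyc}"
via_indirect=${!name}                          # equivalent bash indirect expansion
echo "${via_eval} ${via_indirect}"             # -> 120 120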
+ +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. 
+export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
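The DUMP_SUFFIX window earlier in this file works because 10-digit YYYYMMDDHH stamps order numerically the same way they order chronologically, so two integer comparisons are enough to bound the NCO GFS v15.3 parallel period. A standalone sketch with a made-up CDATE:

# Illustration only: the DUMP_SUFFIX date-window test, with a made-up CDATE.
CDATE=2019100312
if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then
  echo "inside the window: DUMP_SUFFIX would be set to 'p'"
else
  echo "outside the window: DUMP_SUFFIX stays empty"
fi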
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
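A note on the IAU block just above: IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) simply takes the first forecast hour out of the IAUFHRS list ("3,6,9" gives 3). The cut -c1 form relies on that first hour being a single digit; the sketch below shows the behaviour and a digit-count-safe alternative:

# Illustration only: deriving the IAU rotation hour from the IAUFHRS list.
IAUFHRS="3,6,9"
echo "${IAUFHRS}" | cut -c1   # -> 3  (first character; assumes a single-digit first hour)
echo "${IAUFHRS%%,*}"         # -> 3  (everything before the first comma; would also work
                              #        for a hypothetical two-digit first hour)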
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
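The IAU cold-start test earlier in this file ORs three cases together: a cycled run at its very first cycle started cold, IAU switched off outright, and a cold-started forecast-only run; any one of them zeroes IAU_OFFSET and IAU_FHROT. Purely as a readability aid, here is the same logic restated as a small helper; the function name and variable values are hypothetical, not part of the workflow:

# Illustration only: the IAU cold-start check, restated as a function.
MODE="cycled"; SDATE="2021122500"; CDATE="2021122500"; EXP_WARM_START=".false."; DOIAU="YES"
iau_cold_start() {
  [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && "${EXP_WARM_START}" = ".false." ]] && return 0
  [[ "${DOIAU}" = "NO" ]] && return 0
  [[ "${MODE}" = "forecast-only" && "${EXP_WARM_START}" = ".false." ]] && return 0
  return 1
}
if iau_cold_start; then echo "would set IAU_OFFSET=0 and IAU_FHROT=0"; fi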
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
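About the ERRSCRIPT default earlier in this file: the commented-out alternative shows that NCO production would point it at err_chk, while the EMC-parallel fallback 'eval [[ $err = 0 ]]' reduces error checking to a plain status test. The sketch below shows the kind of invocation that default supports; the call site and the failing task are hypothetical, the real call sites live in the ex-scripts and are not quoted here:

# Illustration only: how an ERRSCRIPT of 'eval [[ $err = 0 ]]' behaves when invoked.
# some_step and the calling pattern are assumptions, not taken from the ex-scripts.
ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'}
some_step() { return 3; }        # stand-in for a task that fails
some_step
export err=$?
${ERRSCRIPT} || { echo "step failed with status ${err}"; exit "${err}"; }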
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.com b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx 
COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
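Looking back at the config.com templates above: they are kept in single quotes precisely so ${ROTDIR}, ${RUN}, ${YMD}, ${HH} and ${MEMDIR} survive until runtime, when generate_com() from ush/preamble.sh fills them in. generate_com itself is not reproduced here; the eval below is only a stand-in, with made-up paths and dates, to show what the deferred expansion produces:

# Illustration only: expanding one config.com template by hand. The real substitution
# is done by generate_com() (ush/preamble.sh); values below are made up.
ROTDIR=/path/to/comrot
COM_ATMOS_ANALYSIS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}/analysis/atmos'
RUN=gdas; YMD=20211225; HH=00; MEMDIR=mem001
COM_ATMOS_ANALYSIS=$(eval echo "${COM_ATMOS_ANALYSIS_TMPL}")
echo "${COM_ATMOS_ANALYSIS}"   # -> /path/to/comrot/gdas.20211225/00/mem001/analysis/atmos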
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.earc b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ecen b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.echgres b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ediag b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.efcs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.eobs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.epos b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.esfc b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.eupd b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.fcst b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.fcst new file mode 100644 index 0000000000..55745cafde --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.fcst @@ -0,0 +1,429 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model_haiqin.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." 
+ export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".false." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF +#JKH export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export imfdeepcnv=5 + export imfshalcnv=5 +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +#JKH export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." 
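+# The branches below key off ${imp_physics}, which is assumed to be set
+# upstream (e.g., in config.base). Mapping used in this file:
+#   99 -> Zhao-Carr, 6 -> WSM6, 8 -> Thompson, 11 -> GFDL
+# Illustrative only (not set here): selecting Thompson microphysics upstream
+# would look like:
+#   export imp_physics=8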
+ +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + #JKH ??? export dt_inner=40. ### JKH - 10dec + #JKH export sedi_semi=??? + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + #JKHexport FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
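+  # Worked example (hypothetical values) of the restart list assembled above:
+  #   restart_interval_gfs=12, DOIAU=YES, IAU_OFFSET=6, FHMAX_GFS=48
+  #   first restart hour = 12 + 6/2 = 15, then every 12 h while <= 48
+  #   => restart_interval="15 27 39"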
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.fcst.org b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.fcst.org new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.fcst.org @@ -0,0 +1,364 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. 
+ if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." 
+ +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." 
+export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. 
### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." 
+ export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. ### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
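+  # Illustration (hypothetical values) of the aerosol cadence check above:
+  #   with DO_AERO=YES, STEP_GFS=6 and restart_interval="24 48", hour 6 is not
+  #   in the list, so it is prepended: restart_interval="6 24 48"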
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.fit2obs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.gempak b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.getic b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.gldas b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ice b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.init b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.init @@ -0,0 +1,54 @@ +#! 
/usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.landanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.landanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.landanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.metp b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.nsst b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocn b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable is determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is time interval for applying increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocnanal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size reolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocnanalbmat b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b 
--- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocnpost b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.post b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.postsnd b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.prep b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.resources b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components; they usually land on the atmosphere tasks. + # However, it is suggested to limit mediator PETS to 300, as a larger count may slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + 
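+  # Worked example of the PETS arithmetic above (illustrative only, not executed).
+  # Assuming the C384 gfs defaults set in config.ufs in this directory
+  # (layout_x_gfs=8, layout_y_gfs=8, nthreads_fv3_gfs=2, WRITE_GROUP_GFS=2,
+  # WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4):
+  #   ntasks_fv3_gfs   = 8 * 8 * 6                       = 384
+  #   FV3PETS          = ntasks_fv3 * nthreads_fv3       = 384 * 2 = 768
+  #   ntasks_quilt_gfs = WRITE_GROUP_GFS * (4 * 6)       = 2 * 24  = 48
+  #   QUILTPETS        = ntasks_quilt * nthreads_fv3     = 48 * 2  = 96
+  #   ATMPETS          = FV3PETS + QUILTPETS             = 768 + 96 = 864
+  # NTASKS_TOT then grows by WAVPETS, OCNPETS, and ICEPETS if those components run.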
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
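+ # Illustrative expansion of the eval pattern used for the arch/earc/getic
+ # block above (not executed here): with step=arch, those eval lines become
+ #   export wtime_arch='06:00:00'
+ #   export npe_arch=1
+ #   export npe_node_arch=1
+ #   export nth_arch=1
+ #   export memory_arch=50GB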
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.sfcanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ufs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ufs new file mode 100644 index 0000000000..0d5fd5da86 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.ufs @@ -0,0 +1,371 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + 
MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.vrfy b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wafs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wafsblending b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wave b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEM would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wavegempak b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wavegempak @@ -0,0 +1,13 @@ +#! 
/usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.waveinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.waveprep b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/input.nml b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/input.nml new file mode 100644 index 0000000000..891c60fee7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/input.nml @@ -0,0 +1,386 @@ +&atmos_model_nml + blocksize = 32 + chksum_debug = .false. + dycore_only = .false. + ccpp_suite = 'FV3_GFS_v17_p8_c3' +/ + +&diag_manager_nml + prepend_date = .false. + max_output_fields = 310 +/ + +&fms_io_nml + checksum_required = .false. + max_files_r = 100 + max_files_w = 100 +/ + +&mpp_io_nml +shuffle=1 +deflate_level=1 +/ + +&fms_nml + clock_grain = 'ROUTINE' + domains_stack_size = 3000000 + print_memory_usage = .false. +/ + +&fv_core_nml + layout = 3,8 + io_layout = 1,1 + npx = 97 + npy = 97 + ntiles = 6 + npz = 127 + dz_min = 6 + psm_bc = 1 + grid_type = -1 + make_nh = .true. + fv_debug = .false. + range_warn = .true. + reset_eta = .false. + n_sponge = 42 + nudge_qv = .true. + nudge_dz = .false. + tau = 10.0 + rf_cutoff = 7.5e2 + d2_bg_k1 = 0.20 + d2_bg_k2 = 0.04 + kord_tm = -9 + kord_mt = 9 + kord_wz = 9 + kord_tr = 9 + hydrostatic = .false. + phys_hydrostatic = .false. + use_hydro_pressure = .false. + beta = 0. + a_imp = 1. + p_fac = 0.1 + k_split = 2 + n_split = 6 + nwat = 6 + na_init = 1 + d_ext = 0. + dnats = 0 + fv_sg_adj = 450 + d2_bg = 0. + nord = 2 + dddmp = 0.1 + d4_bg = 0.12 + vtdm4 = 0.02 + delt_max = 0.002 + ke_bg = 0. + do_vort_damp = .true. + external_ic = .true. + external_eta = .true. + gfs_phil = .false. + nggps_ic = .true. + mountain = .false. + ncep_ic = .false. + d_con = 1. + hord_mt = 5 + hord_vt = 5 + hord_tm = 5 + hord_dp = -5 + hord_tr = 8 + adjust_dry_mass = .false. + dry_mass=98320.0 + consv_te = 1. + do_sat_adj = .false. + consv_am = .false. + fill = .true. + dwind_2d = .false. + print_freq = 6 + warm_start = .false. + no_dycore = .false. + z_tracer = .true. + agrid_vel_rst = .true. + read_increment = .false. + res_latlon_dynamics = '' +/ + +&external_ic_nml + filtered_terrain = .true. + levp = 128 + gfs_dwinds = .true. + checker_tr = .false. 
+ nt_checker = 0 +/ + +&gfs_physics_nml + fhzero = 6 + h2o_phys = .true. + ldiag3d = .false. + qdiag3d = .false. + print_diff_pgr = .false. + fhcyc = 24 + use_ufo = .true. + pre_rad = .false. + imp_physics = 8 + iovr = 3 + ltaerosol = .false. + lradar = .false. + ttendlim = -999 + dt_inner = 720 + sedi_semi = .true. + decfl = 10 + oz_phys = .false. + oz_phys_2015 = .true. + lsoil_lsm = 4 + do_mynnedmf = .false. + do_mynnsfclay = .false. + icloud_bl = 1 + bl_mynn_edmf = 1 + bl_mynn_tkeadvect = .true. + bl_mynn_edmf_mom = 1 + do_ugwp = .false. + do_tofd = .false. + gwd_opt = 2 + do_ugwp_v0 = .true. + do_ugwp_v1 = .false. + do_ugwp_v0_orog_only = .false. + do_ugwp_v0_nst_only = .false. + do_gsl_drag_ls_bl = .false. + do_gsl_drag_ss = .true. + do_gsl_drag_tofd = .false. + do_ugwp_v1_orog_only = .false. + min_lakeice = 0.15 + min_seaice = 0.15 + use_cice_alb = .false. + pdfcld = .false. + fhswr = 3600. + fhlwr = 3600. + ialb = 2 + iems = 2 + iaer = 1011 + icliq_sw = 2 + ico2 = 2 + isubc_sw = 2 + isubc_lw = 2 + isol = 2 + lwhtr = .true. + swhtr = .true. + cnvgwd = .true. + shal_cnv = .true. + cal_pre = .false. + redrag = .true. + dspheat = .true. + hybedmf = .false. + satmedmf = .true. + isatmedmf = 1 + lheatstrg = .false. + lseaspray = .true. + random_clds = .false. + trans_trac = .true. + cnvcld = .true. + imfshalcnv = 5 + imfdeepcnv = 5 + icoldpool = 0 + ras = .false. + cdmbgwd = 0.14,1.8,1.0,1.0 + prslrd0 = 0. + ivegsrc = 1 + isot = 1 + lsoil = 4 + lsm = 2 + iopt_dveg = 4 + iopt_crs = 2 + iopt_btr = 1 + iopt_run = 1 + iopt_sfc = 3 + iopt_trs = 2 + iopt_frz = 1 + iopt_inf = 1 + iopt_rad = 3 + iopt_alb = 1 + iopt_snf = 4 + iopt_tbot = 2 + iopt_stc = 3 + debug = .false. + nstf_name = 2,1,0,0,0 + nst_anl = .true. + psautco = 0.0008,0.0005 + prautco = 0.00015,0.00015 + lgfdlmprad = .false. + effr_in = .true. + ldiag_ugwp = .false. + fscav_aero = "*:0.3","so2:0.0","msa:0.0","dms:0.0","nh3:0.4","nh4:0.6","bc1:0.6","bc2:0.6","oc1:0.4","oc2:0.4","dust1:0.6","dust2:0.6","dust3:0.6","dust4:0.6","dust5:0.6","seas1:0.5","seas2:0.5","seas3:0.5","seas4:0.5","seas5:0.5" + do_sppt = .false. + do_shum = .false. + do_skeb = .false. + do_RRTMGP = .false. + active_gases = 'h2o_co2_o3_n2o_ch4_o2' + ngases = 6 + lw_file_gas = 'rrtmgp-data-lw-g128-210809.nc' + lw_file_clouds = 'rrtmgp-cloud-optics-coeffs-lw.nc' + sw_file_gas = 'rrtmgp-data-sw-g112-210809.nc' + sw_file_clouds = 'rrtmgp-cloud-optics-coeffs-sw.nc' + rrtmgp_nGptsSW = 112 + rrtmgp_nGptsLW = 128 + rrtmgp_nBandsLW = 16 + rrtmgp_nBandsSW = 14 + doGP_cldoptics_LUT = .true. + doGP_lwscat = .true. + doGP_sgs_cnv = .true. + use_med_flux = .false. + frac_grid = .true. + cplchm = .false. + cplflx = .false. + cplice = .false. + cplwav = .false. + cplwav2atm = .false. + cpllnd = .false. + do_ca = .true. + ca_global = .false. + ca_sgs = .true. + nca = 1 + ncells = 5 + nlives = 12 + nseed = 1 + nfracseed = 0.5 + nthresh = 18 + ca_trigger = .true. + nspinup = 1 + iseed_ca = 12345 + lndp_type = 0 + n_var_lndp = 0 +/ + +&cires_ugwp_nml + knob_ugwp_solver = 2 + knob_ugwp_source = 1,1,0,0 + knob_ugwp_wvspec = 1,25,25,25 + knob_ugwp_azdir = 2,4,4,4 + knob_ugwp_stoch = 0,0,0,0 + knob_ugwp_effac = 1,1,1,1 + knob_ugwp_doaxyz = 1 + knob_ugwp_doheat = 1 + knob_ugwp_dokdis = 1 + knob_ugwp_ndx4lh = 1 + knob_ugwp_version = 0 + launch_level = 54 +/ + +&gfdl_cloud_microphysics_nml + sedi_transport = .true. + do_sedi_heat = .false. + rad_snow = .true. + rad_graupel = .true. + rad_rain = .true. + const_vi = .false. + const_vs = .false. + const_vg = .false. + const_vr = .false. 
+ vi_max = 1. + vs_max = 2. + vg_max = 12. + vr_max = 12. + qi_lim = 1. + prog_ccn = .false. + do_qa = .true. + fast_sat_adj = .true. + tau_l2v = 225. + tau_v2l = 150. + tau_g2v = 900. + rthresh = 10.e-6 ! This is a key parameter for cloud water + dw_land = 0.16 + dw_ocean = 0.10 + ql_gen = 1.0e-3 + ql_mlt = 1.0e-3 + qi0_crt = 8.0E-5 + qs0_crt = 1.0e-3 + tau_i2s = 1000. + c_psaci = 0.05 + c_pgacs = 0.01 + rh_inc = 0.30 + rh_inr = 0.30 + rh_ins = 0.30 + ccn_l = 300. + ccn_o = 100. + c_paut = 0.5 + c_cracw = 0.8 + use_ppm = .false. + use_ccn = .true. + mono_prof = .true. + z_slope_liq = .true. + z_slope_ice = .true. + de_ice = .false. + fix_negative = .true. + icloud_f = 1 + mp_time = 150. + reiflag = 2 +/ + +&interpolator_nml + interp_method = 'conserve_great_circle' +/ + +&namsfc + FNGLAC = 'global_glacier.2x2.grb' + FNMXIC = 'global_maxice.2x2.grb' + FNTSFC = 'RTGSST.1982.2012.monthly.clim.grb' + FNSNOC = 'global_snoclim.1.875.grb' + FNZORC = 'igbp' + FNALBC = 'C96.snowfree_albedo.tileX.nc' + FNALBC2 = 'C96.facsf.tileX.nc' + FNAISC = 'IMS-NIC.blended.ice.monthly.clim.grb' + FNTG3C = 'C96.substrate_temperature.tileX.nc' + FNVEGC = 'C96.vegetation_greenness.tileX.nc' + FNVETC = 'C96.vegetation_type.tileX.nc' + FNSOTC = 'C96.soil_type.tileX.nc' + FNSMCC = 'global_soilmgldas.statsgo.t1534.3072.1536.grb' + FNMSKH = 'global_slmask.t1534.3072.1536.grb' + FNTSFA = '' + FNACNA = '' + FNSNOA = '' + FNVMNC = 'C96.vegetation_greenness.tileX.nc' + FNVMXC = 'C96.vegetation_greenness.tileX.nc' + FNSLPC = 'C96.slope_type.tileX.nc' + FNABSC = 'C96.maximum_snow_albedo.tileX.nc' + LDEBUG =.false. + FSMCL(2) = 99999 + FSMCL(3) = 99999 + FSMCL(4) = 99999 + LANDICE = .false. + FTSFS = 90 + FAISL = 99999 + FAISS = 99999 + FSNOL = 99999 + FSNOS = 99999 + FSICL = 0 + FSICS = 0 + FTSFL = 99999 + FVETL = 99999 + FSOTL = 99999 + FvmnL = 99999 + FvmxL = 99999 + FSLPL = 99999 + FABSL = 99999 +/ + +&fv_grid_nml + grid_file = 'INPUT/grid_spec.nc' +/ + +&nam_stochy +/ + +&nam_sfcperts + lndp_type = 0 + lndp_model_type = 0 + LNDP_TAU=21600, + LNDP_LSCALE=500000, + ISEED_LNDP=2010, + lndp_var_list = 'XXX' + lndp_prt_list = -999 +/ + +&MOM_input_nml + output_directory = 'MOM6_OUTPUT/', + input_filename = 'n' + restart_input_dir = 'INPUT/', + restart_output_dir = 'RESTART/', + parameter_filename = 'INPUT/MOM_input', + 'INPUT/MOM_override'/ diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/jkhINFO b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/jkhINFO new file mode 100644 index 0000000000..e37930dc98 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/jkhINFO @@ -0,0 +1,4 @@ + +try c3 + - set progsigma to .true. for now + diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/logs/2022111000.log b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/logs/2022111000.log new file mode 100644 index 0000000000..f792327c1e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/logs/2022111000.log @@ -0,0 +1,75 @@ +2023-05-28 01:31:38 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 01:31:38 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:36854 +2023-05-28 01:33:22 +0000 :: fe3 :: Submission status of previously pending gfsfcst is failure! 
sbatch: error: Batch job submission failed: Invalid qos specification +2023-05-28 01:33:22 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 01:33:23 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28483216 +2023-05-28 01:34:46 +0000 :: fe3 :: Task gfsfcst, jobid=28483216, in state QUEUED (PENDING) +2023-05-28 06:05:19 +0000 :: fe2 :: Submitting gfsfcst +2023-05-28 06:05:22 +0000 :: fe2 :: Submission of gfsfcst succeeded, jobid=28492564 +2023-05-28 06:38:42 +0000 :: fe3 :: Task gfsfcst, jobid=28492564, in state FAILED (FAILED), ran for 219.0 seconds, exit status=11, try=1 (of 2) +2023-05-28 06:38:42 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 06:38:42 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:33495 +2023-05-28 07:30:29 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28493642 +2023-05-28 07:30:30 +0000 :: fe3 :: Task gfsfcst, jobid=28493642, in state DEAD (TIMEOUT), ran for 2404.0 seconds, exit status=15, try=2 (of 2) +2023-05-28 07:34:01 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 07:34:01 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:39381 +2023-05-28 19:08:14 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28495562 +2023-05-28 19:08:14 +0000 :: fe3 :: Task gfsfcst, jobid=28495562, in state FAILED (TIMEOUT), ran for 2413.0 seconds, exit status=255, try=1 (of 2) +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f000-f000 +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f006-f006 +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f012-f012 +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f018-f018 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28521683 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f000-f000 succeeded, jobid=28521684 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f006-f006 succeeded, jobid=28521685 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f012-f012 succeeded, jobid=28521686 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f018-f018 succeeded, jobid=28521687 +2023-05-29 07:52:24 +0000 :: fe3 :: Submitting gfsfcst +2023-05-29 07:52:24 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28547487 +2023-05-29 08:33:19 +0000 :: fe3 :: Task gfsfcst, jobid=28547487, in state FAILED (FAILED), ran for 263.0 seconds, exit status=11, try=1 (of 2) +2023-05-29 08:33:19 +0000 :: fe3 :: Submitting gfsfcst +2023-05-29 08:33:19 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:46395 +2023-05-29 18:37:39 +0000 :: fe5 :: Submitting gfsfcst +2023-05-29 18:37:39 +0000 :: fe5 :: Submission of gfsfcst succeeded, jobid=28570547 +2023-05-29 18:39:30 +0000 :: fe5 :: Task gfsfcst, jobid=28570547, in state FAILED (FAILED), ran for 12.0 seconds, exit status=32512, try=1 (of 2) +2023-05-29 18:39:30 +0000 :: fe5 :: Submitting gfsfcst +2023-05-29 18:39:30 +0000 :: fe5 :: Submission status of gfsfcst is pending at druby://fe5:34989 +2023-05-29 22:25:28 +0000 :: fe2 :: Submission status of previously pending gfsfcst is success, jobid=28570561 +2023-05-29 22:25:28 +0000 :: fe2 :: Task gfsfcst, jobid=28570561, in state DEAD (FAILED), ran for 212.0 seconds, exit status=11, try=2 (of 2) +2023-05-29 22:29:20 +0000 :: fe2 :: Submitting gfsfcst +2023-05-29 22:29:20 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:35151 +2023-05-29 23:06:16 +0000 :: fe2 :: Submitting gfsfcst 
+2023-05-29 23:06:17 +0000 :: fe2 :: Submission of gfsfcst failed! sbatch: error: QOSMaxWallDurationPerJobLimit +sbatch: error: Batch job submission failed: Job violates accounting/QOS policy (job submit limit, user's size and/or time limits) +2023-05-29 23:06:38 +0000 :: fe2 :: Submitting gfsfcst +2023-05-29 23:06:38 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:43226 +2023-05-29 23:07:07 +0000 :: fe2 :: Submission status of previously pending gfsfcst is success, jobid=28579860 +2023-05-29 23:07:07 +0000 :: fe2 :: Task gfsfcst, jobid=28579860, in state QUEUED (PENDING) +2023-05-29 23:07:56 +0000 :: fe2 :: Task gfsfcst, jobid=28579860, in state FAILED (CANCELLED), ran for 0.0 seconds, exit status=255, try=1 (of 2) +2023-05-29 23:07:56 +0000 :: fe2 :: Submitting gfsfcst +2023-05-29 23:07:56 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:42906 +2023-05-29 23:08:42 +0000 :: fe2 :: Submission status of previously pending gfsfcst is success, jobid=28579863 +2023-05-29 23:08:42 +0000 :: fe2 :: Task gfsfcst, jobid=28579863, in state QUEUED (PENDING) +2023-05-29 23:10:21 +0000 :: fe2 :: Task gfsfcst, jobid=28579863, in state DEAD (FAILED), ran for 56.0 seconds, exit status=35072, try=2 (of 2) +2023-05-29 23:11:07 +0000 :: fe2 :: Submitting gfsfcst +2023-05-29 23:11:07 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:45028 +2023-05-30 08:17:50 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28580141 +2023-05-30 08:17:51 +0000 :: fe3 :: Task gfsfcst, jobid=28580141, in state FAILED (FAILED), ran for 2445.0 seconds, exit status=11, try=1 (of 2) +2023-05-30 08:17:51 +0000 :: fe3 :: Submitting gfsfcst +2023-05-30 08:17:51 +0000 :: fe3 :: Submitting gfspost_f000-f006 +2023-05-30 08:17:51 +0000 :: fe3 :: Submitting gfspost_f012-f018 +2023-05-30 08:17:51 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28599193 +2023-05-30 08:17:51 +0000 :: fe3 :: Submission of gfspost_f000-f006 succeeded, jobid=28599194 +2023-05-30 08:17:52 +0000 :: fe3 :: Submission of gfspost_f012-f018 succeeded, jobid=28599195 +2023-05-31 08:40:56 +0000 :: fe2 :: Task gfsfcst, jobid=28599193, in state DEAD (FAILED), ran for 2273.0 seconds, exit status=11, try=2 (of 2) +2023-05-31 08:40:56 +0000 :: fe2 :: Task gfspost_f000-f006, jobid=28599194, in state SUCCEEDED (COMPLETED), ran for 257.0 seconds, exit status=0, try=1 (of 2) +2023-05-31 08:40:56 +0000 :: fe2 :: Task gfspost_f012-f018, jobid=28599195, in state SUCCEEDED (COMPLETED), ran for 269.0 seconds, exit status=0, try=1 (of 2) +2023-05-31 08:41:14 +0000 :: fe2 :: Forcibly submitting gfsfcst +2023-05-31 08:41:14 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:42295 +2023-05-31 08:44:58 +0000 :: fe2 :: Submitting gfsfcst +2023-05-31 08:44:59 +0000 :: fe2 :: Submitting gfspost_f000-f006 +2023-05-31 08:44:59 +0000 :: fe2 :: Submitting gfspost_f012-f018 +2023-05-31 08:44:59 +0000 :: fe2 :: Submission of gfsfcst succeeded, jobid=28651211 +2023-05-31 08:44:59 +0000 :: fe2 :: Submission of gfspost_f000-f006 succeeded, jobid=28651212 +2023-05-31 08:44:59 +0000 :: fe2 :: Submission of gfspost_f012-f018 succeeded, jobid=28651213 diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/runcmds b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/runcmds new file mode 100644 index 0000000000..714bc3036c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_c3.xml -d v17_p8_c3.db +rocotostat -w v17_p8_c3.xml -d v17_p8_c3.db 
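For reference, when a task such as gfsfcst goes DEAD in the log above, Rocoto can be queried and the task resubmitted by hand. A minimal sketch using the same XML/database pair as runcmds; the cycle and task names below are illustrative and not part of this patch:

rocotocheck  -w v17_p8_c3.xml -d v17_p8_c3.db -c 202211100000 -t gfsfcst   # report why the task has not been submitted or why it failed
rocotorewind -w v17_p8_c3.xml -d v17_p8_c3.db -c 202211100000 -t gfsfcst   # reset the task's state so the workflow can retry it
rocotoboot   -w v17_p8_c3.xml -d v17_p8_c3.db -c 202211100000 -t gfsfcst   # force an immediate (re)submission of the task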
diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/v17_p8_c3.crontab b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/v17_p8_c3.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/v17_p8_c3.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/v17_p8_c3.db b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/v17_p8_c3.db new file mode 100644 index 0000000000..62db7c1343 Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/v17_p8_c3.db differ diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/v17_p8_c3.xml b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/v17_p8_c3.xml new file mode 100644 index 0000000000..faa7b70b0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/v17_p8_c3.xml @@ -0,0 +1,154 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + hfv3gfs + + batch + 05:00:00 + 184:ppn=24:tpp=1 + xjet + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f006 _f012-f018 _f024-f030 _f036-f042 _f048-f054 _f060-f066 _f072-f078 _f084-f090 _f096-f102 _f108-f114 _f120-f126 _f132-f138 _f144-f150 _f156-f162 _f168-f168 + f006 f018 f030 f042 f054 f066 f078 f090 f102 f114 f126 f138 f150 f162 f168 + f000_f006 f012_f018 f024_f030 f036_f042 f048_f054 f060_f066 f072_f078 f084_f090 f096_f102 f108_f114 f120_f126 f132_f138 f144_f150 f156_f162 f168_f168 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/v17_p8_c3_lock.db b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/v17_p8_c3_lock.db new file mode 100644 index 0000000000..ccecceeb4b Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet/v17_p8_c3_lock.db differ diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/12x12x1wgx14wt b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/12x12x1wgx14wt new 
file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.aero b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: '<tracer name>:<factor>'. Use <tracer name> = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. 
$EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.aerosol_init @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.anal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. 
in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. 
+ # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assimilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time. + # + # Turn off assimilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.analcalc b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.analdiag b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. $EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.arch b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. 
$EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.awips b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. 
of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.base b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.base new file mode 100644 index 0000000000..ad6c0cb632 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.base @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." 
+export assim_freq=6 +export PSLOT="v17_p8_c3_12x12_xjet_1wg_14wt" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_c3" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${APP}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-168} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-168} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-168} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-168} +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling: when DO_GLDAS is set, FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." 
+ export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is 
defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
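# [Editor's note -- illustrative sketch, not part of this change]
# The gfs_cyc value above drives the GFS launch interval computed further
# below as STEP_GFS=$(( 24 / gfs_cyc )):
#   gfs_cyc=1 -> STEP_GFS=24  (00Z only)
#   gfs_cyc=2 -> STEP_GFS=12  (00Z and 12Z)
#   gfs_cyc=4 -> STEP_GFS=6   (all four cycles)
#   gfs_cyc=0 -> STEP_GFS=0   (no GFS forecast cycles)
for n in 1 2 4; do echo "gfs_cyc=${n} -> STEP_GFS=$(( 24 / n ))"; done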
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
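# [Editor's note -- assumption, not part of this change] FNTSFA normally
# points the analysis at an external (RTG-type) SST file; blanking it to a
# single space when DONST=YES is presumably what the "turn off rtgsst"
# comment above refers to, i.e. the NSST foundation temperature is used in
# place of the external SST analysis.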
+ +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. 
+export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
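# [Editor's note -- quick-reference summary of the case "${APP}" block
# further below; not part of this change]
#   ATM      -> atmosphere only (defaults above unchanged)
#   ATMA     -> DO_AERO=YES
#   ATMW     -> DO_COUPLED=YES, DO_WAVE=YES, WAVE_CDUMP=both
#   NG-GODAS -> DO_ATM=NO, DO_OCN=YES, DO_ICE=YES
#   S2S*     -> DO_COUPLED=YES, DO_OCN=YES, DO_ICE=YES (coupled CCPP suite)
#   S2SW*    -> additionally DO_WAVE=YES; a trailing "A" also sets DO_AERO=YES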
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
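# [Editor's note -- illustrative sketch, not part of this change]
# IAU_FHROT above is derived with `cut -c1`, which keeps only the first
# character of IAUFHRS; that is fine for "3,6,9" but would break if the first
# forecast hour ever had two digits. A parameter-expansion form that keeps the
# whole first list element (demo variable, hypothetical) would be:
iaufhrs_demo="3,6,9"
echo "${iaufhrs_demo%%,*}"    # -> 3; would also work for e.g. "12,18"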
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
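# [Editor's note -- hedged reading of the archive settings above; the exact
# behaviour lives in arch.sh/earc.sh and is not part of this change]
# With ARCH_CYC=00 and ARCH_WARMICFREQ=4, warm-start ICs would be archived
# from the 00Z cycle roughly every fourth day, while ARCH_FCSTICFREQ=1 keeps
# daily ICs for the gdas/gfs forecast-only capability.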
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
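# [Editor's note -- not part of this change] The CASE -> OCNRES table in the
# case block below appears to encode the ocean grid spacing in hundredths of a
# degree: OCNRES=500 is a 5.0-degree MOM6 grid, 100 is 1 degree, and 025 is
# the quarter-degree grid used for C384/C768; ICERES simply mirrors OCNRES.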
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
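# [Editor's note -- illustrative sketch, not part of this change]
# DOIAU_ENKF above uses the ${var:-default} form, so the "YES" fallback only
# applies when DOIAU is unset or empty; since DOIAU is hard-set to "YES" a few
# lines earlier, DOIAU_ENKF always follows it here. Demonstration with a
# hypothetical variable:
doiau_demo=""  ; echo "${doiau_demo:-YES}"   # -> YES (empty value falls back)
doiau_demo="NO"; echo "${doiau_demo:-YES}"   # -> NO  (existing value kept)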
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.com b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare 
-rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
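# [Editor's note -- illustrative sketch, not part of this change]
# The header of config.defaults.s2sw warns that "empty variables must include
# a space otherwise they will be overwritten". The likely reason is the
# ${var:-...} default pattern used in the configs: an empty string counts as
# null and is replaced, while a single space survives (e.g. waveesmfGRD and
# wavepostGRD below are deliberately set to ' '). Demo variable:
demo_var=""
echo "${demo_var:-fallback}"   # -> fallback (empty string treated as unset)
demo_var=" "
echo "${demo_var:-fallback}"   # -> " " (a lone space is kept)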
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.earc b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ecen b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.echgres b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ediag b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.efcs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.eobs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.epos b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.esfc b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.eupd b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.fcst b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
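# [Editor's note -- illustrative sketch, not part of this change]
# The component loop near the top of this file tests DO_WAVE/DO_OCN/DO_ICE/
# DO_AERO through `eval echo \$$control`. Bash indirect expansion gives the
# same result without eval; echo is used here instead of sourcing so the
# snippet is only a demonstration:
for component in WAVE OCN ICE AERO; do
  control="DO_${component}"
  if [[ "${!control}" == "YES" ]]; then
    echo "would source config.$(echo "${component}" | awk '{ print tolower($1) }')"
  fi
done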
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not used. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." 
+ export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." 
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
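#---------------------------------------------------------------------
# Illustrative sketch (hypothetical values, not part of this configuration):
# how the rst_list loop above builds restart_interval. With
# restart_interval_gfs=12, FHMAX_GFS=120, DOIAU=YES and IAU_OFFSET=6, the first
# restart hour is 12 + 6/2 = 15 and the loop yields "15 27 39 ... 111".
restart_interval_gfs=12; FHMAX_GFS=120; IAU_OFFSET=6
rst_list=""
xfh=$((restart_interval_gfs + (IAU_OFFSET / 2)))
while [ "${xfh}" -le "${FHMAX_GFS}" ]; do
  rst_list="${rst_list} ${xfh}"
  xfh=$((xfh + restart_interval_gfs))
done
echo "restart_interval=${rst_list}"   # -> " 15 27 39 51 63 75 87 99 111"
#---------------------------------------------------------------------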
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.fit2obs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.gempak b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.getic b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.gldas b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ice b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.init b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.init @@ -0,0 +1,54 @@ +#! 
/usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.landanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.landanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.landanlinit @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.landanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.metp b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
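#---------------------------------------------------------------------
# Illustrative sketch (hypothetical date, not part of this configuration): how
# METplus resolves the model_file_format template above. {init?fmt=%Y%m%d%H}
# becomes the initialization time and {lead?fmt=%2H} the two-digit lead hour,
# so for CDUMP=gfs, a 2021032100 cycle and a 24-h lead the verification reads:
#   pgbf24.gfs.2021032100.grib2
example_cdump="gfs"; example_init="2021032100"; example_lead="24"
echo "pgbf${example_lead}.${example_cdump}.${example_init}.grib2"
#---------------------------------------------------------------------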
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.nsst b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocn b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalbmat 
b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnpost b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnpost @@ -0,0 +1,16 @@ +#! 
/usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.post b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.postsnd b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.prep b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.resources b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
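+  # Note: the tasks-per-node values in this file are simple integer division of
+  # npe_node_max by the thread count; e.g. with npe_node_max=40 (HERA/ORION) and
+  # nth_ocnanalpost=1 the result is 40 tasks per node.  An equivalent pure-bash
+  # form (shown for illustration only; the script itself pipes to bc) would be:
+  #   npe_node_ocnanalpost=$(( npe_node_max / nth_ocnanalpost ))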
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components; usually they land on the atmosphere tasks. + # However, it is suggested to limit the mediator PETS to 300, as a larger count may cause slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + 
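+  # Worked example of the PETS accounting above, using the gdas values that
+  # config.ufs in this same change sets for a C384 coupled run with mx025 waves
+  # and 025-degree ocean/ice (illustrative only):
+  #   FV3PETS   = (6*8*6) * 1 = 288
+  #   QUILTPETS = (2*8*6) * 1 =  96   -> ATMPETS    = 384
+  #   WAVPETS   =  80 * 2     = 160
+  #   OCNPETS   = 220 * 1     = 220
+  #   ICEPETS   = 120 * 1     = 120   -> NTASKS_TOT = 884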
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
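+  # For reference: with npe_node_max=128, these settings request 480 / 40 = 12 nodes,
+  # each running 40 tasks x 3 threads = 120 of the 128 available cores.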
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.sfcanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ufs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ufs new file mode 100644 index 0000000000..f3d23d6c7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.ufs @@ -0,0 +1,373 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
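+    # Write-component sizing for this experiment (see the quilt-task computation
+    # after this case block): 1 gfs write group x (14 tasks/tile/thread x 6 tiles) = 84 quilt tasks.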
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=1 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=14 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
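+      # The runoff/chlorophyll fix-file names encode the grid size; with NX_GLB=1440 and
+      # NY_GLB=1080 set above, FRUNOFF expands to runoff.daitren.clim.1440x1080.v20180328.nc.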
CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.vrfy b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsblending b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wave b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. 
$EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavegempak b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.waveinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.waveprep b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/runcmds b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/runcmds new file mode 100644 index 0000000000..714bc3036c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_c3.xml -d v17_p8_c3.db +rocotostat -w v17_p8_c3.xml -d v17_p8_c3.db diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3.crontab b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3.xml b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3.xml new file mode 100644 index 0000000000..9d0496dac8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3.xml @@ -0,0 +1,154 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 00:45:00 + xjet + 158:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 
f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 2:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/12x12x2wgx12wt b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/12x12x2wgx12wt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.aero b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.aeroanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.anal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate 
Metop-C GNSSRO + if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.analcalc b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.analdiag b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.arch b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.atmanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.atmensanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.awips b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.awips @@ -0,0 +1,17 @@ +#! 
/usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.base b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.base new file mode 100644 index 0000000000..9ec4fc7f50 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.base @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." +export assim_freq=6 +export PSLOT="v17_p8_c3_12x12_xjet_2wg_12wt" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_c3" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-168} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-168} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-168} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-168} +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
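# Illustrative sketch of how the derived forecast-length and IAU settings above
# resolve (kept as comments; the values shown are examples only):
#
#   cyc=00                                            # set by the job card
#   FHMAX_GFS_00=24
#   FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}})       # first pass -> ${FHMAX_GFS_00}, eval -> 24
#
#   gfs_cyc=1  ->  STEP_GFS=$(( 24 / gfs_cyc )) = 24  # one GFS forecast per day
#   gfs_cyc=4  ->  STEP_GFS=6                         # every 6 hours
#
#   IAUFHRS="3,6,9"
#   IAU_FHROT=$(echo ${IAUFHRS} | cut -c1)            # first character of the list -> 3
#
# The cut -c1 form returns a single character, which is consistent with the
# single-digit first increment hours (3 or 6) used in this file.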
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
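# Illustrative note (not part of the original template): the @...@ tokens in
# this file (@MACHINE@, @SDATE@, @CASECTL@, ...) are placeholders that the
# experiment setup step is expected to fill in; the resolved config.base in the
# same experiment directory shows the filled-in values (machine JET, SDATE
# 2022111000, CASE C768). A minimal, purely hypothetical way to perform the
# same substitution by hand with sed (not the actual setup mechanism):
#
#   sed -e "s|@MACHINE@|JET|g" \
#       -e "s|@SDATE@|2022111000|g" \
#       -e "s|@CASECTL@|C768|g" \
#       config.base.emc.dyn_EMC > config.base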
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
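+
+# Worked example (illustrative only; assumed value): APP="S2SWA" matches the
+# S2S* branch above as well as both regex checks, so the toggles resolve to
+#   DO_COUPLED=DO_OCN=DO_ICE=DO_AERO=DO_WAVE="YES"
+#   WAVE_CDUMP="both", cplwav2atm=".true."
+#   confignamevarfornems="cpld_aero_outerwave"
+# before config.defaults.s2sw is sourced to apply the S2SW defaults.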
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
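+
+# Worked example (illustrative only): for the first cycle of a cold-started
+# cycled run (CDATE=SDATE with EXP_WARM_START=".false."), for a cold-started
+# forecast-only run, or whenever DOIAU="NO", the check above disables IAU for
+# that cycle:
+#   IAU_OFFSET=0
+#   IAU_FHROT=0
+# Warm-started cycles keep IAU_OFFSET=6 and IAU_FHROT=3 from the IAU settings
+# earlier in this file.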
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.com b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare 
-rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
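+
+# Minimal sketch (illustrative only) of the "empty variables must include a
+# space" convention noted above: consumers such as config.fcst apply defaults
+# with the ${var:-default} form, which treats an empty string the same as
+# unset, while a single space counts as a value and is kept:
+#   min_seaice=""  -> ${min_seaice:-"0.15"} expands to 0.15 (overwritten)
+#   min_seaice=" " -> ${min_seaice:-"0.15"} expands to the space (kept)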
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.earc b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ecen b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.echgres b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ediag b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.efcs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.eobs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.epos b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.esfc b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.eupd b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.fcst b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
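+
+# Worked example (illustrative only; assumed values): ${CDUMP/enkf} strips
+# "enkf" from CDUMP, so CDUMP="enkfgdas" is compared as "gdas" in the
+# WAVE_CDUMP case above. With DO_OCN=DO_ICE=DO_WAVE="YES", CASE=C768,
+# OCNRES=ICERES=025 and waveGRD='gnh_10m aoc_9km gsh_15m', the argument
+# string passed to config.ufs becomes
+#   --fv3 C768 --mom6 025 --cice6 025 --ww3 gnh_10m;aoc_9km;gsh_15m
+# since ${waveGRD// /;} replaces each space with a semicolon. The component
+# loop's 'eval echo \$$control' is an indirect expansion; bash's ${!control}
+# is an equivalent form.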
+
+# Options of stratosphere O3 physics reaction coefficients
+export new_o3forc="YES"
+
+export gwd_opt=2
+
+# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc
+# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD)
+# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not used.
+if [[ "$gwd_opt" -eq 1 ]]; then
+ export knob_ugwp_version=0
+ export do_ugwp=".false."
+ export do_tofd=".false."
+ export launch_level=$(echo "$LEVS/2.35" |bc)
+fi
+
+
+# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc
+if [[ "$gwd_opt" -eq 2 ]]; then
+
+ #--used for UFS p7 and p8a
+ #export knob_ugwp_version=1
+ #export do_ugwp=".false."
+ #export do_tofd=".false."
+ #export do_ugwp_v0=".false."
+ #export do_ugwp_v1=".true."
+ #export do_ugwp_v0_orog_only=".false."
+ #export do_ugwp_v0_nst_only=".false."
+ #export do_gsl_drag_ls_bl=".true."
+ #export do_gsl_drag_ss=".true."
+ #export do_gsl_drag_tofd=".true."
+ #export do_ugwp_v1_orog_only=".false."
+
+ #--used for UFS p8
+ export knob_ugwp_version=0
+ export do_ugwp=".false."
+ export do_tofd=".false."
+ export do_ugwp_v0=".true."
+ export do_ugwp_v1=".false."
+ export do_ugwp_v0_orog_only=".false."
+ export do_ugwp_v0_nst_only=".false."
+ export do_gsl_drag_ls_bl=".false."
+ export do_gsl_drag_ss=".true."
+ export do_gsl_drag_tofd=".false."
+ export do_ugwp_v1_orog_only=".false."
+ export launch_level=$(echo "$LEVS/2.35" |bc)
+
+ #--used for GSL suite
+##JKH export knob_ugwp_version=0
+##JKH export do_ugwp=".false."
+##JKH export do_tofd="true."
+##JKH export do_ugwp_v0=".true."
+##JKH export do_ugwp_v1=".false."
+##JKH export do_ugwp_v0_orog_only=".false."
+##JKH export do_ugwp_v0_nst_only=".false."
+##JKH export do_gsl_drag_ls_bl=".false."
+##JKH export do_gsl_drag_ss=".true."
+##JKH export do_gsl_drag_tofd=".false."
+##JKH export do_ugwp_v1_orog_only=".false."
+##JKH export launch_level=$(echo "$LEVS/2.35" |bc)
+fi
+
+
+
+
+# Sponge layer settings
+export tau=10.0
+export rf_cutoff=7.5e2
+export d2_bg_k1=0.20
+### JKH
+if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then
+ export d2_bg_k2=0.15 ### JKH - 10dec
+ export dz_min=2
+ export dt_inner=40. ### JKH - 10dec
+else
+ export d2_bg_k2=0.04
+ export dz_min=6
+fi
+if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer
+if [[ ${LEVS} = "65" ]]; then
+ if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then
+ export n_sponge=23
+ else
+ export n_sponge=42
+ fi
+fi
+if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then
+ export tau=5.0
+ export rf_cutoff=1.0e3
+ export d2_bg_k1=0.20
+ export d2_bg_k2=0.0
+fi
+
+# PBL/turbulence schemes
+export hybedmf=".false."
+if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then
+ export satmedmf=".false."
+ export isatmedmf=0
+ if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then
+ export shal_cnv=".false."
+ else
+ export shal_cnv=".true."
+ fi
+ export do_mynnedmf=".true."
+ if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then
+ export do_mynnsfclay=".true."
+ else
+ export do_mynnsfclay=".false."
+ fi
+ export icloud_bl=1
+ export bl_mynn_tkeadvect=.true.
+ export bl_mynn_edmf=1
+ export bl_mynn_edmf_mom=1
+else
+ export satmedmf=".true."
+ export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." 
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.fit2obs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.gempak b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.getic b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.gldas b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ice b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.init b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.init @@ -0,0 +1,54 @@ +#! 
/usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.landanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.landanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.landanlinit @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.landanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.metp b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
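# Note (illustrative assumption, not a setting): METplus fills the {init?...} and
# {lead?...} placeholders in model_file_format from the case being verified; for a
# hypothetical gfs cycle 2021032500 verified at a 24-hour lead, the template above
# would resolve to something like pgbf24.gfs.2021032500.grib2 (with ${CDUMP}=gfs).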
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.nsst b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 : the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocn b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocnanal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocnanalbmat
b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocnpost b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ocnpost @@ -0,0 +1,16 @@ +#! 
/usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.post b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.postsnd b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.prep b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.resources b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + 
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.sfcanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ufs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ufs new file mode 100644 index 0000000000..6c48881832 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.ufs @@ -0,0 +1,373 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.vrfy b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wafs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wafsblending b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wave b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. 
$EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wavegempak b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.waveinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.waveprep b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/runcmds b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/runcmds new file mode 100644 index 0000000000..714bc3036c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_c3.xml -d v17_p8_c3.db +rocotostat -w v17_p8_c3.xml -d v17_p8_c3.db diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/v17_p8_c3.crontab b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/v17_p8_c3.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/v17_p8_c3.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/v17_p8_c3.xml b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/v17_p8_c3.xml new file mode 100644 index 0000000000..7e17b28cd0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt/v17_p8_c3.xml @@ -0,0 +1,153 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 01:00:00 + xjet + 168:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 
f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/12x12x2wgx12wt b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/12x12x2wgx12wt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.aero b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.aeroanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.anal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + 
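Every historical block in this section applies the same test: CDATE is a ten-digit YYYYMMDDHH string, so a plain integer comparison is enough to decide whether the cycle falls inside a given window. A small sketch of that test as a reusable helper; the in_window name is illustrative only, nothing in config.anal defines it:

    # true when cdate lies in [start, end), all given as YYYYMMDDHH
    in_window() {
      local cdate=$1 start=$2 end=$3
      [[ ${cdate} -ge ${start} && ${cdate} -lt ${end} ]]
    }
    # equivalent of one of the blocks below, for example:
    # if in_window "${CDATE}" 2020091612 2021031712; then
    #   export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612
    # fi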
+ # Assimilate Metop-C GNSSRO + if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.analcalc b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.analdiag b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.arch b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.atmanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.atmensanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.awips b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.base b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.base new file mode 100644 index 0000000000..ef3179f085 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.base @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." +export assim_freq=6 +export PSLOT="v17_p8_c3_12x12_xjet_2wg_12wt_168h" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_c3" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
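ERRSCRIPT above defaults to a simple exit-status test; NCO-style ex-scripts conventionally invoke it right after a critical step to decide whether to abort. A hedged sketch of that calling pattern (the executable name is hypothetical, and real jobs may call err_chk or another site utility instead):

    ./hypothetical_step.x               # stand-in for a program launched by the job
    export err=$?
    ${ERRSCRIPT} || exit "${err}"       # with the default above this runs: eval [[ $err = 0 ]]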
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-24} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-24} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-24} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
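# NOTE (sketch under assumptions, not part of this patch): FHMAX_GFS above is
# looked up per cycle by eval-expanding FHMAX_GFS_${cyc}.  Bash indirect
# expansion gives the same result without eval; shown only as an illustration:
#   cyc=12
#   varname="FHMAX_GFS_${cyc}"
#   FHMAX_GFS="${!varname}"      # -> value of FHMAX_GFS_12 (168 in this experiment)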
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
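# NOTE (sketch, assumption only -- not a change to this config): IAU_FHROT
# above is taken with `cut -c1`, i.e. the first *character* of IAUFHRS, which
# is fine for "3,6,9" but would mis-read a two-digit first hour.  A field-based
# parse is more general:
#   IAUFHRS="3,6,9"
#   IAU_FHROT=$(echo "${IAUFHRS}" | cut -d',' -f1)   # -> 3; "12,18" would give 12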
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
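# NOTE (illustrative only): the @...@ tokens in this template (e.g. @MACHINE@,
# @ACCOUNT@, @SDATE@) are placeholders filled in when an experiment directory is
# generated; the resolved config.base earlier in this change shows the result.
# A minimal stand-alone sketch of that substitution (hypothetical values, not
# the workflow's own setup tooling) would be:
#   sed -e 's|@MACHINE@|JET|g' \
#       -e 's|@ACCOUNT@|gsd-fv3-dev|g' \
#       -e 's|@SDATE@|2022111000|g' \
#       config.base.emc.dyn > config.base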
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
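# NOTE (worked example, not part of this patch): STEP_GFS above is the gap in
# hours between long GFS forecasts, 24 / gfs_cyc:
#   gfs_cyc=1 -> STEP_GFS=24   (00Z only)
#   gfs_cyc=2 -> STEP_GFS=12   (00Z and 12Z)
#   gfs_cyc=4 -> STEP_GFS=6    (all four cycles)
# gfs_cyc=0 skips the division (which would otherwise divide by zero) and sets
# STEP_GFS=0, i.e. no GFS forecast cycles at all.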
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
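# NOTE (sketch under assumptions, not a workflow change): the compound test
# above that zeroes IAU_OFFSET/IAU_FHROT covers three situations -- a cycled
# run cold-starting at SDATE, IAU disabled, or a cold-started forecast-only
# run.  One way to read it, as a hypothetical helper:
#   iau_is_inactive() {
#     [[ "${DOIAU}" = "NO" ]] && return 0
#     [[ "${EXP_WARM_START}" = ".false." ]] || return 1
#     [[ "${MODE}" = "forecast-only" ]] && return 0
#     [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" ]]
#   }
#   if iau_is_inactive; then export IAU_OFFSET=0 IAU_FHROT=0; fi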
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
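# NOTE (illustrative, not part of this patch): the DUMP_SUFFIX test above
# compares 10-digit ${CDATE} values (YYYYMMDDHH) numerically, which works
# because the fixed-width year-first format orders the same way numerically
# and chronologically.  For example:
#   CDATE=2019100112
#   if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then
#     echo "inside the GFS v15.3 parallel dump window"   # this branch runs
#   fi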
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
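# NOTE (worked example, not part of this patch): the APP case above uses two
# regex tests so a single value such as APP=S2SWA switches on several
# components at once:
#   [[ "S2SWA" =~ A$ ]]     # true  -> DO_AERO=YES
#   [[ "S2SWA" =~ ^S2SW ]]  # true  -> DO_WAVE=YES, cplwav2atm=".true."
# so S2SWA ends up with ocean, ice, waves and aerosols enabled, while plain
# S2S matches neither pattern and keeps only the coupled ocean/ice pieces.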
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
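# NOTE (worked example, not part of this patch): with DOHYBVAR=YES the block
# above keys the EnKF forecast output cadence off l4densvar:
#   l4densvar=".true."  -> FHOUT=1 and FHOUT_ENKF=1 (hourly fields f03-f09,
#                          giving the 4D ensemble-var solver hourly backgrounds)
#   l4densvar=".false." -> FHOUT_ENKF=3 (3-hourly output is enough for 3D)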
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
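# NOTE (illustrative, not part of this patch): waveGRD above is a space-
# separated list of wave grids; the forecast configs later in this change pass
# it to config.ufs with the spaces turned into semicolons via pattern
# substitution:
#   waveGRD='gnh_10m aoc_9km gsh_15m'
#   echo "--ww3 ${waveGRD// /;}"   # -> --ww3 gnh_10m;aoc_9km;gsh_15m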
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
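# NOTE (sketch under assumptions, not part of this patch): the CASE -> OCNRES
# mapping above is repeated in several configs; a hypothetical helper that
# returns the ocean resolution for an atmosphere grid could read:
#   ocnres_for_case() {
#     case "${1}" in
#       C48)  echo 500 ;;
#       C96)  echo 100 ;;
#       C192) echo 050 ;;
#       *)    echo 025 ;;   # C384, C768 and anything finer
#     esac
#   }
#   export OCNRES=$(ocnres_for_case "${CASE}")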
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.com b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx 
COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
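# NOTE (illustrative sketch only; the real helper is generate_com() in
# ush/preamble.sh, documented in config.com above): each COM template is a
# single-quoted string that is expanded only once the cycle variables are
# known.  A minimal stand-alone illustration of that idea, assuming nothing
# about the helper's internals:
#   ROTDIR=/path/to/rotdir RUN=gfs YMD=20221110 HH=00 MEMDIR=mem001
#   COM_ATMOS_HISTORY_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}/model_data/atmos/history'
#   eval "COM_ATMOS_HISTORY=${COM_ATMOS_HISTORY_TMPL}"
#   echo "${COM_ATMOS_HISTORY}"
#   # -> /path/to/rotdir/gfs.20221110/00/mem001/model_data/atmos/history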
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.earc b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ecen b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.echgres b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ediag b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.efcs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.eobs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.epos b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.esfc b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.eupd b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.fcst b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." 
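+  # Descriptive note (added for clarity, not a new setting): satmedmf above enables the
+  # scale-aware TKE-based moist EDMF PBL scheme, and isatmedmf below selects its version
+  # (1 = updated formulation); see the CCPP/UFS physics documentation for the
+  # authoritative definitions of these flags.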
+ export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." 
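+  # Descriptive note (added for clarity, not a new setting): the do_gsl_drag_* switches
+  # control the GSL orographic drag suite -- ls_bl = large-scale gravity wave drag plus
+  # blocking, ss = small-scale gravity wave drag, tofd = turbulent orographic form drag
+  # (interpretation per UFS/CCPP documentation).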
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
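+  # Illustrative example (comment only, not a setting): with restart_interval_gfs=12,
+  # IAU_OFFSET=6 and FHMAX_GFS=48, the loop above yields restart_interval="15 27 39",
+  # i.e. a restart written every 12 h starting at the 15-h lead (12 + IAU_OFFSET/2).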
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.fit2obs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.gempak b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.getic b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.gldas b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ice b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ice @@ -0,0 +1,5 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.init b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.init @@ -0,0 +1,54 @@ +#! /usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.landanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.landanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.landanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.metp b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.nsst b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash
+
+########## config.nsst ##########
+# NSST specific
+
+echo "BEGIN: config.nsst"
+
+# NSST parameters contained within nstf_name
+
+# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled
+export NST_MODEL=2
+
+# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON,
+export NST_SPINUP=0
+if [[ "$CDATE" -lt "2017072000" ]]; then
+  export NST_SPINUP=1
+fi
+
+# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON
+export NST_RESV=0
+
+# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction)
+export ZSEA1=0
+export ZSEA2=0
+
+export NST_GSI=3   # default 0: No NST info at all;
+                   # 1: Input NST info but not used in GSI;
+                   # 2: Input NST info, used in CRTM simulation, no Tr analysis
+                   # 3: Input NST info, used in both CRTM simulation and Tr analysis
+export NSTINFO=0   # number of elements added in obs. data array (default = 0)
+if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi
+
+echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocn b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocn
new file mode 100644
index 0000000000..7d14e3dd52
--- /dev/null
+++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocn
@@ -0,0 +1,23 @@
+#! /usr/bin/env bash
+
+echo "BEGIN: config.ocn"
+
+# MOM_input template to use
+export MOM_INPUT="MOM_input_template_${OCNRES}"
+
+export DO_OCN_SPPT="NO"       # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False)
+export DO_OCN_PERT_EPBL="NO"  # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False)
+
+# Templated variables in MOM_input_template
+export MOM6_USE_LI2016="True" # set to False for restart reproducibility
+export MOM6_THERMO_SPAN="False"
+export MOM6_ALLOW_LANDMASK_CHANGES="False"
+
+if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then
+  export ODA_INCUPD="True"
+else
+  export ODA_INCUPD="False"
+fi
+export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment
+
+echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocnanal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocnanal
new file mode 100644
index 0000000000..f5925809fc
--- /dev/null
+++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocnanal
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+########## config.ocnanal ##########
+# configuration common to all ocean analysis tasks
+
+echo "BEGIN: config.ocnanal"
+
+export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config
+export OBS_LIST=
+[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml
+export OBS_YAML=${OBS_LIST}
+export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml
+export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25
+export SOCA_VARS=tocn,socn,ssh
+export SABER_BLOCKS_YAML=
+export SOCA_NINNER=50
+export CASE_ANL=C48
+export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent
+export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin
+
+# R2D2
+export R2D2_OBS_DB=shared
+export R2D2_OBS_DUMP=s2s_v1
+export R2D2_OBS_SRC=gdas_marine
+export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed
+export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2
+
+# NICAS
+export NICAS_RESOL=1
+export NICAS_GRID_SIZE=15000
+
+echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocnanalbmat 
b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocnpost b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ocnpost @@ -0,0 +1,16 @@ +#! 
/usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.post b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.postsnd b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.prep b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.resources b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs}
+      export layout_y=${layout_y_gfs}
+      export WRITE_GROUP=${WRITE_GROUP_GFS}
+      export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS}
+      ntasks_fv3=${ntasks_fv3_gfs}
+      ntasks_quilt=${ntasks_quilt_gfs}
+      nthreads_fv3=${nthreads_fv3_gfs}
+    fi
+
+    # PETS for the atmosphere dycore
+    (( FV3PETS = ntasks_fv3 * nthreads_fv3 ))
+    echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})"
+
+    # PETS for quilting
+    if [[ "${QUILTING:-}" = ".true." ]]; then
+      (( QUILTPETS = ntasks_quilt * nthreads_fv3 ))
+      (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD ))
+      export WRTTASK_PER_GROUP
+    else
+      QUILTPETS=0
+    fi
+    echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})"
+
+    # Total PETS for the atmosphere component
+    ATMTHREADS=${nthreads_fv3}
+    (( ATMPETS = FV3PETS + QUILTPETS ))
+    export ATMPETS ATMTHREADS
+    echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})"
+
+    # Total PETS for the coupled model (starting w/ the atmosphere)
+    NTASKS_TOT=${ATMPETS}
+
+    # The mediator PETS can overlap with other components; they usually land on the atmosphere tasks.
+    # However, it is suggested to limit the mediator PETS to 300, as larger values may cause slow performance.
+    # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit
+    # TODO: Update reference when moved to ufs-weather-model RTD
+    MEDTHREADS=${nthreads_mediator:-1}
+    MEDPETS=${MEDPETS:-ATMPETS}
+    [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300
+    export MEDPETS MEDTHREADS
+    echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})"
+
+    if [[ "${DO_AERO}" = "YES" ]]; then
+      # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks).
+      (( CHMTHREADS = ATMTHREADS ))
+      (( CHMPETS = FV3PETS ))
+      # Do not add to NTASKS_TOT
+      export CHMPETS CHMTHREADS
+      echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})"
+    fi
+
+    if [[ "${DO_WAVE}" = "YES" ]]; then
+      (( WAVPETS = ntasks_ww3 * nthreads_ww3 ))
+      (( WAVTHREADS = nthreads_ww3 ))
+      export WAVPETS WAVTHREADS
+      echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})"
+      (( NTASKS_TOT = NTASKS_TOT + WAVPETS ))
+    fi
+
+    if [[ "${DO_OCN}" = "YES" ]]; then
+      (( OCNPETS = ntasks_mom6 * nthreads_mom6 ))
+      (( OCNTHREADS = nthreads_mom6 ))
+      export OCNPETS OCNTHREADS
+      echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})"
+      (( NTASKS_TOT = NTASKS_TOT + OCNPETS ))
+    fi
+
+    if [[ "${DO_ICE}" = "YES" ]]; then
+      (( ICEPETS = ntasks_cice6 * nthreads_cice6 ))
+      (( ICETHREADS = nthreads_cice6 ))
+      export ICEPETS ICETHREADS
+      echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})"
+      (( NTASKS_TOT = NTASKS_TOT + ICEPETS ))
+    fi
+
+    echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}"
+
+    if [[ "${_CDUMP}" =~ "gfs" ]]; then
+      declare -x "npe_${step}_gfs"="${NTASKS_TOT}"
+      declare -x "nth_${step}_gfs"=1  # ESMF handles threading for the UFS-weather-model
+      declare -x "npe_node_${step}_gfs"="${npe_node_max}"
+    else
+      declare -x "npe_${step}"="${NTASKS_TOT}"
+      declare -x "nth_${step}"=1  # ESMF handles threading for the UFS-weather-model
+      declare -x "npe_node_${step}"="${npe_node_max}"
+    fi
+
+  done
+
+  case "${CASE}" in
+    "C48" | "C96" | "C192")
+      declare -x "wtime_${step}"="00:30:00"
+      declare -x "wtime_${step}_gfs"="03:00:00"
+      ;;
+    "C384" | "C768" | "C1152")
+      declare -x "wtime_${step}"="01:00:00"
+      declare -x "wtime_${step}_gfs"="06:00:00"
+      ;;
+    *)
+      echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}"
+      exit 1
+      ;;
+  esac
+
+  unset _CDUMP _CDUMP_LIST
+  unset NTASKS_TOT
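+
+  # Worked example (illustrative only): with the C768 gfs settings from config.ufs in
+  # this change (layout_x_gfs=12, layout_y_gfs=12, nthreads_fv3_gfs=4, WRITE_GROUP_GFS=2,
+  # WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12), and assuming an atmosphere-only run
+  # with QUILTING=.true. and DO_WAVE/DO_OCN/DO_ICE all "NO":
+  #   ntasks_fv3_gfs   = 12 * 12 * 6          = 864
+  #   ntasks_quilt_gfs = 2 * (12 * 6)         = 144
+  #   FV3PETS          = 864 * 4              = 3456
+  #   QUILTPETS        = 144 * 4              = 576
+  #   npe_fcst_gfs     = ATMPETS = 3456 + 576 = 4032
+  # which on xjet (npe_node_max=24, nth_fcst_gfs=1) is 4032 / 24 = 168 nodes,
+  # matching the "168:ppn=24:tpp=1" request in v17_p8_c3.xml.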
+
+elif [[ ${step} = "ocnpost" ]]; then
+
+  export wtime_ocnpost="00:30:00"
+  export npe_ocnpost=1
+  export npe_node_ocnpost=1
+  export nth_ocnpost=1
+  export memory_ocnpost="96G"
+  if [[ ${machine} == "JET" ]]; then
+    # JET only has 88GB of requestable memory per node
+    # so a second node is required to meet the requirement
+    npe_ocnpost=2
+  fi
+
+elif [[ ${step} = "post" ]]; then
+
+  export wtime_post="00:12:00"
+  export wtime_post_gfs="01:00:00"
+  export npe_post=126
+  res=$(echo "${CASE}" | cut -c2-)
+  if (( npe_post > res )); then
+    export npe_post=${res}
+  fi
+  export nth_post=1
+  export npe_node_post=${npe_post}
+  export npe_node_post_gfs=${npe_post}
+  export npe_node_dwn=${npe_node_max}
+  if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi
+  if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi
+  export is_exclusive=True
+
+elif [[ ${step} = "wafs" ]]; then
+
+  export wtime_wafs="00:30:00"
+  export npe_wafs=1
+  export npe_node_wafs=${npe_wafs}
+  export nth_wafs=1
+  export memory_wafs="1GB"
+
+elif [[ ${step} = "wafsgcip" ]]; then
+
+  export wtime_wafsgcip="00:30:00"
+  export npe_wafsgcip=2
+  export nth_wafsgcip=1
+  export npe_node_wafsgcip=1
+  export memory_wafsgcip="50GB"
+
+elif [[ ${step} = "wafsgrib2" ]]; then
+
+  export wtime_wafsgrib2="00:30:00"
+  export npe_wafsgrib2=18
+  export nth_wafsgrib2=1
+  npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc)
+  export npe_node_wafsgrib2
+  export memory_wafsgrib2="80GB"
+
+elif [[ ${step} = "wafsblending" ]]; then
+
+  export wtime_wafsblending="00:30:00"
+  export npe_wafsblending=1
+  export nth_wafsblending=1
+  npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc)
+  export npe_node_wafsblending
+  export memory_wafsblending="15GB"
+
+elif [[ ${step} = "wafsgrib20p25" ]]; then
+
+  export wtime_wafsgrib20p25="00:30:00"
+  export npe_wafsgrib20p25=11
+  export nth_wafsgrib20p25=1
+  npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc)
+  export npe_node_wafsgrib20p25
+  export memory_wafsgrib20p25="80GB"
+
+elif [[ ${step} = "wafsblending0p25" ]]; then
+
+  export wtime_wafsblending0p25="00:30:00"
+  export npe_wafsblending0p25=1
+  export nth_wafsblending0p25=1
+  npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc)
+  export npe_node_wafsblending0p25
+  export memory_wafsblending0p25="15GB"
+
+elif [[ ${step} = "vrfy" ]]; then
+
+  export wtime_vrfy="03:00:00"
+  export wtime_vrfy_gfs="06:00:00"
+  export npe_vrfy=3
+  export nth_vrfy=1
+  export npe_node_vrfy=1
+  export npe_vrfy_gfs=1
+  export npe_node_vrfy_gfs=1
+  if [[ ${machine} == "HERA" ]]; then
+    export memory_vrfy="16384M"
+  fi
+  export is_exclusive=True
+
+elif [[ "${step}" = "fit2obs" ]]; then
+
+  export wtime_fit2obs="00:20:00"
+  export npe_fit2obs=3
+  export nth_fit2obs=1
+  export npe_node_fit2obs=1
+  export memory_fit2obs="20G"
+  if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi
+
+elif [[ "${step}" = "metp" ]]; then
+
+  export nth_metp=1
+  export wtime_metp="03:00:00"
+  export npe_metp=4
+  export npe_node_metp=4
+  export wtime_metp_gfs="06:00:00"
+  export npe_metp_gfs=4
+  export npe_node_metp_gfs=4
+  export is_exclusive=True
+
+elif [[ ${step} = "echgres" ]]; then
+
+  export wtime_echgres="00:10:00"
+  export npe_echgres=3
+  export nth_echgres=${npe_node_max}
+  export npe_node_echgres=1
+  if [[ "${machine}" = "WCOSS2" ]]; then
+    export memory_echgres="200GB"
+  fi
+
+elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
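+  # Illustrative note: with npe_node_eobs=40 and nth_eobs=3, each node packs
+  # 40 tasks x 3 threads = 120 of the 128 cores (npe_node_max), so the 480
+  # tasks above span 480 / 40 = 12 nodes.
+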
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.sfcanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ufs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ufs new file mode 100644 index 0000000000..6c48881832 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.ufs @@ -0,0 +1,373 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.vrfy b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wafs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wafsblending b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. 
$EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wave b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wave @@ -0,0 +1,159 @@ +#! /usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single 
restart file
+  export DT_2_RST_WAV=43200  # restart stride for checkpointing restart
+  export RSTIOFF_WAV=0       # first restart file offset relative to model start
+else  # This is a GFS run
+  rst_dt_gfs=$(( restart_interval_gfs * 3600 ))
+  if [ $rst_dt_gfs -gt 0 ]; then
+    export DT_1_RST_WAV=${rst_dt_gfs:-0}    # time between restart files, set to DTRST=1 for a single restart file
+    export DT_2_RST_WAV=${rst_dt_gfs:-0}    # restart stride for checkpointing restart
+  else
+    rst_dt_fhmax=$(( FHMAX_WAV * 3600 ))
+    export DT_1_RST_WAV=0                   # time between restart files, set to DTRST=1 for a single restart file
+    export DT_2_RST_WAV=${rst_dt_fhmax:-0}  # use checkpoint restart file name for creating restart at end of run
+  fi
+  export RSTIOFF_WAV=0                      # first restart file offset relative to model start
+fi
+#
+# Set runmember to default value if not GEFS cpl run
+# (for a GFS coupled run, RUNMEM would be unset, this should default to -1)
+export RUNMEM=${RUNMEM:--1}
+# Set wave model member tags if ensemble run
+# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN
+if [ $RUNMEM = -1 ]; then
+# No suffix added to model ID in case of deterministic run
+  export waveMEMB=
+else
+# Extract member number only
+  export waveMEMB=$(echo $RUNMEM | grep -o '..$')
+fi
+
+# Determine if wave component needs input and/or is coupled
+export WW3ATMINP='CPL'
+if [[ $DO_ICE == "YES" ]]; then
+  export WW3ICEINP='CPL'
+  export WAVEICE_FID=
+else
+  export WW3ICEINP='YES'
+  export WAVEICE_FID=glix_10m
+fi
+
+export WAVECUR_DID=rtofs
+if [[ $DO_OCN == "YES" ]]; then
+  export WW3CURINP='CPL'
+  export WAVECUR_FID=
+else
+  export WW3CURINP='YES'
+  export WAVECUR_FID=glix_10m
+fi
+
+# Determine if input is from perturbed ensemble (T) or single input file (F) for all members
+export WW3ATMIENS='F'
+export WW3ICEIENS='F'
+export WW3CURIENS='F'
+
+export GOFILETYPE=1  # GOFILETYPE=1 one gridded file per output step
+export POFILETYPE=1  # POFILETYPE=1 one point file per output step
+
+# Parameters for ww3_multi/shel.inp
+# Unified output T or F
+export FUNIPNT='T'
+# Output server type (see ww3_multi.inp in WW3 repo)
+export IOSRV=${IOSRV:-'3'}
+# Flag for dedicated output process for unified points
+export FPNTPROC='T'
+# Flag for grids sharing dedicated output processes
+export FGRDPROC='F'
+# Flag for masking computation in two-way nesting
+export FLAGMASKCOMP="F"
+# Flag for masking at printout time.
+export FLAGMASKOUT="F"
+
+echo "END: config.wave"
diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.waveawipsbulls
new file mode 100644
index 0000000000..fd21869355
--- /dev/null
+++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.waveawipsbulls
@@ -0,0 +1,14 @@
+#! /usr/bin/env bash
+
+########## config.waveawipsbulls ##########
+# Wave steps specific
+
+echo "BEGIN: config.waveawipsbulls"
+
+# Get task specific resources
+. $EXPDIR/config.resources waveawipsbulls
+
+export DBNROOT=/dev/null
+export SENDCOM="YES"
+
+echo "END: config.waveawipsbulls"
diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.waveawipsgridded
new file mode 100644
index 0000000000..6896ec8bd2
--- /dev/null
+++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.waveawipsgridded
@@ -0,0 +1,14 @@
+#! 
/usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wavegempak b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.waveinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.wavepostsbs @@ -0,0 +1,24 @@ +#! 
/usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.waveprep b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/runcmds b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/runcmds new file mode 100644 index 0000000000..714bc3036c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_c3.xml -d v17_p8_c3.db +rocotostat -w v17_p8_c3.xml -d v17_p8_c3.db diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/v17_p8_c3.crontab b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/v17_p8_c3.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/v17_p8_c3.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/v17_p8_c3.xml b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/v17_p8_c3.xml new file mode 100644 index 0000000000..ade7054917 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_12wt_168h/v17_p8_c3.xml @@ -0,0 +1,159 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + xjet + 05:00:00 + 168:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + 
&ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 _f126-f126 _f132-f132 _f138-f138 _f144-f144 _f150-f150 _f156-f156 _f162-f162 _f168-f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 00:20:00 + + 3:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/12x12x2wgx14wt b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/12x12x2wgx14wt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.aero b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.aeroanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.anal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate 
Metop-C GNSSRO + if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.analcalc b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.analdiag b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.arch b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.atmanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.atmensanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.awips b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.awips @@ -0,0 +1,17 @@ +#! 
/usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.base b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.base new file mode 100644 index 0000000000..ab39a29d7b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.base @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." +export assim_freq=6 +export PSLOT="v17_p8_c3_12x12_xjet_2wg_14wt" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_c3" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
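# Reading the APP case block below: plain "ATM" (used here) keeps the
# component defaults above; within the S2S*) branch two pattern tests then
# compose, so e.g. APP=S2SWA would enable ocean/ice (S2S), waves (^S2SW), and
# aerosols (A$) from a single string.  A minimal illustrative sketch, not part
# of the shipped configuration:
#   [[ "S2SWA" =~ A$ ]]     # true  -> DO_AERO="YES"
#   [[ "S2SWA" =~ ^S2SW ]]  # true  -> DO_WAVE="YES", cplwav2atm=".true."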
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-168} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-168} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-168} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-168} +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
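# Worked example of how the cycling and forecast-length knobs above resolve
# for this experiment (values traced from the exports above, shown here only
# for orientation):
#   gfs_cyc=1                  -> STEP_GFS=$(( 24 / 1 )) = 24          # one gfs cycle per day
#   cyc=00, FHMAX_GFS_00=24    -> FHMAX_GFS=$(eval echo \${FHMAX_GFS_00}) = 24
#   IAUFHRS="3,6,9"            -> IAU_FHROT=$(echo "3,6,9" | cut -c1) = 3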
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
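# Net effect of the guards above for this experiment's settings: with
# MODE="forecast-only" and EXP_WARM_START=".false." the cold-start test
# matches, so IAU_OFFSET and IAU_FHROT are reset to 0 even though DOIAU="YES";
# and with HPSSARCH="YES" and LOCALARCH="NO" the archiving exclusivity check
# passes (setting both to "YES" would exit config.base with status 2).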
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
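# Sketch of the ${VAR:-default} fallback chain used for RUN above
# (export RUN=${RUN:-${CDUMP:-"gfs"}}): the job-card value wins, then CDUMP,
# then the literal "gfs".  Illustrative shell session, not part of the config:
#   unset RUN CDUMP; echo "${RUN:-${CDUMP:-gfs}}"   # -> gfs
#   CDUMP=gdas;      echo "${RUN:-${CDUMP:-gfs}}"   # -> gdas
#   RUN=gfs;         echo "${RUN:-${CDUMP:-gfs}}"   # -> gfs (RUN takes precedence)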
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
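# The @...@ tokens throughout this template (@MACHINE@, @SDATE@, @PSLOT@,
# @HPSSARCH@, ...) are placeholders expected to be filled in when an
# experiment directory is generated; the populated config.base in this same
# directory shows the resolved values.  A hypothetical stand-in for that
# substitution step (tool choice and file names are assumptions, not the
# workflow's actual setup code):
#   sed -e 's|@MACHINE@|JET|' -e 's|@SDATE@|2022111000|' \
#       config.base.emc.dyn_EMC > config.base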
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
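For reference, a minimal sketch (illustrative values only, not part of the config) of how IAU_FHROT is derived from IAUFHRS above; note that `cut -c1` keeps only the first character, so it assumes the first increment hour is a single digit:

    # Sketch with illustrative values; not sourced by the workflow
    IAUFHRS="3,6,9"
    IAU_FHROT=$(echo "${IAUFHRS}" | cut -c1)
    echo "${IAU_FHROT}"      # prints "3"
    # A two-digit leading hour such as "12,15" would yield "1", not "12"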
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
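For reference, a small sketch (illustrative values only) of the per-cycle FHMAX_GFS lookup used earlier in this file, where the current cycle hour selects one of the FHMAX_GFS_<cyc> variables by indirect expansion:

    # Sketch only: resolve FHMAX_GFS from the cycle-specific variable
    cyc=06
    FHMAX_GFS_06=120
    FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}})
    echo "${FHMAX_GFS}"      # prints "120"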
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
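For reference, a short sketch (hypothetical APP value) of the pattern checks used in the APP case block above, where a trailing "A" enables aerosols and a leading "S2SW" enables waves:

    # Sketch only: how the =~ checks in the S2S* case branch behave
    APP="S2SWA"
    [[ "${APP}" =~ A$ ]] && echo "DO_AERO would be YES"     # matches: ends in A
    [[ "${APP}" =~ ^S2SW ]] && echo "DO_WAVE would be YES"  # matches: starts with S2SW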
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.com b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare 
-rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
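For reference, a brief sketch of the header note in this file that empty variables must include a space: the consuming configs apply ${var:-default} expansions, which replace a truly empty value but leave a single space alone (illustrative check shown, not part of the config):

    # Sketch only: why ' ' is used instead of ''
    waveesmfGRD=''
    echo "[${waveesmfGRD:-fallback}]"   # prints "[fallback]" (empty counts as unset for :-)
    waveesmfGRD=' '
    echo "[${waveesmfGRD:-fallback}]"   # prints "[ ]" (a space is preserved)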
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.earc b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ecen b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.echgres b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ediag b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.efcs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.eobs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.epos b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.esfc b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.eupd b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.fcst b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
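For reference, a minimal sketch (illustrative values) of the component-sourcing loop near the top of this file, which builds the control variable name DO_<component> and sources the matching lower-case config when it is set to YES:

    # Sketch only: the DO_${component} indirection used by the sourcing loop
    DO_WAVE="YES"
    component="WAVE"
    control="DO_${component}"
    if [[ $(eval echo \$$control) == "YES" ]]; then
      echo "config.$(echo "$component" | awk '{ print tolower($1) }')"   # prints "config.wave"
    fi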
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." 
+ export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." 
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
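+
+  # Worked example for the restart_interval loop above (illustrative values only,
+  # not part of the original logic): with hypothetical settings
+  # restart_interval_gfs=12, DOIAU=YES, IAU_OFFSET=6 and FHMAX_GFS=48, the loop
+  # starts at xfh=12+(6/2)=15 and steps by 12, so restart_interval becomes
+  # "15 27 39" (51 exceeds FHMAX_GFS and is dropped).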
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.fit2obs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.gempak b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.getic b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.gldas b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ice b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.init b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.init @@ -0,0 +1,54 @@ +#! 
/usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.landanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.landanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.landanlinit @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.landanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.metp b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
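+# Illustrative expansion of the METplus filename template above (hypothetical
+# case, for orientation only): with CDUMP=gfs, an init time of 2021032100 and a
+# 24-hour lead, model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2"
+# would resolve to "pgbf24.gfs.2021032100.grib2".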
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.nsst b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocn b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable is determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is time interval for applying increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocnanal b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size reolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocnanalbmat 
b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocnpost b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ocnpost @@ -0,0 +1,16 @@ +#! 
/usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.post b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.postsnd b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.prep b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.resources b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + 
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requiremtn + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
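# A quick sketch of the derivation the surrounding blocks repeat: tasks per node
# for a step come from the machine maximum divided by that step's thread count.
# The names below (nth_example, npe_node_example) are illustrative only and are
# not part of the workflow.
nth_example=4
npe_node_example=$(echo "${npe_node_max} / ${nth_example}" | bc)   # 128 / 4 = 32
# Plain bash integer arithmetic gives the same answer without the bc dependency,
# the style the newer config.ufs in this same change uses for its task counts:
(( npe_node_example = npe_node_max / nth_example ))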
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.sfcanl b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ufs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ufs new file mode 100644 index 0000000000..67ceb4c1a4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.ufs @@ -0,0 +1,373 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=14 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.vrfy b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wafs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wafsblending b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wave b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEM would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. 
$EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wavegempak b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.waveinit b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.waveprep b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/runcmds b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/runcmds new file mode 100644 index 0000000000..714bc3036c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_c3.xml -d v17_p8_c3.db +rocotostat -w v17_p8_c3.xml -d v17_p8_c3.db diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/v17_p8_c3.crontab b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/v17_p8_c3.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/v17_p8_c3.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/v17_p8_c3.xml b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/v17_p8_c3.xml new file mode 100644 index 0000000000..569c529b11 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x12_xjet_2wg_14wt/v17_p8_c3.xml @@ -0,0 +1,152 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 01:00:00 + xjet + 172:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 
f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 2:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/12x16 b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/12x16 new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.aero b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.aeroanl b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.anal b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ 
"${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.analcalc b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.analdiag b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.arch b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.atmanl b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.atmanlrun @@ -0,0 +1,11 @@ +#! 
/usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.atmensanl b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.awips b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. 
of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.base b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.base new file mode 100644 index 0000000000..ca44b8257d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.base @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." 
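+# Note: EXP_WARM_START=".false." above requests a cold start from initial
+# conditions rather than a warm start from existing restart files; the
+# cold-start check near the end of this file keys off this value together
+# with MODE and SDATE.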
+export assim_freq=6 +export PSLOT="v17_p8_c3_12x16_xjet" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_c3" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-72} +export FHMAX_GFS_06=${FHMAX_GFS_06:-72} +export FHMAX_GFS_12=${FHMAX_GFS_12:-72} +export FHMAX_GFS_18=${FHMAX_GFS_18:-72} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is 
defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
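+# For reference, the settings just below resolve as follows (values shown are
+# only examples):
+#   gfs_cyc=4   # -> STEP_GFS=$(( 24 / 4 )) = 6, i.e. a GFS forecast every 6 h
+#   gfs_cyc=1   # -> STEP_GFS=24 (one per day); gfs_cyc=0 disables the GFS cycle
+#   cyc=12      # -> FHMAX_GFS=$(eval echo \${FHMAX_GFS_12}), the per-cycle length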
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
+ +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. 
+export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
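+# In the resolution block that follows, OCNRES/ICERES are given in hundredths
+# of a degree (500 = 5.0 deg, 100 = 1.0 deg, 050 = 0.5 deg, 025 = 0.25 deg),
+# so the C384 and C768 atmospheres pair with the quarter-degree ocean/ice grids.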
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
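+# Note on the IAU settings above: IAU_FHROT is derived with "cut -c1", i.e. it
+# takes the first character of IAUFHRS (3 when IAUFHRS="3,6,9"); this only
+# works while the first listed IAU hour is a single digit.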
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.com b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx 
COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
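+# Reminder: this file is sourced from config.base when APP selects one of the
+# S2S* coupled configurations; the values here pre-seed variables that the
+# other config files assign with ${var:-default}, which is why empty values
+# must contain a space (see the note at the top of this file).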
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.earc b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ecen b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.echgres b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ediag b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.efcs b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.eobs b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.epos b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.esfc b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.eupd b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.fcst b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.fcst new file mode 100644 index 0000000000..55745cafde --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.fcst @@ -0,0 +1,429 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model_haiqin.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." 
+ export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".false." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF +#JKH export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export imfdeepcnv=5 + export imfshalcnv=5 +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +#JKH export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." 
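+# The microphysics defaults above are scheme-independent; the imp_physics branches below
+# override them where needed (e.g. Thompson, imp_physics=8, resets cal_pre=".false." and
+# random_clds=".false.").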
+ +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + #JKH ??? export dt_inner=40. ### JKH - 10dec + #JKH export sedi_semi=??? + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + #JKHexport FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
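+  # Worked example for the restart list built above (values are illustrative only):
+  # with restart_interval_gfs=12, IAU_OFFSET=6 and FHMAX_GFS=48, xfh starts at
+  # 12+(6/2)=15 and the loop yields restart_interval="15 27 39".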
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.fcst.org b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.fcst.org new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.fcst.org @@ -0,0 +1,364 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. 
+ if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." 
+ +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." 
+export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. 
### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." 
+ export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. ### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.fit2obs b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.gempak b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.getic b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.gldas b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ice b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.init b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.init @@ -0,0 +1,54 @@ +#! 
/usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.landanl b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.landanlinit b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.landanlrun b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.metp b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.nsst b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocn b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocnanal b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 # TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocnanalbmat b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b 
--- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocnpost b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.post b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.postsnd b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.prep b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.resources b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components; they usually land on the atmosphere tasks. + # However, it is suggested to limit the mediator PETS to 300, since larger counts can degrade performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + 
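# Worked example (illustration only; assumes the C768 gfs layout defined in config.ufs for this experiment: + # layout_x_gfs=12, layout_y_gfs=16, nthreads_fv3_gfs=4, WRITE_GROUP_GFS=4, WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10, QUILTING=.true.): + # ntasks_fv3_gfs = 12 * 16 * 6 = 1152, so FV3PETS = 1152 * 4 = 4608; ntasks_quilt_gfs = 4 * 10 * 6 = 240, so QUILTPETS = 240 * 4 = 960; + # ATMPETS = 4608 + 960 = 5568, and MEDPETS is then capped at 300. + 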
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
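+ # For reference, the eobs/eomg settings above request 480 tasks at 40 tasks per node (12 nodes), with 3 threads per task.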
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.sfcanl b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ufs b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ufs new file mode 100644 index 0000000000..a96ba126e2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.ufs @@ -0,0 +1,370 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + 
export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) 
+ echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.vrfy b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wafs b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wafsblending b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wave b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wavegempak b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wavegempak @@ -0,0 +1,13 @@ +#! 
/usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.waveinit b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.waveprep b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/input.nml b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/input.nml new file mode 100644 index 0000000000..891c60fee7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/input.nml @@ -0,0 +1,386 @@ +&atmos_model_nml + blocksize = 32 + chksum_debug = .false. + dycore_only = .false. + ccpp_suite = 'FV3_GFS_v17_p8_c3' +/ + +&diag_manager_nml + prepend_date = .false. + max_output_fields = 310 +/ + +&fms_io_nml + checksum_required = .false. + max_files_r = 100 + max_files_w = 100 +/ + +&mpp_io_nml +shuffle=1 +deflate_level=1 +/ + +&fms_nml + clock_grain = 'ROUTINE' + domains_stack_size = 3000000 + print_memory_usage = .false. +/ + +&fv_core_nml + layout = 3,8 + io_layout = 1,1 + npx = 97 + npy = 97 + ntiles = 6 + npz = 127 + dz_min = 6 + psm_bc = 1 + grid_type = -1 + make_nh = .true. + fv_debug = .false. + range_warn = .true. + reset_eta = .false. + n_sponge = 42 + nudge_qv = .true. + nudge_dz = .false. + tau = 10.0 + rf_cutoff = 7.5e2 + d2_bg_k1 = 0.20 + d2_bg_k2 = 0.04 + kord_tm = -9 + kord_mt = 9 + kord_wz = 9 + kord_tr = 9 + hydrostatic = .false. + phys_hydrostatic = .false. + use_hydro_pressure = .false. + beta = 0. + a_imp = 1. + p_fac = 0.1 + k_split = 2 + n_split = 6 + nwat = 6 + na_init = 1 + d_ext = 0. + dnats = 0 + fv_sg_adj = 450 + d2_bg = 0. + nord = 2 + dddmp = 0.1 + d4_bg = 0.12 + vtdm4 = 0.02 + delt_max = 0.002 + ke_bg = 0. + do_vort_damp = .true. + external_ic = .true. + external_eta = .true. + gfs_phil = .false. + nggps_ic = .true. + mountain = .false. + ncep_ic = .false. + d_con = 1. + hord_mt = 5 + hord_vt = 5 + hord_tm = 5 + hord_dp = -5 + hord_tr = 8 + adjust_dry_mass = .false. + dry_mass=98320.0 + consv_te = 1. + do_sat_adj = .false. + consv_am = .false. + fill = .true. + dwind_2d = .false. + print_freq = 6 + warm_start = .false. + no_dycore = .false. + z_tracer = .true. + agrid_vel_rst = .true. + read_increment = .false. + res_latlon_dynamics = '' +/ + +&external_ic_nml + filtered_terrain = .true. + levp = 128 + gfs_dwinds = .true. + checker_tr = .false. 
+ nt_checker = 0 +/ + +&gfs_physics_nml + fhzero = 6 + h2o_phys = .true. + ldiag3d = .false. + qdiag3d = .false. + print_diff_pgr = .false. + fhcyc = 24 + use_ufo = .true. + pre_rad = .false. + imp_physics = 8 + iovr = 3 + ltaerosol = .false. + lradar = .false. + ttendlim = -999 + dt_inner = 720 + sedi_semi = .true. + decfl = 10 + oz_phys = .false. + oz_phys_2015 = .true. + lsoil_lsm = 4 + do_mynnedmf = .false. + do_mynnsfclay = .false. + icloud_bl = 1 + bl_mynn_edmf = 1 + bl_mynn_tkeadvect = .true. + bl_mynn_edmf_mom = 1 + do_ugwp = .false. + do_tofd = .false. + gwd_opt = 2 + do_ugwp_v0 = .true. + do_ugwp_v1 = .false. + do_ugwp_v0_orog_only = .false. + do_ugwp_v0_nst_only = .false. + do_gsl_drag_ls_bl = .false. + do_gsl_drag_ss = .true. + do_gsl_drag_tofd = .false. + do_ugwp_v1_orog_only = .false. + min_lakeice = 0.15 + min_seaice = 0.15 + use_cice_alb = .false. + pdfcld = .false. + fhswr = 3600. + fhlwr = 3600. + ialb = 2 + iems = 2 + iaer = 1011 + icliq_sw = 2 + ico2 = 2 + isubc_sw = 2 + isubc_lw = 2 + isol = 2 + lwhtr = .true. + swhtr = .true. + cnvgwd = .true. + shal_cnv = .true. + cal_pre = .false. + redrag = .true. + dspheat = .true. + hybedmf = .false. + satmedmf = .true. + isatmedmf = 1 + lheatstrg = .false. + lseaspray = .true. + random_clds = .false. + trans_trac = .true. + cnvcld = .true. + imfshalcnv = 5 + imfdeepcnv = 5 + icoldpool = 0 + ras = .false. + cdmbgwd = 0.14,1.8,1.0,1.0 + prslrd0 = 0. + ivegsrc = 1 + isot = 1 + lsoil = 4 + lsm = 2 + iopt_dveg = 4 + iopt_crs = 2 + iopt_btr = 1 + iopt_run = 1 + iopt_sfc = 3 + iopt_trs = 2 + iopt_frz = 1 + iopt_inf = 1 + iopt_rad = 3 + iopt_alb = 1 + iopt_snf = 4 + iopt_tbot = 2 + iopt_stc = 3 + debug = .false. + nstf_name = 2,1,0,0,0 + nst_anl = .true. + psautco = 0.0008,0.0005 + prautco = 0.00015,0.00015 + lgfdlmprad = .false. + effr_in = .true. + ldiag_ugwp = .false. + fscav_aero = "*:0.3","so2:0.0","msa:0.0","dms:0.0","nh3:0.4","nh4:0.6","bc1:0.6","bc2:0.6","oc1:0.4","oc2:0.4","dust1:0.6","dust2:0.6","dust3:0.6","dust4:0.6","dust5:0.6","seas1:0.5","seas2:0.5","seas3:0.5","seas4:0.5","seas5:0.5" + do_sppt = .false. + do_shum = .false. + do_skeb = .false. + do_RRTMGP = .false. + active_gases = 'h2o_co2_o3_n2o_ch4_o2' + ngases = 6 + lw_file_gas = 'rrtmgp-data-lw-g128-210809.nc' + lw_file_clouds = 'rrtmgp-cloud-optics-coeffs-lw.nc' + sw_file_gas = 'rrtmgp-data-sw-g112-210809.nc' + sw_file_clouds = 'rrtmgp-cloud-optics-coeffs-sw.nc' + rrtmgp_nGptsSW = 112 + rrtmgp_nGptsLW = 128 + rrtmgp_nBandsLW = 16 + rrtmgp_nBandsSW = 14 + doGP_cldoptics_LUT = .true. + doGP_lwscat = .true. + doGP_sgs_cnv = .true. + use_med_flux = .false. + frac_grid = .true. + cplchm = .false. + cplflx = .false. + cplice = .false. + cplwav = .false. + cplwav2atm = .false. + cpllnd = .false. + do_ca = .true. + ca_global = .false. + ca_sgs = .true. + nca = 1 + ncells = 5 + nlives = 12 + nseed = 1 + nfracseed = 0.5 + nthresh = 18 + ca_trigger = .true. + nspinup = 1 + iseed_ca = 12345 + lndp_type = 0 + n_var_lndp = 0 +/ + +&cires_ugwp_nml + knob_ugwp_solver = 2 + knob_ugwp_source = 1,1,0,0 + knob_ugwp_wvspec = 1,25,25,25 + knob_ugwp_azdir = 2,4,4,4 + knob_ugwp_stoch = 0,0,0,0 + knob_ugwp_effac = 1,1,1,1 + knob_ugwp_doaxyz = 1 + knob_ugwp_doheat = 1 + knob_ugwp_dokdis = 1 + knob_ugwp_ndx4lh = 1 + knob_ugwp_version = 0 + launch_level = 54 +/ + +&gfdl_cloud_microphysics_nml + sedi_transport = .true. + do_sedi_heat = .false. + rad_snow = .true. + rad_graupel = .true. + rad_rain = .true. + const_vi = .false. + const_vs = .false. + const_vg = .false. + const_vr = .false. 
+ vi_max = 1. + vs_max = 2. + vg_max = 12. + vr_max = 12. + qi_lim = 1. + prog_ccn = .false. + do_qa = .true. + fast_sat_adj = .true. + tau_l2v = 225. + tau_v2l = 150. + tau_g2v = 900. + rthresh = 10.e-6 ! This is a key parameter for cloud water + dw_land = 0.16 + dw_ocean = 0.10 + ql_gen = 1.0e-3 + ql_mlt = 1.0e-3 + qi0_crt = 8.0E-5 + qs0_crt = 1.0e-3 + tau_i2s = 1000. + c_psaci = 0.05 + c_pgacs = 0.01 + rh_inc = 0.30 + rh_inr = 0.30 + rh_ins = 0.30 + ccn_l = 300. + ccn_o = 100. + c_paut = 0.5 + c_cracw = 0.8 + use_ppm = .false. + use_ccn = .true. + mono_prof = .true. + z_slope_liq = .true. + z_slope_ice = .true. + de_ice = .false. + fix_negative = .true. + icloud_f = 1 + mp_time = 150. + reiflag = 2 +/ + +&interpolator_nml + interp_method = 'conserve_great_circle' +/ + +&namsfc + FNGLAC = 'global_glacier.2x2.grb' + FNMXIC = 'global_maxice.2x2.grb' + FNTSFC = 'RTGSST.1982.2012.monthly.clim.grb' + FNSNOC = 'global_snoclim.1.875.grb' + FNZORC = 'igbp' + FNALBC = 'C96.snowfree_albedo.tileX.nc' + FNALBC2 = 'C96.facsf.tileX.nc' + FNAISC = 'IMS-NIC.blended.ice.monthly.clim.grb' + FNTG3C = 'C96.substrate_temperature.tileX.nc' + FNVEGC = 'C96.vegetation_greenness.tileX.nc' + FNVETC = 'C96.vegetation_type.tileX.nc' + FNSOTC = 'C96.soil_type.tileX.nc' + FNSMCC = 'global_soilmgldas.statsgo.t1534.3072.1536.grb' + FNMSKH = 'global_slmask.t1534.3072.1536.grb' + FNTSFA = '' + FNACNA = '' + FNSNOA = '' + FNVMNC = 'C96.vegetation_greenness.tileX.nc' + FNVMXC = 'C96.vegetation_greenness.tileX.nc' + FNSLPC = 'C96.slope_type.tileX.nc' + FNABSC = 'C96.maximum_snow_albedo.tileX.nc' + LDEBUG =.false. + FSMCL(2) = 99999 + FSMCL(3) = 99999 + FSMCL(4) = 99999 + LANDICE = .false. + FTSFS = 90 + FAISL = 99999 + FAISS = 99999 + FSNOL = 99999 + FSNOS = 99999 + FSICL = 0 + FSICS = 0 + FTSFL = 99999 + FVETL = 99999 + FSOTL = 99999 + FvmnL = 99999 + FvmxL = 99999 + FSLPL = 99999 + FABSL = 99999 +/ + +&fv_grid_nml + grid_file = 'INPUT/grid_spec.nc' +/ + +&nam_stochy +/ + +&nam_sfcperts + lndp_type = 0 + lndp_model_type = 0 + LNDP_TAU=21600, + LNDP_LSCALE=500000, + ISEED_LNDP=2010, + lndp_var_list = 'XXX' + lndp_prt_list = -999 +/ + +&MOM_input_nml + output_directory = 'MOM6_OUTPUT/', + input_filename = 'n' + restart_input_dir = 'INPUT/', + restart_output_dir = 'RESTART/', + parameter_filename = 'INPUT/MOM_input', + 'INPUT/MOM_override'/ diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/jkhINFO b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/jkhINFO new file mode 100644 index 0000000000..e37930dc98 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/jkhINFO @@ -0,0 +1,4 @@ + +try c3 + - set progsigma to .true. for now + diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/logs/2022111000.log b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/logs/2022111000.log new file mode 100644 index 0000000000..d362807934 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/logs/2022111000.log @@ -0,0 +1,74 @@ +2023-05-28 01:31:38 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 01:31:38 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:36854 +2023-05-28 01:33:22 +0000 :: fe3 :: Submission status of previously pending gfsfcst is failure! 
sbatch: error: Batch job submission failed: Invalid qos specification +2023-05-28 01:33:22 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 01:33:23 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28483216 +2023-05-28 01:34:46 +0000 :: fe3 :: Task gfsfcst, jobid=28483216, in state QUEUED (PENDING) +2023-05-28 06:05:19 +0000 :: fe2 :: Submitting gfsfcst +2023-05-28 06:05:22 +0000 :: fe2 :: Submission of gfsfcst succeeded, jobid=28492564 +2023-05-28 06:38:42 +0000 :: fe3 :: Task gfsfcst, jobid=28492564, in state FAILED (FAILED), ran for 219.0 seconds, exit status=11, try=1 (of 2) +2023-05-28 06:38:42 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 06:38:42 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:33495 +2023-05-28 07:30:29 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28493642 +2023-05-28 07:30:30 +0000 :: fe3 :: Task gfsfcst, jobid=28493642, in state DEAD (TIMEOUT), ran for 2404.0 seconds, exit status=15, try=2 (of 2) +2023-05-28 07:34:01 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 07:34:01 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:39381 +2023-05-28 19:08:14 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28495562 +2023-05-28 19:08:14 +0000 :: fe3 :: Task gfsfcst, jobid=28495562, in state FAILED (TIMEOUT), ran for 2413.0 seconds, exit status=255, try=1 (of 2) +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f000-f000 +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f006-f006 +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f012-f012 +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f018-f018 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28521683 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f000-f000 succeeded, jobid=28521684 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f006-f006 succeeded, jobid=28521685 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f012-f012 succeeded, jobid=28521686 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f018-f018 succeeded, jobid=28521687 +2023-05-29 07:52:24 +0000 :: fe3 :: Submitting gfsfcst +2023-05-29 07:52:24 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28547487 +2023-05-29 08:33:19 +0000 :: fe3 :: Task gfsfcst, jobid=28547487, in state FAILED (FAILED), ran for 263.0 seconds, exit status=11, try=1 (of 2) +2023-05-29 08:33:19 +0000 :: fe3 :: Submitting gfsfcst +2023-05-29 08:33:19 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:46395 +2023-05-29 18:37:39 +0000 :: fe5 :: Submitting gfsfcst +2023-05-29 18:37:39 +0000 :: fe5 :: Submission of gfsfcst succeeded, jobid=28570547 +2023-05-29 18:39:30 +0000 :: fe5 :: Task gfsfcst, jobid=28570547, in state FAILED (FAILED), ran for 12.0 seconds, exit status=32512, try=1 (of 2) +2023-05-29 18:39:30 +0000 :: fe5 :: Submitting gfsfcst +2023-05-29 18:39:30 +0000 :: fe5 :: Submission status of gfsfcst is pending at druby://fe5:34989 +2023-05-29 22:25:28 +0000 :: fe2 :: Submission status of previously pending gfsfcst is success, jobid=28570561 +2023-05-29 22:25:28 +0000 :: fe2 :: Task gfsfcst, jobid=28570561, in state DEAD (FAILED), ran for 212.0 seconds, exit status=11, try=2 (of 2) +2023-05-29 22:29:20 +0000 :: fe2 :: Submitting gfsfcst +2023-05-29 22:29:20 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:35151 +2023-05-29 23:06:16 +0000 :: fe2 :: Submitting gfsfcst 
+2023-05-29 23:06:17 +0000 :: fe2 :: Submission of gfsfcst failed! sbatch: error: QOSMaxWallDurationPerJobLimit +sbatch: error: Batch job submission failed: Job violates accounting/QOS policy (job submit limit, user's size and/or time limits) +2023-05-29 23:06:38 +0000 :: fe2 :: Submitting gfsfcst +2023-05-29 23:06:38 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:43226 +2023-05-29 23:07:07 +0000 :: fe2 :: Submission status of previously pending gfsfcst is success, jobid=28579860 +2023-05-29 23:07:07 +0000 :: fe2 :: Task gfsfcst, jobid=28579860, in state QUEUED (PENDING) +2023-05-29 23:07:56 +0000 :: fe2 :: Task gfsfcst, jobid=28579860, in state FAILED (CANCELLED), ran for 0.0 seconds, exit status=255, try=1 (of 2) +2023-05-29 23:07:56 +0000 :: fe2 :: Submitting gfsfcst +2023-05-29 23:07:56 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:42906 +2023-05-29 23:08:42 +0000 :: fe2 :: Submission status of previously pending gfsfcst is success, jobid=28579863 +2023-05-29 23:08:42 +0000 :: fe2 :: Task gfsfcst, jobid=28579863, in state QUEUED (PENDING) +2023-05-29 23:10:21 +0000 :: fe2 :: Task gfsfcst, jobid=28579863, in state DEAD (FAILED), ran for 56.0 seconds, exit status=35072, try=2 (of 2) +2023-05-29 23:11:07 +0000 :: fe2 :: Submitting gfsfcst +2023-05-29 23:11:07 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:45028 +2023-05-30 08:17:50 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28580141 +2023-05-30 08:17:51 +0000 :: fe3 :: Task gfsfcst, jobid=28580141, in state FAILED (FAILED), ran for 2445.0 seconds, exit status=11, try=1 (of 2) +2023-05-30 08:17:51 +0000 :: fe3 :: Submitting gfsfcst +2023-05-30 08:17:51 +0000 :: fe3 :: Submitting gfspost_f000-f006 +2023-05-30 08:17:51 +0000 :: fe3 :: Submitting gfspost_f012-f018 +2023-05-30 08:17:51 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28599193 +2023-05-30 08:17:51 +0000 :: fe3 :: Submission of gfspost_f000-f006 succeeded, jobid=28599194 +2023-05-30 08:17:52 +0000 :: fe3 :: Submission of gfspost_f012-f018 succeeded, jobid=28599195 +2023-05-31 08:40:56 +0000 :: fe2 :: Task gfsfcst, jobid=28599193, in state DEAD (FAILED), ran for 2273.0 seconds, exit status=11, try=2 (of 2) +2023-05-31 08:40:56 +0000 :: fe2 :: Task gfspost_f000-f006, jobid=28599194, in state SUCCEEDED (COMPLETED), ran for 257.0 seconds, exit status=0, try=1 (of 2) +2023-05-31 08:40:56 +0000 :: fe2 :: Task gfspost_f012-f018, jobid=28599195, in state SUCCEEDED (COMPLETED), ran for 269.0 seconds, exit status=0, try=1 (of 2) +2023-05-31 08:41:14 +0000 :: fe2 :: Forcibly submitting gfsfcst +2023-05-31 08:41:14 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:42295 +2023-05-31 08:45:43 +0000 :: fe2 :: Task gfsfcst, jobid=28651211, in state FAILED (CANCELLED), ran for 0.0 seconds, exit status=255, try=1 (of 2) +2023-05-31 08:45:45 +0000 :: fe2 :: Task gfspost_f000-f006, jobid=28651212, in state RUNNING (RUNNING) +2023-05-31 08:45:45 +0000 :: fe2 :: Task gfspost_f012-f018, jobid=28651213, in state RUNNING (RUNNING) +2023-05-31 08:46:57 +0000 :: fe2 :: Submitting gfsfcst +2023-05-31 08:46:58 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:37474 diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/runcmds b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/runcmds new file mode 100644 index 0000000000..714bc3036c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_c3.xml -d v17_p8_c3.db 
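+# Illustrative usage note (assuming standard rocoto command-line options): rocotorun above
+# advances the workflow one iteration, while rocotostat below reports per-task state.
+# Status can also be narrowed to a single cycle and task, e.g.:
+#   rocotostat -w v17_p8_c3.xml -d v17_p8_c3.db -c 202211100000 -t gfsfcst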
+rocotostat -w v17_p8_c3.xml -d v17_p8_c3.db diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/v17_p8_c3.crontab b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/v17_p8_c3.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/v17_p8_c3.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/v17_p8_c3.db b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/v17_p8_c3.db new file mode 100644 index 0000000000..ab472a02ee Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/v17_p8_c3.db differ diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/v17_p8_c3.xml b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/v17_p8_c3.xml new file mode 100644 index 0000000000..42c60defff --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/v17_p8_c3.xml @@ -0,0 +1,155 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + hfv3gfs + + batch + 02:00:00 + 232:ppn=24:tpp=1 + xjet + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f006 _f012-f018 _f024-f030 _f036-f042 _f048-f054 _f060-f066 _f072-f078 _f084-f090 _f096-f102 _f108-f114 _f120-f126 _f132-f138 _f144-f150 _f156-f162 _f168-f168 + f006 f018 f030 f042 f054 f066 f078 f090 f102 f114 f126 f138 f150 f162 f168 + f000_f006 f012_f018 f024_f030 f036_f042 f048_f054 f060_f066 f072_f078 f084_f090 f096_f102 f108_f114 f120_f126 f132_f138 f144_f150 f156_f162 f168_f168 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/v17_p8_c3_lock.db b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/v17_p8_c3_lock.db new file mode 100644 index 0000000000..97ee21bbf8 Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_c3_12x16_xjet/v17_p8_c3_lock.db differ diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/8x8 b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/8x8 
new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.aero b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: '<tracer name>:<factor>'. Use <tracer name> = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.aeroanl b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. 
$EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.aerosol_init @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.anal b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. 
in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. 
+ # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assimilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time. + # + # Turn off assimilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.analcalc b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.analdiag b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. $EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.arch b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. 
$EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.atmanl b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.atmensanl b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.atmensanl @@ -0,0 +1,22 @@ +#! 
/usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.awips b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.base b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.base new file mode 100644 index 0000000000..a5fa40ac7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.base @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." 
+export assim_freq=6 +export PSLOT="v17_p8_c3_8x8_xjet" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_c3" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# Turn on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# scripts default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is 
defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
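+ +# Worked example (illustration only, derived from the STEP_GFS arithmetic below): STEP_GFS is computed as 24/gfs_cyc, so gfs_cyc=1 gives STEP_GFS=24 (one gfs forecast per day, 00Z only), gfs_cyc=4 gives STEP_GFS=6 (a gfs forecast every cycle), and gfs_cyc=0 leaves STEP_GFS=0 (no gfs forecast cycle).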
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
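+ +# Note (assumption, not an upstream comment): blanking FNTSFA above when DONST="YES" removes the external (rtgsst-style) SST analysis from surface cycling, so the NSST analysis is expected to supply the SSTs instead.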
+ +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. 
+export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
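+ +# Note (illustrative; the encoding is an assumption): the OCNRES values assigned in the case block below appear to give the ocean grid spacing in hundredths of a degree, e.g. 500 = 5.0 deg, 100 = 1.0 deg, 050 = 0.5 deg, 025 = 0.25 deg; ICERES is then simply set to match OCNRES.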
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
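+ +# Worked example for the IAU settings above (illustration only): with IAUFHRS="3,6,9" the "cut -c1" picks the first character, so IAU_FHROT=3, i.e. the IAU forecast window starts at the first increment hour (interpretation of IAU_FHROT is an assumption); note the single-character cut assumes the first entry has one digit.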
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# Turn on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# scripts default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
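+ +# Example of how the archive knobs above combine (illustration only): with ARCH_CYC=00, ARCH_WARMICFREQ=4 and ARCH_FCSTICFREQ=1, warm-start ICs are archived from the 00Z cycle every fourth day, while forecast-only ICs are archived daily.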
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# Turn on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# scripts default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.com b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx 
COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
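+ +# Note (inferred from config.base, stated as an assumption): these settings take effect because the S2S* branch of the APP case block in config.base runs "source ${EXPDIR}/config.defaults.s2sw", so the values here override the corresponding defaults in the config.* files named in the section comments below.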
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.earc b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ecen b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.echgres b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ediag b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.efcs b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.eobs b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.epos b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.esfc b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.eupd b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.fcst b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.fcst new file mode 100644 index 0000000000..55745cafde --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.fcst @@ -0,0 +1,429 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model_haiqin.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." 
+ export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".false." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF +#JKH export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export imfdeepcnv=5 + export imfshalcnv=5 +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +#JKH export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." 
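# Worked example (illustrative values) for the launch_level computation in the gwd_opt
# blocks above. launch_level=$(echo "$LEVS/2.35" | bc) uses bc's default scale of 0, so
# the division truncates to an integer:
#   LEVS=128  ->  echo "128/2.35" | bc  ->  54
#   LEVS=65   ->  echo "65/2.35"  | bc  ->  27
# i.e. the gravity-wave launch level is roughly LEVS/2.35, rounded down.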
+ +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + #JKH ??? export dt_inner=40. ### JKH - 10dec + #JKH export sedi_semi=??? + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + #JKHexport FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
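# Worked example (hypothetical values) of how the GFS restart list above is built:
# with restart_interval_gfs=12, FHMAX_GFS=48, and DOIAU=NO (so IAU_OFFSET=0), the loop
# produces restart_interval="12 24 36 48" (with a leading space from the appends);
# with DOIAU=YES and IAU_OFFSET=6 the first hour is shifted by IAU_OFFSET/2=3, giving
# restart_interval="15 27 39" since 51 exceeds FHMAX_GFS.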
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.fcst.org b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.fcst.org new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.fcst.org @@ -0,0 +1,364 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
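# Note on the component-config loop above (config.fcst.org follows the same pattern as
# config.fcst): the indirection builds the flag name and then evaluates it, e.g. for
# component=WAVE:
#   control="DO_WAVE"
#   $(eval echo \$$control)   # expands to the value of DO_WAVE, e.g. "YES"
#   . $EXPDIR/config.wave     # sourced because the flag is "YES"
# In bash the same lookup can also be written without eval as "${!control}"; for simple
# YES/NO values the two forms behave identically.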
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. 
+ if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." 
+ +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." 
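# Note on the WAVE_CDUMP case at the top of this file: "${CDUMP/enkf}" is bash pattern
# substitution that deletes the first "enkf" from CDUMP, so (illustrative values)
#   CDUMP=gfs      -> ${CDUMP/enkf} = gfs
#   CDUMP=enkfgdas -> ${CDUMP/enkf} = gdas
# Waves therefore stay on when WAVE_CDUMP is "both" or matches the de-prefixed CDUMP,
# and DO_WAVE is set to "NO" otherwise.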
+export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. 
### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." 
+ export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. ### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
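# Concrete readings of the restart_interval forms documented in the GDAS branch above
# (values are illustrative):
#   restart_interval=6        -> restarts every 6 h and at the end of the forecast
#   restart_interval="12 -1"  -> a single restart at forecast hour 12
#   restart_interval="3 6"    -> restarts at hours 3 and 6 (the IAU case set above)
# One hedged caveat: the DO_AERO guard [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]
# is a substring test on the whole list, so with restart_interval="12 24 36" and
# STEP_GFS=6 the "6" inside "36" already matches and STEP_GFS would not be prepended.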
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.fit2obs b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.gempak b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.getic b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.gldas b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ice b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.init b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.init @@ -0,0 +1,54 @@ +#! 
/usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.landanl b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.landanlinit b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.landanlrun b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.metp b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.nsst b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocn b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable is determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is time interval for applying increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocnanal b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size reolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocnanalbmat b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null 
+++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocnpost b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.post b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.postsnd b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.prep b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.resources b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + 
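+  # Worked example (illustrative, not from the original config): with the
+  # C768 gdas defaults from config.ufs in this directory (layout_x=8,
+  # layout_y=12, nthreads_fv3=4, WRITE_GROUP=2,
+  # WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10), quilting on, and no
+  # WAV/OCN/ICE components, the accounting above gives:
+  #   FV3PETS    = (8 * 12 * 6) * 4     = 2304
+  #   QUILTPETS  = (2 * 10 * 6) * 4     = 480
+  #   NTASKS_TOT = ATMPETS = 2304 + 480 = 2784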
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requiremtn + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.sfcanl b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ufs b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ufs new file mode 100644 index 0000000000..580431cf19 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.ufs @@ -0,0 +1,371 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=8 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=8 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + 
export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + 
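+ # Illustrative aside (editorial sketch, not upstream code): for this 025
+ # ocean case the time steps above nest evenly -- DT_DYNAM_MOM6=900 s,
+ # DT_THERM_MOM6=1800 s, OCNTIM=1800 s -- i.e. two dynamic sub-steps per
+ # thermodynamic step per OCNTIM interval (treating OCNTIM as the ocean
+ # coupling/step length is an assumption here). A quick guard could be:
+ #   (( OCNTIM % DT_DYNAM_MOM6 == 0 )) || echo "WARNING: OCNTIM not a multiple of DT_DYNAM_MOM6"
+ # Left commented out so the behaviour of the sourced config is unchanged.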
MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.vrfy b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wafs b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wafsblending b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wave b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEM would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wavegempak b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wavegempak @@ -0,0 +1,13 @@ +#! 
/usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.waveinit b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.waveprep b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/input.nml b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/input.nml new file mode 100644 index 0000000000..891c60fee7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/input.nml @@ -0,0 +1,386 @@ +&atmos_model_nml + blocksize = 32 + chksum_debug = .false. + dycore_only = .false. + ccpp_suite = 'FV3_GFS_v17_p8_c3' +/ + +&diag_manager_nml + prepend_date = .false. + max_output_fields = 310 +/ + +&fms_io_nml + checksum_required = .false. + max_files_r = 100 + max_files_w = 100 +/ + +&mpp_io_nml +shuffle=1 +deflate_level=1 +/ + +&fms_nml + clock_grain = 'ROUTINE' + domains_stack_size = 3000000 + print_memory_usage = .false. +/ + +&fv_core_nml + layout = 3,8 + io_layout = 1,1 + npx = 97 + npy = 97 + ntiles = 6 + npz = 127 + dz_min = 6 + psm_bc = 1 + grid_type = -1 + make_nh = .true. + fv_debug = .false. + range_warn = .true. + reset_eta = .false. + n_sponge = 42 + nudge_qv = .true. + nudge_dz = .false. + tau = 10.0 + rf_cutoff = 7.5e2 + d2_bg_k1 = 0.20 + d2_bg_k2 = 0.04 + kord_tm = -9 + kord_mt = 9 + kord_wz = 9 + kord_tr = 9 + hydrostatic = .false. + phys_hydrostatic = .false. + use_hydro_pressure = .false. + beta = 0. + a_imp = 1. + p_fac = 0.1 + k_split = 2 + n_split = 6 + nwat = 6 + na_init = 1 + d_ext = 0. + dnats = 0 + fv_sg_adj = 450 + d2_bg = 0. + nord = 2 + dddmp = 0.1 + d4_bg = 0.12 + vtdm4 = 0.02 + delt_max = 0.002 + ke_bg = 0. + do_vort_damp = .true. + external_ic = .true. + external_eta = .true. + gfs_phil = .false. + nggps_ic = .true. + mountain = .false. + ncep_ic = .false. + d_con = 1. + hord_mt = 5 + hord_vt = 5 + hord_tm = 5 + hord_dp = -5 + hord_tr = 8 + adjust_dry_mass = .false. + dry_mass=98320.0 + consv_te = 1. + do_sat_adj = .false. + consv_am = .false. + fill = .true. + dwind_2d = .false. + print_freq = 6 + warm_start = .false. + no_dycore = .false. + z_tracer = .true. + agrid_vel_rst = .true. + read_increment = .false. + res_latlon_dynamics = '' +/ + +&external_ic_nml + filtered_terrain = .true. + levp = 128 + gfs_dwinds = .true. + checker_tr = .false. + nt_checker = 0 +/ + +&gfs_physics_nml + fhzero = 6 + h2o_phys = .true. 
+ ldiag3d = .false. + qdiag3d = .false. + print_diff_pgr = .false. + fhcyc = 24 + use_ufo = .true. + pre_rad = .false. + imp_physics = 8 + iovr = 3 + ltaerosol = .false. + lradar = .false. + ttendlim = -999 + dt_inner = 720 + sedi_semi = .true. + decfl = 10 + oz_phys = .false. + oz_phys_2015 = .true. + lsoil_lsm = 4 + do_mynnedmf = .false. + do_mynnsfclay = .false. + icloud_bl = 1 + bl_mynn_edmf = 1 + bl_mynn_tkeadvect = .true. + bl_mynn_edmf_mom = 1 + do_ugwp = .false. + do_tofd = .false. + gwd_opt = 2 + do_ugwp_v0 = .true. + do_ugwp_v1 = .false. + do_ugwp_v0_orog_only = .false. + do_ugwp_v0_nst_only = .false. + do_gsl_drag_ls_bl = .false. + do_gsl_drag_ss = .true. + do_gsl_drag_tofd = .false. + do_ugwp_v1_orog_only = .false. + min_lakeice = 0.15 + min_seaice = 0.15 + use_cice_alb = .false. + pdfcld = .false. + fhswr = 3600. + fhlwr = 3600. + ialb = 2 + iems = 2 + iaer = 1011 + icliq_sw = 2 + ico2 = 2 + isubc_sw = 2 + isubc_lw = 2 + isol = 2 + lwhtr = .true. + swhtr = .true. + cnvgwd = .true. + shal_cnv = .true. + cal_pre = .false. + redrag = .true. + dspheat = .true. + hybedmf = .false. + satmedmf = .true. + isatmedmf = 1 + lheatstrg = .false. + lseaspray = .true. + random_clds = .false. + trans_trac = .true. + cnvcld = .true. + imfshalcnv = 5 + imfdeepcnv = 5 + icoldpool = 0 + ras = .false. + cdmbgwd = 0.14,1.8,1.0,1.0 + prslrd0 = 0. + ivegsrc = 1 + isot = 1 + lsoil = 4 + lsm = 2 + iopt_dveg = 4 + iopt_crs = 2 + iopt_btr = 1 + iopt_run = 1 + iopt_sfc = 3 + iopt_trs = 2 + iopt_frz = 1 + iopt_inf = 1 + iopt_rad = 3 + iopt_alb = 1 + iopt_snf = 4 + iopt_tbot = 2 + iopt_stc = 3 + debug = .false. + nstf_name = 2,1,0,0,0 + nst_anl = .true. + psautco = 0.0008,0.0005 + prautco = 0.00015,0.00015 + lgfdlmprad = .false. + effr_in = .true. + ldiag_ugwp = .false. + fscav_aero = "*:0.3","so2:0.0","msa:0.0","dms:0.0","nh3:0.4","nh4:0.6","bc1:0.6","bc2:0.6","oc1:0.4","oc2:0.4","dust1:0.6","dust2:0.6","dust3:0.6","dust4:0.6","dust5:0.6","seas1:0.5","seas2:0.5","seas3:0.5","seas4:0.5","seas5:0.5" + do_sppt = .false. + do_shum = .false. + do_skeb = .false. + do_RRTMGP = .false. + active_gases = 'h2o_co2_o3_n2o_ch4_o2' + ngases = 6 + lw_file_gas = 'rrtmgp-data-lw-g128-210809.nc' + lw_file_clouds = 'rrtmgp-cloud-optics-coeffs-lw.nc' + sw_file_gas = 'rrtmgp-data-sw-g112-210809.nc' + sw_file_clouds = 'rrtmgp-cloud-optics-coeffs-sw.nc' + rrtmgp_nGptsSW = 112 + rrtmgp_nGptsLW = 128 + rrtmgp_nBandsLW = 16 + rrtmgp_nBandsSW = 14 + doGP_cldoptics_LUT = .true. + doGP_lwscat = .true. + doGP_sgs_cnv = .true. + use_med_flux = .false. + frac_grid = .true. + cplchm = .false. + cplflx = .false. + cplice = .false. + cplwav = .false. + cplwav2atm = .false. + cpllnd = .false. + do_ca = .true. + ca_global = .false. + ca_sgs = .true. + nca = 1 + ncells = 5 + nlives = 12 + nseed = 1 + nfracseed = 0.5 + nthresh = 18 + ca_trigger = .true. + nspinup = 1 + iseed_ca = 12345 + lndp_type = 0 + n_var_lndp = 0 +/ + +&cires_ugwp_nml + knob_ugwp_solver = 2 + knob_ugwp_source = 1,1,0,0 + knob_ugwp_wvspec = 1,25,25,25 + knob_ugwp_azdir = 2,4,4,4 + knob_ugwp_stoch = 0,0,0,0 + knob_ugwp_effac = 1,1,1,1 + knob_ugwp_doaxyz = 1 + knob_ugwp_doheat = 1 + knob_ugwp_dokdis = 1 + knob_ugwp_ndx4lh = 1 + knob_ugwp_version = 0 + launch_level = 54 +/ + +&gfdl_cloud_microphysics_nml + sedi_transport = .true. + do_sedi_heat = .false. + rad_snow = .true. + rad_graupel = .true. + rad_rain = .true. + const_vi = .false. + const_vs = .false. + const_vg = .false. + const_vr = .false. + vi_max = 1. + vs_max = 2. + vg_max = 12. + vr_max = 12. + qi_lim = 1. 
+ prog_ccn = .false. + do_qa = .true. + fast_sat_adj = .true. + tau_l2v = 225. + tau_v2l = 150. + tau_g2v = 900. + rthresh = 10.e-6 ! This is a key parameter for cloud water + dw_land = 0.16 + dw_ocean = 0.10 + ql_gen = 1.0e-3 + ql_mlt = 1.0e-3 + qi0_crt = 8.0E-5 + qs0_crt = 1.0e-3 + tau_i2s = 1000. + c_psaci = 0.05 + c_pgacs = 0.01 + rh_inc = 0.30 + rh_inr = 0.30 + rh_ins = 0.30 + ccn_l = 300. + ccn_o = 100. + c_paut = 0.5 + c_cracw = 0.8 + use_ppm = .false. + use_ccn = .true. + mono_prof = .true. + z_slope_liq = .true. + z_slope_ice = .true. + de_ice = .false. + fix_negative = .true. + icloud_f = 1 + mp_time = 150. + reiflag = 2 +/ + +&interpolator_nml + interp_method = 'conserve_great_circle' +/ + +&namsfc + FNGLAC = 'global_glacier.2x2.grb' + FNMXIC = 'global_maxice.2x2.grb' + FNTSFC = 'RTGSST.1982.2012.monthly.clim.grb' + FNSNOC = 'global_snoclim.1.875.grb' + FNZORC = 'igbp' + FNALBC = 'C96.snowfree_albedo.tileX.nc' + FNALBC2 = 'C96.facsf.tileX.nc' + FNAISC = 'IMS-NIC.blended.ice.monthly.clim.grb' + FNTG3C = 'C96.substrate_temperature.tileX.nc' + FNVEGC = 'C96.vegetation_greenness.tileX.nc' + FNVETC = 'C96.vegetation_type.tileX.nc' + FNSOTC = 'C96.soil_type.tileX.nc' + FNSMCC = 'global_soilmgldas.statsgo.t1534.3072.1536.grb' + FNMSKH = 'global_slmask.t1534.3072.1536.grb' + FNTSFA = '' + FNACNA = '' + FNSNOA = '' + FNVMNC = 'C96.vegetation_greenness.tileX.nc' + FNVMXC = 'C96.vegetation_greenness.tileX.nc' + FNSLPC = 'C96.slope_type.tileX.nc' + FNABSC = 'C96.maximum_snow_albedo.tileX.nc' + LDEBUG =.false. + FSMCL(2) = 99999 + FSMCL(3) = 99999 + FSMCL(4) = 99999 + LANDICE = .false. + FTSFS = 90 + FAISL = 99999 + FAISS = 99999 + FSNOL = 99999 + FSNOS = 99999 + FSICL = 0 + FSICS = 0 + FTSFL = 99999 + FVETL = 99999 + FSOTL = 99999 + FvmnL = 99999 + FvmxL = 99999 + FSLPL = 99999 + FABSL = 99999 +/ + +&fv_grid_nml + grid_file = 'INPUT/grid_spec.nc' +/ + +&nam_stochy +/ + +&nam_sfcperts + lndp_type = 0 + lndp_model_type = 0 + LNDP_TAU=21600, + LNDP_LSCALE=500000, + ISEED_LNDP=2010, + lndp_var_list = 'XXX' + lndp_prt_list = -999 +/ + +&MOM_input_nml + output_directory = 'MOM6_OUTPUT/', + input_filename = 'n' + restart_input_dir = 'INPUT/', + restart_output_dir = 'RESTART/', + parameter_filename = 'INPUT/MOM_input', + 'INPUT/MOM_override'/ diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/jkhINFO b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/jkhINFO new file mode 100644 index 0000000000..e37930dc98 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/jkhINFO @@ -0,0 +1,4 @@ + +try c3 + - set progsigma to .true. for now + diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/logs/2022111000.log b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/logs/2022111000.log new file mode 100644 index 0000000000..4bcd9b6625 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/logs/2022111000.log @@ -0,0 +1,49 @@ +2023-05-28 01:31:38 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 01:31:38 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:36854 +2023-05-28 01:33:22 +0000 :: fe3 :: Submission status of previously pending gfsfcst is failure! 
sbatch: error: Batch job submission failed: Invalid qos specification +2023-05-28 01:33:22 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 01:33:23 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28483216 +2023-05-28 01:34:46 +0000 :: fe3 :: Task gfsfcst, jobid=28483216, in state QUEUED (PENDING) +2023-05-28 06:05:19 +0000 :: fe2 :: Submitting gfsfcst +2023-05-28 06:05:22 +0000 :: fe2 :: Submission of gfsfcst succeeded, jobid=28492564 +2023-05-28 06:38:42 +0000 :: fe3 :: Task gfsfcst, jobid=28492564, in state FAILED (FAILED), ran for 219.0 seconds, exit status=11, try=1 (of 2) +2023-05-28 06:38:42 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 06:38:42 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:33495 +2023-05-28 07:30:29 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28493642 +2023-05-28 07:30:30 +0000 :: fe3 :: Task gfsfcst, jobid=28493642, in state DEAD (TIMEOUT), ran for 2404.0 seconds, exit status=15, try=2 (of 2) +2023-05-28 07:34:01 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 07:34:01 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:39381 +2023-05-28 19:08:14 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28495562 +2023-05-28 19:08:14 +0000 :: fe3 :: Task gfsfcst, jobid=28495562, in state FAILED (TIMEOUT), ran for 2413.0 seconds, exit status=255, try=1 (of 2) +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f000-f000 +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f006-f006 +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f012-f012 +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f018-f018 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28521683 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f000-f000 succeeded, jobid=28521684 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f006-f006 succeeded, jobid=28521685 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f012-f012 succeeded, jobid=28521686 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f018-f018 succeeded, jobid=28521687 +2023-05-29 07:52:24 +0000 :: fe3 :: Submitting gfsfcst +2023-05-29 07:52:24 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28547487 +2023-05-29 08:33:19 +0000 :: fe3 :: Task gfsfcst, jobid=28547487, in state FAILED (FAILED), ran for 263.0 seconds, exit status=11, try=1 (of 2) +2023-05-29 08:33:19 +0000 :: fe3 :: Submitting gfsfcst +2023-05-29 08:33:19 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:46395 +2023-05-29 18:37:39 +0000 :: fe5 :: Submitting gfsfcst +2023-05-29 18:37:39 +0000 :: fe5 :: Submission of gfsfcst succeeded, jobid=28570547 +2023-05-29 18:39:30 +0000 :: fe5 :: Task gfsfcst, jobid=28570547, in state FAILED (FAILED), ran for 12.0 seconds, exit status=32512, try=1 (of 2) +2023-05-29 18:39:30 +0000 :: fe5 :: Submitting gfsfcst +2023-05-29 18:39:30 +0000 :: fe5 :: Submission status of gfsfcst is pending at druby://fe5:34989 +2023-05-29 22:25:28 +0000 :: fe2 :: Submission status of previously pending gfsfcst is success, jobid=28570561 +2023-05-29 22:25:28 +0000 :: fe2 :: Task gfsfcst, jobid=28570561, in state DEAD (FAILED), ran for 212.0 seconds, exit status=11, try=2 (of 2) +2023-05-29 22:29:20 +0000 :: fe2 :: Submitting gfsfcst +2023-05-29 22:29:20 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:35151 +2023-05-31 08:39:35 +0000 :: fe2 :: Submission status of 
previously pending gfsfcst is success, jobid=28578381 +2023-05-31 08:39:35 +0000 :: fe2 :: Task gfsfcst, jobid=28578381, in state FAILED (TIMEOUT), ran for 1808.0 seconds, exit status=255, try=1 (of 2) +2023-05-31 08:39:36 +0000 :: fe2 :: Submitting gfsfcst +2023-05-31 08:39:36 +0000 :: fe2 :: Submitting gfspost_f000-f000 +2023-05-31 08:39:36 +0000 :: fe2 :: Submitting gfspost_f006-f006 +2023-05-31 08:39:36 +0000 :: fe2 :: Submission of gfsfcst succeeded, jobid=28651062 +2023-05-31 08:39:36 +0000 :: fe2 :: Submission of gfspost_f000-f000 succeeded, jobid=28651063 +2023-05-31 08:39:36 +0000 :: fe2 :: Submission status of gfspost_f006-f006 is pending at druby://fe2:34637 diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/runcmds b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/runcmds new file mode 100644 index 0000000000..714bc3036c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_c3.xml -d v17_p8_c3.db +rocotostat -w v17_p8_c3.xml -d v17_p8_c3.db diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/v17_p8_c3.crontab b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/v17_p8_c3.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/v17_p8_c3.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/v17_p8_c3.db b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/v17_p8_c3.db new file mode 100644 index 0000000000..6feac6d143 Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/v17_p8_c3.db differ diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/v17_p8_c3.xml b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/v17_p8_c3.xml new file mode 100644 index 0000000000..159f854338 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/v17_p8_c3.xml @@ -0,0 +1,154 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + hfv3gfs + + batch + 00:50:00 + 104:ppn=24:tpp=1 + xjet + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 
+ 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/v17_p8_c3_lock.db b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/v17_p8_c3_lock.db new file mode 100644 index 0000000000..404d6b7900 Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_c3_8x8_xjet/v17_p8_c3_lock.db differ diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/12x12x2wg b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/12x12x2wg new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.aero b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.aeroanl b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.anal b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C 
GNSSRO + if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assimilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assimilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.analcalc b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.analdiag b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.arch b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.atmanl b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.atmanlrun @@ -0,0 +1,11 @@ +#! 
/usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.atmensanl b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.awips b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. 
of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.base b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.base new file mode 100644 index 0000000000..6ad413f953 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.base @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="vjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." 
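+# SDATE/EDATE above are the first and last cycle dates of the experiment (here a single 2022-11-10 00z cycle); assim_freq below is the cycling interval in hours.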
+export assim_freq=6 +export PSLOT="v17_p8_mynn_12x12_vjet_2wg" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_mynn" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-24} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-24} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-24} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." 
+ export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# Turn on NSST in the anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# the script defaults to binary diagnostic files. Set diagnostic file +# variables here since they are used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc.
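+# Note: "@NAME@" strings in this template are placeholders that the experiment setup scripts (e.g. setup_expt.py) fill in when the experiment's config.base is generated; an illustrative substitution (not executed from this file) would be: +#   sed -e "s;@MACHINE@;JET;g" -e "s;@ACCOUNT@;gsd-fv3-dev;g" config.base.emc.dyn_EMC > config.base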
+export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is 
defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
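+# Illustration: STEP_GFS, computed below as $(( 24 / gfs_cyc )), is the spacing in hours between gfs forecast cycles, e.g. gfs_cyc=4 -> STEP_GFS=6 and gfs_cyc=1 -> STEP_GFS=24.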
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
+ +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. 
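+# Each switch below is a simple YES/NO toggle; e.g. setting DO_GEMPAK="YES" would enable the GEMPAK product jobs for this experiment.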
+export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
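+# APP above selects which earth-system components the case block further below switches on, e.g. ATM = atmosphere only, ATMW = atmosphere coupled to waves, S2SW = coupled atmosphere/ocean/ice with waves.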
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
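+# Worked example for the IAU settings above: with IAUFHRS="3,6,9", IAU_FHROT=$(echo "3,6,9" | cut -c1) gives 3; note that cut -c1 keeps only the first character, so the first increment hour is assumed to be a single digit.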
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
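+# Example of the archive cadence implied above: with ARCH_CYC=00 and ARCH_WARMICFREQ=4, warm-start initial conditions are archived from the 00z cycle every 4th day, while ARCH_FCSTICFREQ=1 archives gdas/gfs forecast-only initial conditions daily.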
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
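+# The resolution block below maps the FV3 CASE to the ocean/ice grid via OCNRES, e.g. C384 or C768 -> OCNRES=025, a nominal quarter-degree MOM6/CICE6 grid.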
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
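+# Illustration of the per-cycle forecast length above: FHMAX_GFS is expanded by name from FHMAX_GFS_${cyc}, e.g. for cyc=06 it picks up FHMAX_GFS_06 (168 with the defaults above).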
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.com b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx 
COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
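+# Regarding the "empty variables must include a space" note at the top of this file: ${var:-default}-style expansions used elsewhere in the workflow treat an empty value as unset, so a single space (e.g. waveesmfGRD=' ' below) keeps the variable set and non-empty while remaining effectively blank.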
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.earc b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ecen b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.echgres b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ediag b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.efcs b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.eobs b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.epos b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.esfc b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.eupd b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.fcst b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not used. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true."
+ export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not used. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false."
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.fit2obs b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.gempak b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.getic b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.gldas b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ice b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.init b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.init @@ -0,0 +1,54 @@ +#! 
/usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.landanl b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.landanlinit b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.landanlinit @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.landanlrun b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.metp b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.nsst b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocn b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocnanal b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocnanalbmat b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocnanalbmat
new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Ocn Analysis verification specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocnpost b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ocnpost @@ -0,0 +1,16 @@ +#!
/usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.post b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.postsnd b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.prep b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.resources b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + 
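# Editor's note: the guarded block below is an illustrative sketch and is not part
# of the original config. It only spells out the PETS arithmetic above for the C768
# GFS values provided by config.ufs in this same experiment directory
# (layout_x_gfs=12, layout_y_gfs=12, nthreads_fv3_gfs=4, WRITE_GROUP_GFS=2,
# 10 write tasks per tile). SHOW_RESOURCE_MATH is a hypothetical variable that
# defaults to off, so sourcing this file behaves exactly as before.
if [[ "${SHOW_RESOURCE_MATH:-NO}" == "YES" ]]; then
  example_ntasks_fv3_gfs=$(( 12 * 12 * 6 ))              # 864 FV3 compute ranks
  example_ntasks_quilt_gfs=$(( 2 * 10 * 6 ))             # 120 write (quilt) ranks
  example_fv3pets=$(( example_ntasks_fv3_gfs * 4 ))      # 3456 PETS
  example_quiltpets=$(( example_ntasks_quilt_gfs * 4 ))  # 480 PETS
  # With DO_WAVE/DO_OCN/DO_ICE all "NO", NTASKS_TOT would equal ATMPETS below.
  echo "Example C768 gfs ATMPETS = $(( example_fv3pets + example_quiltpets ))"  # 3936
fi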
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requiremtn + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
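# Editor's note: illustrative comment only, not part of the original config. The
# "prep" and "arch"/"earc"/"getic" branches above build their exports through eval
# so that one template serves several step names. A hedged sketch of the expansion:
#   step="arch"
#   eval "export wtime_$step='06:00:00'"    # becomes: export wtime_arch='06:00:00'
# The newer config.resources in this experiment directory expresses the same idea
# with declare -x "wtime_${step}"=..., which avoids the extra eval parsing pass.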
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.sfcanl b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ufs b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ufs new file mode 100644 index 0000000000..ad28ce429b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.ufs @@ -0,0 +1,372 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
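# Editor's note: illustrative comment only, not part of the original config. "025"
# is the quarter-degree ocean configuration: NX_GLB=1440 and NY_GLB=1080 above
# describe the 1440x1080 tripolar grid, and CICE6 further down reuses the same
# dimensions and decomposition, which is why it aborts if MOM6 has not been
# configured first. A hedged example of how the flags might be combined wherever
# the workflow sources this file:
#   . "${EXPDIR}/config.ufs" --fv3 C768 --mom6 025 --cice6 025 --ww3 mx025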
MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.vrfy b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wafs b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wafsblending b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wave b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wavegempak b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wavegempak @@ -0,0 +1,13 @@ +#! 
/usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.waveinit b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.waveprep b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/runcmds b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/runcmds new file mode 100644 index 0000000000..331bec8558 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8.xml -d v17_p8.db +rocotostat -w v17_p8.xml -d v17_p8.db diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/v17_p8.crontab b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/v17_p8.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/v17_p8.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/v17_p8.xml b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/v17_p8.xml new file mode 100644 index 0000000000..6ca0b36adc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg/v17_p8.xml @@ -0,0 +1,151 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211090000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 01:00:00 + vjet + 246:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + 
gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + vjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/12x12x2wg b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/12x12x2wg new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.aero b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.aeroanl b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.anal b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # 
Assimilate Metop-C GNSSRO + if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.analcalc b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.analdiag b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.arch b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.atmanl b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.atmensanl b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.awips b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.awips @@ -0,0 +1,17 @@ +#! 
/usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.base b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.base new file mode 100644 index 0000000000..baeacea208 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.base @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="vjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." +export assim_freq=6 +export PSLOT="v17_p8_mynn_12x12_vjet_2wg_24h" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_mynn" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
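As an aside on the block above: the assignment RUN=${RUN:-${CDUMP:-"gfs"}} chains two parameter-expansion defaults, so a RUN exported by the job card wins, an existing CDUMP is the fallback, and the literal "gfs" is the last resort. A minimal stand-alone sketch of that chaining, with illustrative values that are not taken from the workflow:
# Illustration of the ${VAR:-fallback} chaining used for RUN/CDUMP above.
unset RUN CDUMP
echo "${RUN:-${CDUMP:-gfs}}"    # prints "gfs": neither variable is set
CDUMP="gdas"
echo "${RUN:-${CDUMP:-gfs}}"    # prints "gdas": CDUMP supplies the fallback
RUN="enkfgdas"
echo "${RUN:-${CDUMP:-gfs}}"    # prints "enkfgdas": a set RUN always wins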
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-168} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-168} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-168} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-168} +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
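Two of the settings above lean on small shell idioms worth spelling out: FHMAX_GFS is resolved from the per-cycle variable FHMAX_GFS_${cyc} through eval, and IAU_FHROT keeps only the first character of IAUFHRS via cut. The sketch below is not part of the original file; it reuses this config's values to show an eval-free indirect expansion and a comma-strip that would also tolerate a two-digit first increment hour:
# Sketch only: equivalent lookups without eval or cut, using this config's values.
cyc=00
FHMAX_GFS_00=24
var="FHMAX_GFS_${cyc}"
echo "${!var}"          # indirect expansion -> 24, same result as the eval echo above
IAUFHRS="3,6,9"
echo "${IAUFHRS%%,*}"   # strip from the first comma -> 3, safe even for 2-digit hours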
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling: when DO_GLDAS is set, FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# Turn on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# scripts default to binary diagnostic files. Set diagnostic file +# variables here since they are used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
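The HPSSARCH/LOCALARCH guard above enforces that at most one archive destination is selected. Purely as an illustration, the same rule could be factored into a helper; the function name and its placement are hypothetical and not part of the workflow:
# Hypothetical helper expressing the inline HPSS-versus-local exclusivity check.
check_one_archive() {
  if [[ "${HPSSARCH:-NO}" = "YES" && "${LOCALARCH:-NO}" = "YES" ]]; then
    echo "Both HPSS and local archiving selected. Please choose one or the other." >&2
    return 2
  fi
}
check_one_archive || exit 2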
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
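Unlike the experiment copy of config.base earlier in this change, this config.base.emc.dyn_EMC variant keeps @MACHINE@, @ACCOUNT@, @SDATE@ and similar tokens as placeholders to be filled when an experiment is generated. That substitution is performed by the workflow's experiment-setup tooling; the sed pass below is only a rough illustration of the idea, borrowing values from the concrete config.base above and using a made-up PSLOT:
# Illustration only: filling a few @...@ tokens the way setup tooling would.
sed -e 's|@MACHINE@|JET|' \
    -e 's|@ACCOUNT@|gsd-fv3-dev|' \
    -e 's|@SDATE@|2022111000|' \
    -e 's|@PSLOT@|my_experiment|' \
    config.base.emc.dyn_EMC > config.base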
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
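The APP case block above combines a glob match (S2S*) with two regex tests: a trailing A switches on aerosols and a leading S2SW switches on wave coupling, so a single string such as S2SWA enables both. A small stand-alone check of those tests, not part of the original file:
# Illustrative check of the regex tests used in the APP case block above.
APP="S2SWA"
[[ "${APP}" =~ ^S2SW ]] && echo "wave coupling on"   # leading S2SW -> DO_WAVE=YES
[[ "${APP}" =~ A$ ]]    && echo "aerosols on"        # trailing A   -> DO_AERO=YES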
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
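The cold-start test above zeroes IAU_OFFSET and IAU_FHROT whenever IAU cannot be applied: the first cycle of a cold-started cycled run, IAU disabled outright, or a cold-started forecast-only run. Plugging in the values the concrete config.base in this change uses (forecast-only mode with EXP_WARM_START=.false.) shows the third clause firing; the snippet is illustrative only:
# Illustration only: the cold-start condition evaluated with this experiment's values.
MODE="forecast-only"; EXP_WARM_START=".false."; DOIAU="YES"
SDATE="2022111000"; CDATE="2022111000"
if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] \
   || [[ "${DOIAU}" = "NO" ]] \
   || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]]; then
  echo "cold start: IAU_OFFSET=0 and IAU_FHROT=0"
fi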
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
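# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch only, not part of the patch. The
# DUMP_SUFFIX window test above compares the 10-digit ${CDATE} (YYYYMMDDHH)
# as a plain integer, e.g.:
#
#   CDATE=2019100112   # 2019092100 <= 2019100112 <= 2019110700 -> DUMP_SUFFIX="p"
#   CDATE=2020010100   # outside the window                     -> DUMP_SUFFIX=""
#
# so dumps from the NCO GFS v15.3 parallel are only picked up for that
# retrospective period.
# ---------------------------------------------------------------------------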
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
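# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch only, not part of the patch. The
# FHMAX_GFS_* lines above use ${VAR:-168}, so they only supply a fallback.
# For the S2S* apps, config.defaults.s2sw has already been sourced inside the
# APP case block above and has set FHMAX_GFS_00=48 etc., so the fallback is
# not reached:
#
#   FHMAX_GFS_00=48                              # from config.defaults.s2sw
#   export FHMAX_GFS_00=${FHMAX_GFS_00:-168}     # keeps 48
#
# For ATM-only apps these variables are typically still unset at this point
# (unless set in the calling environment), so the 168-hour fallback applies.
# ---------------------------------------------------------------------------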
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.com b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' 
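# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch only, not part of the patch. Assuming
# generate_com() behaves as described in the header comment of this file, a
# template is expanded at runtime like so (all values below are hypothetical):
#
#   ROTDIR=/path/to/comrot RUN=gfs PDY=20230101 cyc=00
#   MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY
#   # -> COM_ATMOS_HISTORY=/path/to/comrot/gfs.20230101/00/mem001/model_data/atmos/history
#   #    (declared read-only and exported)
#
# The single quotes in the *_TMPL definitions are what keep ${ROTDIR}, ${RUN},
# etc. unexpanded until generate_com performs the substitution.
# ---------------------------------------------------------------------------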
+declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
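# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch only, not part of the patch. The "empty
# variables must include a space" rule at the top of this file matters because
# downstream configs apply the ${var:-default} idiom (e.g. config.fcst uses
# ${min_seaice:-"0.15"}), and that idiom treats an empty value the same as an
# unset one:
#
#   waveesmfGRD=""  ; echo "${waveesmfGRD:-fallback}"   # -> fallback (value lost)
#   waveesmfGRD=" " ; echo "${waveesmfGRD:-fallback}"   # -> " "      (value kept)
#
# hence a single space is used to mean "set, but intentionally blank".
# ---------------------------------------------------------------------------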
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.earc b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ecen b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.echgres b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ediag b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.efcs b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.eobs b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.epos b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.esfc b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.eupd b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.fcst b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
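# ---------------------------------------------------------------------------
# Editor's note: illustrative sketch only, not part of the patch. For a
# coupled case with DO_OCN=YES, DO_ICE=YES, DO_WAVE=NO, CASE=C768 and
# OCNRES=ICERES=025, the resolution-dependent sourcing near the top of this
# file reduces to:
#
#   string="--fv3 C768 --mom6 025 --cice6 025"
#   source $EXPDIR/config.ufs ${string}
#
# and the component loop sources only config.ocn and config.ice, because
# $(eval echo \$$control) looks up DO_WAVE/DO_OCN/DO_ICE/DO_AERO indirectly
# and sources config.<component> for each one set to "YES".
# ---------------------------------------------------------------------------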
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." 
+ export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." 
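+  # (Editor's note, restating the flags in this UFS p8 set) only the GSL
+  # small-scale drag on the next line is enabled; the GSL large-scale/blocking
+  # and TOFD options stay off and uGWD v0 is active (do_ugwp_v0=".true." above).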
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
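+  # Worked illustration of the restart_interval list assembled above in this
+  # branch (example numbers only, not a setting): with restart_interval_gfs=12,
+  # DOIAU=YES, IAU_OFFSET=6 and FHMAX_GFS=48, the loop starts at
+  # xfh=$((12 + 6/2))=15 and steps by 12, giving restart_interval="15 27 39";
+  # when restart_interval_gfs<=0 a single restart is written at FHMAX_GFS.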
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.fit2obs b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.gempak b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.getic b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.gldas b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ice b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ice @@ -0,0 +1,5 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.init b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.init @@ -0,0 +1,54 @@ +#! /usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.landanl b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.landanlinit b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.landanlrun b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.metp b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.nsst b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocn b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable is determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is time interval for applying increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocnanal b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size reolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocnanalbmat 
b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocnpost b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ocnpost @@ -0,0 +1,16 @@ +#! 
/usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.post b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.postsnd b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.prep b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.resources b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
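+  # (Editor's note) i.e. the ocean-analysis post step requests one fully packed
+  # node: npe_node_max single-threaded tasks.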
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + 
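+  # Worked example of the PETS bookkeeping above (illustrative numbers only,
+  # not a configuration): with ntasks_fv3=96, nthreads_fv3=2, ntasks_quilt=24
+  # and QUILTING=.true.:
+  #   FV3PETS   = 96 * 2 = 192
+  #   QUILTPETS = 24 * 2 = 48
+  #   ATMPETS   = 192 + 48 = 240
+  # A wave component contributing 120 PETS and an ocean component contributing
+  # 60 PETS would then give NTASKS_TOT = 240 + 120 + 60 = 420, which is what is
+  # exported as npe_fcst (npe_fcst_gfs for the gfs CDUMP).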
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requiremtn + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
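+ # For reference (illustrative arithmetic only, not a new setting): with npe_node_max=128 set above, + # packing 40 tasks/node x 3 threads/task uses 120 of the 128 cores on each node, + # and the 480 eobs/eomg tasks at 40 tasks/node therefore occupy 480 / 40 = 12 nodes.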
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.sfcanl b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ufs b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ufs new file mode 100644 index 0000000000..ad28ce429b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.ufs @@ -0,0 +1,372 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.vrfy b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wafs b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wafsblending b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wave b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEM would be unset, so this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +.
$EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wavegempak b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.waveinit b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.waveprep b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/runcmds b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/runcmds new file mode 100644 index 0000000000..331bec8558 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8.xml -d v17_p8.db +rocotostat -w v17_p8.xml -d v17_p8.db diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/v17_p8.crontab b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/v17_p8.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/v17_p8.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/v17_p8.xml b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/v17_p8.xml new file mode 100644 index 0000000000..85ea2d29de --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_24h/v17_p8.xml @@ -0,0 +1,151 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211090000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 01:00:00 + vjet + 246:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + 
&JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + vjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/12x12x2wg b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/12x12x2wg new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.aero b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.aeroanl b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.anal b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate 
Metop-C GNSSRO + if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assimilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assimilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.analcalc b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.analdiag b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +.
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.arch b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.atmanl b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.atmensanl b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.awips b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.awips @@ -0,0 +1,17 @@ +#! 
/usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.base b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.base new file mode 100644 index 0000000000..11a62cab1e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.base @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="vjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." +export assim_freq=6 +export PSLOT="v17_p8_mynn_12x12_vjet_2wg_6h" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_mynn" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-168} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-168} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-168} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-168} +export FHMAX_GFS_00=${FHMAX_GFS_00:-6} +export FHMAX_GFS_06=${FHMAX_GFS_06:-6} +export FHMAX_GFS_12=${FHMAX_GFS_12:-6} +export FHMAX_GFS_18=${FHMAX_GFS_18:-6} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
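The per-cycle forecast length, the GFS launch stride, and the IAU start offset above all come out of compact shell idioms that are easy to misread. Below is a minimal, standalone sketch of how they resolve for this experiment (gfs_cyc=1, the 6-hour FHMAX_GFS_* settings, and IAUFHRS="3,6,9" as set above); the literal cyc value is an illustrative assumption, since cyc is normally supplied by the workflow job environment:

    #! /usr/bin/env bash
    # Illustrative values only; in the workflow, cyc comes from the job environment.
    cyc=00
    gfs_cyc=1
    FHMAX_GFS_00=6
    IAUFHRS="3,6,9"
    # Indirect lookup: after ${cyc} expands, eval runs `echo ${FHMAX_GFS_00}`.
    FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}})   # -> 6
    # One gfs cycle per day -> a gfs forecast is launched every 24 hours.
    STEP_GFS=$(( 24 / gfs_cyc ))                  # -> 24
    # IAU forecast start offset is the first entry of IAUFHRS ("3,6,9" -> 3).
    IAU_FHROT=$(echo ${IAUFHRS} | cut -c1)        # -> 3
    echo "FHMAX_GFS=${FHMAX_GFS} STEP_GFS=${STEP_GFS} IAU_FHROT=${IAU_FHROT}"

Running this prints "FHMAX_GFS=6 STEP_GFS=24 IAU_FHROT=3", which matches the values this configuration is intended to produce at the 00Z cycle.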
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling: when DO_GLDAS is set, FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# Turn on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# scripts default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.com b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotes so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# defined in `ush/preamble.sh`, to do this substitution. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `declare -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare 
-rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${CASE}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
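A short aside on the "empty variables must include a space" note at the top of config.defaults.s2sw above: this behaves as described if the consuming scripts fill in defaults with the `${var:-default}` expansion, which treats an empty value the same as an unset one (that mechanism is an assumption here, not something shown in this patch). A minimal bash sketch:

demo=""
echo "${demo:-48}"   # prints "48": an empty value is replaced by the :- default
demo=" "
echo "${demo:-48}"   # prints " ": a single space is kept, so the intended override survives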
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.earc b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ecen b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.echgres b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ediag b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.efcs b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.eobs b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.epos b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.esfc b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.eupd b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.fcst b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
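The component-sourcing loop near the top of config.fcst_gsl above uses `eval` and `awk` to expand DO_WAVE/DO_OCN/DO_ICE/DO_AERO indirectly and to lower-case the component name. For reference only, the same effect can be sketched with bash-native indirection and case conversion (assumes bash 4+; this is not taken from the patch):

for component in WAVE OCN ICE AERO; do
  control="DO_${component}"                      # e.g. DO_WAVE
  if [[ "${!control:-NO}" == "YES" ]]; then      # indirect expansion instead of eval
    source "${EXPDIR}/config.${component,,}"     # ${var,,} lower-cases, e.g. config.wave
  fi
done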
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not used. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." 
+ export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not used. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." 
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
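A worked example of the restart-hour loop in the gfs branch above, using hypothetical values (12-hourly restarts, a 6-h IAU offset, and a 48-h forecast); the trailing echo is only for illustration and is not part of the config:

restart_interval_gfs=12
IAU_OFFSET=6
FHMAX_GFS=48
xfh=$((restart_interval_gfs + (IAU_OFFSET / 2)))   # 12 + 3 = 15
rst_list=""
while [ "${xfh}" -le "${FHMAX_GFS}" ]; do
  rst_list="${rst_list} ${xfh}"
  xfh=$((xfh + restart_interval_gfs))
done
echo "restart_interval=${rst_list}"                # -> "restart_interval= 15 27 39"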
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.fit2obs b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.gempak b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.getic b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.gldas b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ice b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.init b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.init @@ -0,0 +1,54 @@ +#! 
/usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.landanl b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.landanlinit b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.landanlinit @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.landanlrun b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.metp b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
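For orientation, the model_file_format template above is resolved by METplus at run time; a hypothetical expansion is sketched below (values are illustrative and assume {lead?fmt=%2H} yields a zero-padded two-digit lead hour and {init?fmt=%Y%m%d%H} the cycle timestamp):

# model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2"
# with CDUMP=gfs, a 2021032100 cycle, and a 24-hour lead, this would resolve to:
#   pgbf24.gfs.2021032100.grib2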
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.nsst b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocn b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocnanal b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocnanalbmat 
b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocnpost b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ocnpost @@ -0,0 +1,16 @@ +#! 
/usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.post b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.postsnd b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.prep b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.resources b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components; usually they land on the atmosphere tasks. + # However, it is suggested to limit the mediator PETS to 300, as larger values may cause slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + 
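# Worked example of the PETS arithmetic above (illustrative only; it assumes
# QUILTING=".true.", an atmosphere-only run with DO_WAVE=DO_OCN=DO_ICE="NO",
# and the C768 gfs settings from this experiment's config.ufs: 12x12 layout,
# nthreads_fv3_gfs=4, ntasks_fv3_gfs=864, ntasks_quilt_gfs=120):
#   FV3PETS   = 864 * 4    = 3456
#   QUILTPETS = 120 * 4    = 480
#   ATMPETS   = 3456 + 480 = 3936   -> npe_fcst_gfs=3936, nth_fcst_gfs=1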
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
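# Note: the npe_node_* values computed with bc in this file are plain integer
# division of npe_node_max by the step's thread count, e.g. for the eupd step
# further below:
#   npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc)
# An equivalent bash-only form (shown for illustration, not used here) would be:
#   npe_node_eupd=$(( npe_node_max / nth_eupd ))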
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.sfcanl b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ufs b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ufs new file mode 100644 index 0000000000..ad28ce429b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.ufs @@ -0,0 +1,372 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.vrfy b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wafs b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wafsblending b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wave b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. 
$EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wavegempak b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.waveinit b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.waveprep b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/runcmds b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/runcmds new file mode 100644 index 0000000000..fc9fb92625 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_mynn.xml -d v17_p8_mynn.db +rocotostat -w v17_p8_mynn.xml -d v17_p8_mynn.db diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/v17_p8_mynn.crontab b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/v17_p8_mynn.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/v17_p8_mynn.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/v17_p8_mynn.xml b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/v17_p8_mynn.xml new file mode 100644 index 0000000000..07a5ef1598 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_vjet_2wg_6h/v17_p8_mynn.xml @@ -0,0 +1,151 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211090000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + debug + 00:30:00 + vjet + 246:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 
f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + vjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/12x12x2wgx12wt b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/12x12x2wgx12wt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.aero b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.aeroanl b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.anal b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi 
+ + # Assimilate Metop-C GNSSRO + if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.analcalc b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.analdiag b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.arch b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.atmanl b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.atmensanl b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.awips b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
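The ERRSCRIPT/err idiom defined above is exercised by the downstream JJOBs and ex-scripts rather than in config.base itself. A minimal sketch of that pattern, assuming the default ERRSCRIPT shown above; the executable name some_task.x and the APRUN launcher are placeholders, not taken from this repository:

${APRUN} "${EXECgfs}/some_task.x"
export err=$?
${ERRSCRIPT} || exit "${err}"   # default ERRSCRIPT runs: eval [[ $err = 0 ]], so any non-zero err aborts the job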
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
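As a quick illustration of how the per-cycle forecast length, GFS cadence, and IAU start hour above resolve, here is a stand-alone sketch; the cyc and gfs_cyc values are assumed for the example and normally come from the job card and setup scripts:

cyc=18; gfs_cyc=4                               # assumed example values
FHMAX_GFS_18=120
FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}})     # picks FHMAX_GFS_18 -> 120
STEP_GFS=$(( 24 / gfs_cyc ))                    # -> 6, i.e. a GFS cycle every 6 hours
IAUFHRS="3,6,9"
IAU_FHROT=$(echo ${IAUFHRS} | cut -c1)          # first increment hour -> 3 (assumes a single-digit first hour)
echo "FHMAX_GFS=${FHMAX_GFS} STEP_GFS=${STEP_GFS} IAU_FHROT=${IAU_FHROT}"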
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# When DO_GLDAS is enabled, FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# Turn on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# scripts default to binary diagnostic files. Set diagnostic file +# variables here since they are used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
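The cold-start check earlier in this file is a single compound test that can be hard to read. Restated below as a hypothetical helper, purely illustrative and not part of the workflow, it disables the IAU ramp whenever IAU is switched off or the experiment cold starts:

iau_is_disabled() {
  # IAU explicitly switched off
  [[ "${DOIAU}" == "NO" ]] && return 0
  # cold start: either forecast-only, or a cycled run at its first cycle (SDATE == CDATE)
  if [[ "${EXP_WARM_START}" == ".false." ]]; then
    [[ "${MODE}" == "forecast-only" ]] && return 0
    [[ "${MODE}" == "cycled" && "${SDATE}" == "${CDATE}" ]] && return 0
  fi
  return 1
}
if iau_is_disabled; then export IAU_OFFSET=0 IAU_FHROT=0; fi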
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
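For a concrete reading of the CASE-to-ocean-resolution mapping above: with an assumed CASE of C384 (the real value is substituted for @CASECTL@ at experiment setup), the case block resolves to the quarter-degree ocean and ice grids. OCNRES is expressed in hundredths of a degree:

CASE="C384"              # assumed example value
case "${CASE}" in
  "C48")  OCNRES=500;;   # 5 deg ocean
  "C96")  OCNRES=100;;   # 1 deg ocean
  "C192") OCNRES=050;;   # 1/2 deg ocean
  *)      OCNRES=025;;   # C384, C768, and the default use 1/4 deg ocean
esac
ICERES=${OCNRES}
echo "CASE=${CASE} -> OCNRES=${OCNRES} ICERES=${ICERES}"   # -> OCNRES=025 ICERES=025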
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling: when DO_GLDAS is set, FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# Turn on NSST in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# scripts default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh.
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.base_f024 b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.base_f024 new file mode 100644 index 0000000000..872441362a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.base_f024 @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." 
+export assim_freq=6 +export PSLOT="v17_p8_mynn_12x12_xjet_2wg_12wt" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_mynn" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-168} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-168} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-168} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-168} +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." 
+ export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.com b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. 
+# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx 
COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." + +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.earc b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. 
$EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ecen b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.echgres b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ediag b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.efcs b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.efcs @@ -0,0 +1,97 @@ +#! /usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? 
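As a quick check of the NECENGRP logic in config.ecen above, the comma count of IAUFHRS_ENKF plus one gives the number of concurrent recentering groups. A small sketch using the default value from config.base:

# With the default IAUFHRS_ENKF="3,6,9":
IAUFHRS_ENKF="3,6,9"
ngrps=$(grep -o ',' <<<"${IAUFHRS_ENKF}" | grep -c .)   # two commas -> 2
((ngrps++))                                             # -> 3 ecen groups
echo "NECENGRP=${ngrps}"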
+ +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.eobs b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. $EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.epos b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.epos @@ -0,0 +1,20 @@ +#! 
/usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.esfc b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.eupd b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." # Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. 
+ +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.fcst b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." 
+ export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." 
]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. ### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. 
+ export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." 
+ +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.fcst_orig @@ -0,0 +1,364 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." 
+export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." 
]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
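+  # Worked example of the restart list assembled above (hypothetical values): with
+  # restart_interval_gfs=12, DOIAU=YES, IAU_OFFSET=6 and FHMAX_GFS=48, the loop starts at
+  # xfh=12+6/2=15 and yields restarts at forecast hours 15, 27 and 39.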
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.fit2obs b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.gempak b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.getic b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.gldas b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ice b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ice @@ -0,0 +1,5 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.init b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.init @@ -0,0 +1,54 @@ +#! /usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.landanl b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.landanlinit b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.landanlrun b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.metp b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.nsst b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash
+
+########## config.nsst ##########
+# NSST specific
+
+echo "BEGIN: config.nsst"
+
+# NSST parameters contained within nstf_name
+
+# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled
+export NST_MODEL=2
+
+# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON,
+export NST_SPINUP=0
+if [[ "$CDATE" -lt "2017072000" ]]; then
+  export NST_SPINUP=1
+fi
+
+# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON
+export NST_RESV=0
+
+# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction)
+export ZSEA1=0
+export ZSEA2=0
+
+export NST_GSI=3 # default 0: No NST info at all;
+                 #         1: Input NST info but not used in GSI;
+                 #         2: Input NST info, used in CRTM simulation, no Tr analysis
+                 #         3: Input NST info, used in both CRTM simulation and Tr analysis
+export NSTINFO=0 # number of elements added in obs. data array (default = 0)
+if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi
+
+echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocn b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocn @@ -0,0 +1,23 @@
+#! /usr/bin/env bash
+
+echo "BEGIN: config.ocn"
+
+# MOM_input template to use
+export MOM_INPUT="MOM_input_template_${OCNRES}"
+
+export DO_OCN_SPPT="NO"      # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False)
+export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False)
+
+# Templated variables in MOM_input_template
+export MOM6_USE_LI2016="True" # set to False for restart reproducibility
+export MOM6_THERMO_SPAN="False"
+export MOM6_ALLOW_LANDMASK_CHANGES="False"
+
+if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then
+  export ODA_INCUPD="True"
+else
+  export ODA_INCUPD="False"
+fi
+export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment
+
+echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocnanal b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocnanal @@ -0,0 +1,32 @@
+#!/bin/bash
+
+########## config.ocnanal ##########
+# configuration common to all ocean analysis tasks
+
+echo "BEGIN: config.ocnanal"
+
+export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config
+export OBS_LIST=
+[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml
+export OBS_YAML=${OBS_LIST}
+export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml
+export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25
+export SOCA_VARS=tocn,socn,ssh
+export SABER_BLOCKS_YAML=
+export SOCA_NINNER=50
+export CASE_ANL=C48
+export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent
+export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin
+
+# R2D2
+export R2D2_OBS_DB=shared
+export R2D2_OBS_DUMP=s2s_v1
+export R2D2_OBS_SRC=gdas_marine
+export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed
+export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2
+
+# NICAS
+export NICAS_RESOL=1
+export NICAS_GRID_SIZE=15000
+
+echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocnanalbmat
b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocnpost b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ocnpost @@ -0,0 +1,16 @@ +#! 
/usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.post b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.postsnd b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.prep b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.resources b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + 
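+  # Illustrative trace of the PETS arithmetic above using hypothetical task counts:
+  # ntasks_fv3=288, nthreads_fv3=2 and ntasks_quilt=48 with QUILTING=.true. give
+  # FV3PETS=576, QUILTPETS=96 and ATMPETS=672; wave/ocean/ice PETS are then added
+  # to NTASKS_TOT only for the components that are switched on.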
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requiremtn + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
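+    # Note on the pattern used throughout this file: unless a value is
+    # hard-coded, the tasks-per-node count is derived from npe_node_max and
+    # the thread count with bc, which performs integer (truncating) division,
+    # e.g. for epos further below:
+    #
+    #   npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc)   # 128 / 4 = 32
+    #
+    # eobs above hard-codes npe_node_eobs=40 instead; one possible reading (an
+    # assumption, not stated in this file) is that npe_eobs=480 at 40 tasks per
+    # node fills exactly 12 nodes.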
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.sfcanl b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ufs b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ufs new file mode 100644 index 0000000000..6c48881832 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.ufs @@ -0,0 +1,373 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.vrfy b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wafs b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wafsblending b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. 
$EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wave b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wave @@ -0,0 +1,159 @@ +#! /usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a 
single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! 
/usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wavegempak b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.waveinit b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.wavepostsbs @@ -0,0 +1,24 @@ +#! 
/usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.waveprep b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/runcmds b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/runcmds new file mode 100644 index 0000000000..fc9fb92625 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_mynn.xml -d v17_p8_mynn.db +rocotostat -w v17_p8_mynn.xml -d v17_p8_mynn.db diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/v17_p8_mynn.crontab b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/v17_p8_mynn.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/v17_p8_mynn.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/v17_p8_mynn.db_f024 b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/v17_p8_mynn.db_f024 new file mode 100644 index 0000000000..690d5e37d8 Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/v17_p8_mynn.db_f024 differ diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/v17_p8_mynn.xml_f024 b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/v17_p8_mynn.xml_f024 new file mode 100644 index 0000000000..7f06945f6b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x12_xjet_2wg_12wt_24h/v17_p8_mynn.xml_f024 @@ -0,0 +1,153 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 
01:00:00 + xjet + 168:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/12x16 b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/12x16 new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.aero b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.aeroanl b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.anal b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if 
[[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.analcalc b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.analdiag b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.arch b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.atmanl b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.atmanlrun @@ -0,0 +1,11 @@ +#! 
/usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.atmensanl b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.awips b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. 
of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.base b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.base new file mode 100644 index 0000000000..f1398a3a05 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.base @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." 
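+# Illustration only, not consumed by any job: SDATE and EDATE above bound the
+# experiment and assim_freq (set just below) is the cycling interval in hours,
+# so the cycle list can be enumerated as sketched here (GNU date assumed). With
+# SDATE=EDATE=2022111000 this yields the single cycle 2022111000.
+#   cdate=${SDATE}
+#   while (( cdate <= EDATE )); do
+#     echo "${cdate}"
+#     cdate=$(date -ud "${cdate:0:8} ${cdate:8:2}:00 + ${assim_freq} hours" +%Y%m%d%H)
+#   done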
+export assim_freq=6 +export PSLOT="v17_p8_mynn_12x16_xjet" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_mynn" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is 
defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
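+# Worked example (illustration only): gfs_cyc sets how often the long GFS
+# forecast runs and STEP_GFS below is derived as 24/gfs_cyc, so
+#   gfs_cyc=1 -> STEP_GFS=24 (00Z only)
+#   gfs_cyc=2 -> STEP_GFS=12 (00Z and 12Z)
+#   gfs_cyc=4 -> STEP_GFS=6  (all four cycles)
+# The per-cycle forecast length is picked by expanding the variable named
+# FHMAX_GFS_${cyc}; bash indirect expansion gives the same result without eval:
+#   varname="FHMAX_GFS_${cyc}"   # e.g. FHMAX_GFS_06
+#   echo "${!varname}"           # 120 with the defaults below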
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
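+# Note (illustration only): in the "EnKF output frequency" block above,
+# DOHYBVAR=YES with l4densvar=.true. forces FHOUT=FHOUT_ENKF=1 so that hourly
+# backgrounds are available across the assimilation window; with
+# l4densvar=.false. the EnKF output stays 3-hourly (FHOUT_ENKF=3).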
+ +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. 
+export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
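+# Worked example (illustration only): the APP string is decoded by the case
+# block further below. "S2S*" switches on the coupled ocean and ice components,
+# a trailing "A" adds aerosols and a leading "S2SW" adds waves, so APP=S2SWA
+# ends up with DO_OCN=DO_ICE=DO_WAVE=DO_AERO="YES" while APP=ATM leaves only the
+# atmosphere active. The two regex tests behave like:
+#   [[ "S2SWA" =~ A$ ]]    && echo "aerosols on"   # matches
+#   [[ "S2SWA" =~ ^S2SW ]] && echo "waves on"      # matches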
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
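+# Note (illustration only): IAU_FHROT above is taken as the first character of
+# IAUFHRS, i.e. the first increment hour (3 for "3,6,9"). That only works while
+# the first hour is a single digit; taking the first comma-separated field is
+# equivalent here and also survives two-digit hours:
+#   echo "${IAUFHRS}" | cut -c1       # -> 3
+#   echo "${IAUFHRS}" | cut -d, -f1   # -> 3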
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
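+# Note (illustration only): in the CASE -> OCNRES table below, OCNRES encodes
+# the ocean grid spacing in hundredths of a degree (500 = 5.0 deg, 100 = 1.0 deg,
+# 050 = 0.5 deg, 025 = 0.25 deg) and ICERES simply mirrors OCNRES. A quick
+# conversion once OCNRES is set:
+#   awk -v r="${OCNRES}" 'BEGIN { printf "%.2f degrees\n", r/100 }'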
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
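+# Added reading of the archive knobs above (an assumption from the inline
+# comments; not verified against arch.sh/earc.sh): ARCH_CYC=00 with
+# ARCH_WARMICFREQ=4 would archive warm-start ICs from the 00z cycle once every
+# four days, while ARCH_FCSTICFREQ=1 keeps daily ICs for forecast-only reruns.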
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.com b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx 
COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
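+# Illustrative note on the "empty variables must include a space" rule above
+# (the downstream snippet is hypothetical, not taken from this repo):
+#   export wavepostGRD=${wavepostGRD:-'glo_10m'}
+# Bash's ':-' substitutes the default when a variable is unset OR empty, so the
+# single-space values below survive such defaulting, whereas '' would be
+# silently replaced.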
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.earc b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ecen b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.echgres b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ediag b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.efcs b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.eobs b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.epos b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.esfc b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.eupd b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.fcst b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.fcst.org b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.fcst.org new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.fcst.org @@ -0,0 +1,364 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
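+# Illustrative expansion of the config.ufs argument string assembled near the
+# top of this file (values assumed for the example): with CASE=C384,
+# DO_OCN=DO_ICE=DO_WAVE="YES", OCNRES=ICERES=025, and
+# waveGRD="gnh_10m aoc_9km gsh_15m", config.ufs is sourced with
+#   --fv3 C384 --mom6 025 --cice6 025 --ww3 gnh_10m;aoc_9km;gsh_15m
+# because ${waveGRD// /;} swaps the spaces for semicolons so the wave grids
+# travel as a single argument.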
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. 
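+  # Added note: dt_inner starts at DELTIM/2 above, but with sedi_semi enabled
+  # the check just below resets it to the full DELTIM.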
+ if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." 
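+  # Added note: with DOIAU=YES the restart_interval="3 6" set above writes
+  # restarts at f003 (start of the IAU window) and f006, following the
+  # restart_interval conventions documented a few lines up.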
+ +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." 
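+# Worked example for the WAVE_CDUMP case at the top of this file (illustrative):
+# with CDUMP=enkfgdas, ${CDUMP/enkf} expands to "gdas", so WAVE_CDUMP="gdas" or
+# "both" leaves DO_WAVE unchanged, while WAVE_CDUMP="gfs" would switch waves off
+# for that forecast.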
+export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. 
### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." 
+ export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. ### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
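+  # Worked example for the restart list built above (numbers assumed):
+  # restart_interval_gfs=12, FHMAX_GFS=48, DOIAU=YES, IAU_OFFSET=6 gives
+  # xfh=12+(6/2)=15 and the loop produces restart_interval="15 27 39"
+  # (51 would exceed FHMAX_GFS, ending the loop).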
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.fcst_test b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.fcst_test new file mode 100644 index 0000000000..315ccef70c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.fcst_test @@ -0,0 +1,437 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
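+# Side note on the component-config loop near the top of this file: a sketch of
+# an equivalent form using bash indirect expansion (shown for clarity only, the
+# loop above is unchanged):
+#   for component in WAVE OCN ICE AERO; do
+#     control="DO_${component}"
+#     if [[ "${!control}" == "YES" ]]; then
+#       source "${EXPDIR}/config.${component,,}"
+#     fi
+#   done
+# ${!control} reads the variable named by $control and ${component,,} lowercases
+# the name (bash 4+), matching what the eval/awk pair above computes.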
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".false." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. 
+else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + #JKHexport progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + #jkhexport ncld=2 + #jkhexport FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + #jkhexport ltaerosol=".false." + #jkhexport lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + #jkhexport hord_mt_nh_nonmono=5 + #jkhexport hord_xx_nh_nonmono=5 + #jkhexport vtdm4_nh_nonmono=0.02 + #jkhexport nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. ### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + #jkhexport sedi_semi=.true. + #jkhexport decfl=10 + #jkhif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + #jkh export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + #jkh export ltaerosol=".true." + #jkhelse + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export ltaerosol=".false." + #jkhfi + export lradar=".false." + #jkhexport dt_inner=$((DELTIM/2)) + #jkhif [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" 
+ +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.fit2obs b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.gempak b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.getic b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.gldas b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ice b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.init b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.init @@ -0,0 +1,54 @@ +#! 
/usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.landanl b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.landanlinit b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.landanlinit @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.landanlrun b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.metp b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
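+# Illustrative expansion of the model_file_format template above (assuming
+# CDUMP=gfs and a hypothetical 2021032100 cycle): the 24-hour forecast file
+# would resolve to pgbf24.gfs.2021032100.grib2.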
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.nsst b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocn b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable is determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is time interval for applying increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocnanal b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size reolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocnanalbmat b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocnanalbmat new file mode 100644 index 
0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocnpost b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.post b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.postsnd b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.prep b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.resources b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + 
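+
+  # Worked example of the PETS arithmetic above (illustrative numbers only,
+  # assuming an atmosphere-only run): ntasks_fv3=48, nthreads_fv3=2 and
+  # ntasks_quilt=4 give FV3PETS=96, QUILTPETS=8 and ATMPETS=104, so
+  # npe_fcst=NTASKS_TOT=104 with nth_fcst=1 (ESMF manages the threading).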
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requiremtn + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
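+  # Illustrative note (not part of the original file): a rough sketch of how
+  # these static eobs/eomg settings translate into a node request, assuming the
+  # usual packing of npe_node_eobs ranks per node:
+  #   nodes          = npe_eobs / npe_node_eobs   -> 480 / 40 = 12 nodes
+  #   cores per node = npe_node_eobs * nth_eobs   -> 40 * 3   = 120 of the
+  #                    npe_node_max=128 cores set above
+  # Changes to nth_eobs should normally keep npe_node_eobs * nth_eobs <= npe_node_max.
+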
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.sfcanl b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ufs b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ufs new file mode 100644 index 0000000000..a96ba126e2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.ufs @@ -0,0 +1,370 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + 
export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) 
+ echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.vrfy b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wafs b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wafsblending b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wave b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wavegempak b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wavegempak @@ -0,0 +1,13 @@ +#! 
/usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.waveinit b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.waveprep b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/logs/2022110900.log b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/logs/2022110900.log new file mode 100644 index 0000000000..d981e07453 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/logs/2022110900.log @@ -0,0 +1,6 @@ +2023-05-30 15:45:08 +0000 :: fe2 :: Submitting gfsfcst +2023-05-30 15:45:08 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:40351 +2023-05-30 20:45:54 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28616104 +2023-05-30 20:45:54 +0000 :: fe3 :: Task gfsfcst, jobid=28616104, in state FAILED (FAILED), ran for 258.0 seconds, exit status=11, try=1 (of 2) +2023-05-30 20:45:54 +0000 :: fe3 :: Submitting gfsfcst +2023-05-30 20:45:54 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:44424 diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/logs/2022111000.log b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/logs/2022111000.log new file mode 100644 index 0000000000..2bed34999b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/logs/2022111000.log @@ -0,0 +1,51 @@ +2023-05-28 01:31:38 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 01:31:38 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:36854 +2023-05-28 01:33:22 +0000 :: fe3 :: Submission status of previously pending gfsfcst is failure! 
sbatch: error: Batch job submission failed: Invalid qos specification +2023-05-28 01:33:22 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 01:33:23 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28483216 +2023-05-28 01:34:46 +0000 :: fe3 :: Task gfsfcst, jobid=28483216, in state QUEUED (PENDING) +2023-05-28 06:05:19 +0000 :: fe2 :: Submitting gfsfcst +2023-05-28 06:05:22 +0000 :: fe2 :: Submission of gfsfcst succeeded, jobid=28492564 +2023-05-28 06:38:42 +0000 :: fe3 :: Task gfsfcst, jobid=28492564, in state FAILED (FAILED), ran for 219.0 seconds, exit status=11, try=1 (of 2) +2023-05-28 06:38:42 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 06:38:42 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:33495 +2023-05-28 07:30:29 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28493642 +2023-05-28 07:30:30 +0000 :: fe3 :: Task gfsfcst, jobid=28493642, in state DEAD (TIMEOUT), ran for 2404.0 seconds, exit status=15, try=2 (of 2) +2023-05-28 07:34:01 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 07:34:01 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:39381 +2023-05-28 19:08:14 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28495562 +2023-05-28 19:08:14 +0000 :: fe3 :: Task gfsfcst, jobid=28495562, in state FAILED (TIMEOUT), ran for 2413.0 seconds, exit status=255, try=1 (of 2) +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f000-f000 +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f006-f006 +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f012-f012 +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f018-f018 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28521683 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f000-f000 succeeded, jobid=28521684 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f006-f006 succeeded, jobid=28521685 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f012-f012 succeeded, jobid=28521686 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f018-f018 succeeded, jobid=28521687 +2023-05-29 07:52:24 +0000 :: fe3 :: Submitting gfsfcst +2023-05-29 07:52:24 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28547487 +2023-05-29 08:33:19 +0000 :: fe3 :: Task gfsfcst, jobid=28547487, in state FAILED (FAILED), ran for 263.0 seconds, exit status=11, try=1 (of 2) +2023-05-29 08:33:19 +0000 :: fe3 :: Submitting gfsfcst +2023-05-29 08:33:19 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:46395 +2023-05-29 19:11:11 +0000 :: fe2 :: Submission status of previously pending gfsfcst is success, jobid=28549002 +2023-05-29 19:11:11 +0000 :: fe2 :: Task gfsfcst, jobid=28549002, in state DEAD (FAILED), ran for 235.0 seconds, exit status=9, try=2 (of 2) +2023-05-29 19:11:28 +0000 :: fe2 :: Forcibly submitting gfsfcst +2023-05-29 19:11:28 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:35093 +2023-05-29 19:25:59 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28571969 +2023-05-29 19:26:02 +0000 :: fe3 :: Task gfsfcst, jobid=28571969, in state DEAD (FAILED), ran for 419.0 seconds, exit status=35584, try=4 (of 2) +2023-05-29 19:27:44 +0000 :: fe3 :: Forcibly submitting gfsfcst +2023-05-29 19:27:44 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:38705 +2023-05-29 19:36:31 +0000 :: fe3 :: Submission status of previously pending 
gfsfcst is success, jobid=28572285 +2023-05-29 19:36:31 +0000 :: fe3 :: Task gfsfcst, jobid=28572285, in state DEAD (FAILED), ran for 233.0 seconds, exit status=35584, try=6 (of 2) +2023-05-29 19:36:31 +0000 :: fe3 :: Forcibly submitting gfsfcst +2023-05-29 19:36:31 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:43875 +2023-05-30 08:25:36 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28572472 +2023-05-30 08:25:37 +0000 :: fe3 :: Task gfsfcst, jobid=28572472, in state DEAD (FAILED), ran for 400.0 seconds, exit status=11, try=8 (of 2) +2023-05-30 08:25:53 +0000 :: fe3 :: Forcibly submitting gfsfcst +2023-05-30 08:25:53 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:44065 +2023-05-30 08:26:15 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28599398 +2023-05-30 08:26:15 +0000 :: fe3 :: Task gfsfcst, jobid=28599398, in state QUEUED (PENDING) +2023-05-30 15:45:08 +0000 :: fe2 :: This cycle is complete: Success diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/runcmds b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/runcmds new file mode 100644 index 0000000000..fc9fb92625 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_mynn.xml -d v17_p8_mynn.db +rocotostat -w v17_p8_mynn.xml -d v17_p8_mynn.db diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/v17_p8_mynn.crontab b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/v17_p8_mynn.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/v17_p8_mynn.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/v17_p8_mynn.db b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/v17_p8_mynn.db new file mode 100644 index 0000000000..5be2c27e56 Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/v17_p8_mynn.db differ diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/v17_p8_mynn.xml b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/v17_p8_mynn.xml new file mode 100644 index 0000000000..c54b793da3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/v17_p8_mynn.xml @@ -0,0 +1,153 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + hfv3gfs + + debug + 00:30:00 + 232:ppn=24:tpp=1 + xjet + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + 
HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/v17_p8_mynn_lock.db b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/v17_p8_mynn_lock.db new file mode 100644 index 0000000000..f621552075 Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_mynn_12x16_xjet/v17_p8_mynn_lock.db differ diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/8x8x2wg b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/8x8x2wg new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.aero b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.aeroanl b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.anal b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO 
+ if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.analcalc b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.analdiag b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.arch b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.atmanl b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.atmanlrun @@ -0,0 +1,11 @@ +#! 
/usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.atmensanl b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.awips b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. 
of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.base b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.base new file mode 100644 index 0000000000..550a8211b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.base @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="vjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." 
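# Editor's note (illustrative sketch, not part of the patch): the date-window checks
# used throughout these configs (the CONVINFO/SATINFO overrides in config.anal above
# and the DUMP_SUFFIX block just below) rely on CDATE being a ten-digit YYYYMMDDHH
# string, which orders correctly under bash integer comparison. A minimal standalone
# sketch, with a hypothetical cycle date, using the same window as the DUMP_SUFFIX
# block below:
CDATE_EXAMPLE="2019100100"   # hypothetical cycle, for illustration only
if [[ "${CDATE_EXAMPLE}" -ge "2019092100" && "${CDATE_EXAMPLE}" -le "2019110700" ]]; then
  echo "cycle ${CDATE_EXAMPLE} falls inside the NCO GFS v15.3 parallel dump window"
fi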
+export assim_freq=6 +export PSLOT="v17_p8_mynn_8x8_vjet_2wg" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_mynn" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-24} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-24} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-24} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." 
+ export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is 
defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
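# Editor's note (illustrative sketch, not part of the patch): the block below derives
# STEP_GFS from gfs_cyc as 24/gfs_cyc hours between GFS cycles (gfs_cyc=1 -> 24,
# gfs_cyc=2 -> 12, gfs_cyc=4 -> 6), and FHMAX_GFS is picked per cycle hour by
# expanding FHMAX_GFS_${cyc}. A standalone equivalent of that selection using bash's
# built-in ${!var} indirection instead of eval; all _demo names are hypothetical:
gfs_cyc_demo=4                                  # hypothetical: all four cycles
echo "STEP_GFS would be $(( 24 / gfs_cyc_demo )) hours between GFS cycles"
cyc_demo="12"
FHMAX_GFS_12_demo=120                           # hypothetical per-cycle value
varname="FHMAX_GFS_${cyc_demo}_demo"
echo "FHMAX_GFS for the ${cyc_demo}Z cycle would be ${!varname}"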
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
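# Editor's note (clarifying comment, not part of the patch): the cold-start check a
# few lines above zeroes IAU_OFFSET and IAU_FHROT in three cases: (1) a cycled run on
# its very first cycle (SDATE == CDATE) started from cold ICs, (2) IAU disabled
# outright (DOIAU=NO), or (3) a forecast-only run started from cold ICs. In each case
# there is no warm restart from a prior cycle for IAU increments to be applied
# against, so the segment runs with no IAU forecast offset.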
+ +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. 
+export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
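# Editor's note (illustrative sketch, not part of the patch): in the APP case
# statement below, S2S-family apps are matched with the S2S* glob, then a trailing
# "A" switches on aerosols and a leading "S2SW" switches on waves. A standalone
# sketch of those same pattern tests, with a hypothetical APP value:
APP_DEMO="S2SWA"   # hypothetical, for illustration only
[[ "${APP_DEMO}" =~ A$ ]] && echo "${APP_DEMO}: aerosols on (DO_AERO=YES)"
[[ "${APP_DEMO}" =~ ^S2SW ]] && echo "${APP_DEMO}: waves on (DO_WAVE=YES, WAVE_CDUMP=both)"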
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
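# Editor's note (illustrative sketch, not part of the patch): IAU_FHROT above is
# taken as the first character of IAUFHRS, so IAUFHRS="3,6,9" yields IAU_FHROT=3.
# That only holds while the first increment hour is a single digit; a field-based
# cut would be the more general extraction. Standalone comparison, with a
# hypothetical variable name:
IAUFHRS_DEMO="3,6,9"
echo "first character: $(echo "${IAUFHRS_DEMO}" | cut -c1)"      # -> 3
echo "first field:     $(echo "${IAUFHRS_DEMO}" | cut -d, -f1)"  # -> 3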
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.com b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx 
COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
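# Editor's note (illustrative sketch, not part of the patch): the "empty variables
# must include a space" convention noted at the top of this file matters because a
# ${var:-default} expansion treats an empty value the same as an unset one, while a
# single space is non-null and therefore kept. Presumably that is how the ' ' values
# in the wave settings below avoid being replaced by defaults elsewhere in the
# workflow. Standalone illustration with hypothetical names:
empty_demo=""
space_demo=" "
echo "empty: '${empty_demo:-DEFAULT}'"   # -> 'DEFAULT'
echo "space: '${space_demo:-DEFAULT}'"   # -> ' '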
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.earc b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ecen b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.echgres b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ediag b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.efcs b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.eobs b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.epos b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.esfc b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.eupd b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.fcst b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
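(Editorial aside, not part of the patch: in config.fcst_gsl above, `source $EXPDIR/config.ufs ${string}` receives an argument string that grows with each enabled component. A hedged trace is shown below; the resolutions reuse values that appear elsewhere in this patch, C384 with 025-degree ocean/ice and the mx025 wave grid, purely for illustration.)

# Illustrative only: DO_OCN=DO_ICE=DO_WAVE=YES, CASE=C384, OCNRES=025, ICERES=025, waveGRD="mx025"
string="--fv3 C384"
string="$string --mom6 025"
string="$string --cice6 025"
string="$string --ww3 mx025"   # ${waveGRD// /;} only changes anything for multi-grid wave setups
echo "$string"                 # --fv3 C384 --mom6 025 --cice6 025 --ww3 mx025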
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not used. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true."
+ export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." 
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
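(Editorial aside, not part of the patch: the restart list loop above, which also appears in config.fcst_gsl, is easiest to read with concrete numbers. The trace below uses hypothetical values, not this experiment's settings.)

# Illustrative only: restart_interval_gfs=12, FHMAX_GFS=48, DOIAU=YES, IAU_OFFSET=6
restart_interval_gfs=12; FHMAX_GFS=48; IAU_OFFSET=6
rst_list=""
xfh=$((restart_interval_gfs+(IAU_OFFSET/2)))   # 12 + 6/2 = 15
while [ $xfh -le $FHMAX_GFS ]; do
  rst_list="$rst_list $xfh"
  xfh=$((xfh+restart_interval_gfs))
done
echo "$rst_list"   # " 15 27 39": restarts are written at forecast hours 15, 27 and 39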
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.fit2obs b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.gempak b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.getic b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.gldas b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ice b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.init b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.init @@ -0,0 +1,54 @@ +#! 
/usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.landanl b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.landanlinit b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.landanlinit @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.landanlrun b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.metp b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
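(Editorial aside, not part of the patch: model_file_format and the *_truth_file_format strings above use METplus-style filename templates; the {init?...} and {lead?...} tokens are filled in by the verification scripts, not by this config. The hand-rolled emulation below uses made-up values only to show the expected shape of the resolved name.)

# Illustrative only; METplus performs the real substitution of {lead?fmt=%2H} and {init?fmt=%Y%m%d%H}.
CDUMP=gfs; init=2023010100; lead=24
printf 'pgbf%02d.%s.%s.grib2\n' "$lead" "$CDUMP" "$init"   # expected: pgbf24.gfs.2023010100.grib2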
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.nsst b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocn b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocnanal b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocnanalbmat b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocnanalbmat new file mode
100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocnpost b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.post b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.postsnd b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.prep b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.resources b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components; usually they land on the atmosphere tasks. + # However, it is suggested to limit the mediator PETS to 300, as a larger count can cause slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + 
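# ---------------------------------------------------------------------------
# Editorial note: the function below is an illustrative sketch only; it is not
# part of the patch and is never called by the workflow. It shows how the
# coupled-forecast task count computed above comes together, assuming (for the
# example only) the C384 gfs values defined in config.ufs in this change:
# 8x8 layout with 2 FV3 threads, 2 write groups of 24 write tasks, mx025 WW3,
# and the 025 MOM6/CICE6 settings. The function name is hypothetical.
example_total_pets() {
  local ntasks_fv3=$(( 8 * 8 * 6 ))          # layout_x_gfs * layout_y_gfs * 6 tiles = 384
  local fv3pets=$(( ntasks_fv3 * 2 ))        # 2 threads per FV3 task                = 768
  local quiltpets=$(( 2 * 24 * 2 ))          # write groups * write tasks * threads  = 96
  local atmpets=$(( fv3pets + quiltpets ))   # FV3ATM total                          = 864
  local wavpets=$(( 80 * 2 ))                # WW3 mx025: 80 tasks, 2 threads        = 160
  local ocnpets=$(( 220 * 1 ))               # MOM6 025: 220 tasks, 1 thread         = 220
  local icepets=$(( 120 * 1 ))               # CICE6 025: 120 tasks, 1 thread        = 120
  # The mediator overlaps the atmosphere PETs (capped at 300), so it adds nothing here.
  echo "Example NTASKS_TOT = $(( atmpets + wavpets + ocnpets + icepets ))"   # 1364
}
# ---------------------------------------------------------------------------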
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
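# ---------------------------------------------------------------------------
# Editorial note: illustrative sketch only, not part of the patch and never
# called by the workflow. Most per-node task counts in these resource files
# follow the same pattern: npe_node_<step> = npe_node_max / nth_<step>, using
# bc as elsewhere in this file. The step name "example" and its numbers are
# assumptions for demonstration; npe_node_max=128 matches the WCOSS2 value set
# at the top of this file.
example_npe_node() {
  local npe_node_max=128     # cores per node (WCOSS2 value in this file)
  local nth_example=4        # threads per task for a hypothetical step
  local npe_example=96       # total MPI tasks for the hypothetical step
  local npe_node_example
  npe_node_example=$(echo "${npe_node_max} / ${nth_example}" | bc)             # 32 tasks per node
  echo "nodes needed = $(( (npe_example + npe_node_example - 1) / npe_node_example ))"  # 3 nodes
}
# ---------------------------------------------------------------------------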
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.sfcanl b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ufs b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ufs new file mode 100644 index 0000000000..a04f6ba9d9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.ufs @@ -0,0 +1,372 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=8 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=8 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + 
export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.vrfy b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wafs b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wafsblending b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wave b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wavegempak b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wavegempak @@ -0,0 +1,13 @@ +#! 
/usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.waveinit b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.waveprep b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/runcmds b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/runcmds new file mode 100644 index 0000000000..fc9fb92625 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_mynn.xml -d v17_p8_mynn.db +rocotostat -w v17_p8_mynn.xml -d v17_p8_mynn.db diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/v17_p8.crontab b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/v17_p8.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/v17_p8.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/v17_p8_mynn.xml b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/v17_p8_mynn.xml new file mode 100644 index 0000000000..92f92c917b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_vjet_2wg/v17_p8_mynn.xml @@ -0,0 +1,152 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211090000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 01:20:00 + vjet + 126:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + 
&PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + vjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/8x8 b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/8x8 new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.aero b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.aeroanl b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.anal b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ 
"${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.analcalc b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.analdiag b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.arch b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.atmanl b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.atmanlrun @@ -0,0 +1,11 @@ +#! 
/usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.atmensanl b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.awips b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. 
of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.base b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.base new file mode 100644 index 0000000000..c8bff1428c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.base @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." 
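# --- Illustrative sketch (not part of the patch): SDATE/EDATE above bound the
# --- experiment in YYYYMMDDHH form, and successive cycles are spaced by
# --- assim_freq hours (exported just below). The example variables and the use
# --- of GNU date are assumptions for illustration only.
SDATE_example=2022111000
assim_freq_example=6
start="${SDATE_example:0:4}-${SDATE_example:4:2}-${SDATE_example:6:2} ${SDATE_example:8:2}:00"
date -u -d "${start} +${assim_freq_example} hours" +%Y%m%d%H   # -> 2022111006, the next cycle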
+export assim_freq=6 +export PSLOT="v17_p8_mynn_8x8_xjet" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_mynn" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is 
defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
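# --- Illustrative sketch (not part of the patch): STEP_GFS, computed just
# --- below as 24 / gfs_cyc, is the spacing in hours between GFS forecast
# --- cycles. The loop values are examples only.
for gfs_cyc_example in 1 2 4; do
  echo "gfs_cyc=${gfs_cyc_example} -> STEP_GFS=$(( 24 / gfs_cyc_example ))h"   # 24h, 12h, 6h spacing
done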
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
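# --- Illustrative sketch (not part of the patch): the FHMAX_GFS line above
# --- resolves the per-cycle variable FHMAX_GFS_${cyc} via eval; bash indirect
# --- expansion yields the same value. Example names/values are assumptions.
cyc_example=06
FHMAX_GFS_06=120                       # example per-cycle setting
fhmax_var="FHMAX_GFS_${cyc_example}"   # -> FHMAX_GFS_06
echo "${!fhmax_var}"                   # -> 120, same result as the eval-based lookup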
+ +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. 
+export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
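The RUN assignment above relies on nested default expansion: keep RUN if it is already set and non-empty, otherwise fall back to CDUMP, otherwise to the literal "gfs". A small sketch of how that cascade behaves (the values are only examples):

# Illustrative only: nested ${var:-default} falls through left to right.
unset RUN CDUMP
echo "${RUN:-${CDUMP:-gfs}}"    # -> gfs      (neither set)
CDUMP="gdas"
echo "${RUN:-${CDUMP:-gfs}}"    # -> gdas     (CDUMP set, RUN unset)
RUN="enkfgdas"
echo "${RUN:-${CDUMP:-gfs}}"    # -> enkfgdas (RUN wins once set)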
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
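In the IAU block above, IAU_FHROT is taken as the first character of IAUFHRS, which is fine for the single-digit lead times used here ("3,6,9") but would truncate a two-digit first entry. A hedged alternative sketch that takes everything before the first comma instead:

# Illustrative only: first element of a comma-separated hour list.
IAUFHRS="3,6,9"
IAU_FHROT="${IAUFHRS%%,*}"      # -> 3 ; also correct for e.g. "12,15,18" -> 12
echo "${IAU_FHROT}"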
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
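The IAU shutoff above ORs together three situations (cycled run starting cold, IAU disabled, or a cold-started free forecast) in which there is no prior state to relax from, so the rotation and offset are zeroed. A sketch of the same test factored into a helper; the function name is hypothetical and not part of the workflow:

# Illustrative refactor only.
no_iau_possible() {
  [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && "${EXP_WARM_START}" = ".false." ]] ||
    [[ "${DOIAU}" = "NO" ]] ||
    [[ "${MODE}" = "forecast-only" && "${EXP_WARM_START}" = ".false." ]]
}
if no_iau_possible; then export IAU_OFFSET=0 IAU_FHROT=0; fi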
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
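The DUMP_SUFFIX window above compares YYYYMMDDHH date strings with -ge/-le inside [[ ]], which evaluates both sides arithmetically, so the string dates compare correctly as integers. A quick sketch with an assumed CDATE inside the window:

# Illustrative only: numeric comparison of YYYYMMDDHH stamps.
CDATE="2019100412"
if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then
  echo "use GFS v15.3 parallel dumps (DUMP_SUFFIX=p)"
fi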
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
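The S2S* branch of the APP case above uses two regex tests to decide which extra components to switch on: a trailing "A" adds aerosols and a leading "S2SW" adds waves. A short sketch of how the four documented S2S variants classify:

# Illustrative only: APP string classification used by the S2S* branch.
for APP in S2S S2SA S2SW S2SWA; do
  opts="ocean+ice"
  [[ "${APP}" =~ A$ ]]    && opts+=" +aerosols"
  [[ "${APP}" =~ ^S2SW ]] && opts+=" +waves"
  echo "${APP}: ${opts}"
done
# -> S2S: ocean+ice ; S2SA: +aerosols ; S2SW: +waves ; S2SWA: +aerosols +waves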
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
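The increment-zeroing lists above are single-quoted, comma-separated Fortran list values wrapped in double quotes, so the shell preserves them verbatim for later substitution into a namelist. A sketch of what that expands to; the namelist entry name below is a placeholder, not necessarily the real key:

# Illustrative only.
INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'"
echo "incvars_to_zero = ${INCREMENTS_TO_ZERO}"   # -> incvars_to_zero = 'liq_wat_inc','icmr_inc'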
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.com b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx 
COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
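The note at the top of this defaults file says empty variables must include a space or they will be overwritten; the likely mechanism is that downstream configs apply ${var:-default} style defaults, which treat an empty value as unset while a lone space survives. A small sketch of that behavior, using one of the wave variables as the example:

# Illustrative only: ":-" replaces empty values, but not a single space.
waveesmfGRD=""
echo "[${waveesmfGRD:-filled_from_default}]"   # -> [filled_from_default]  (empty value overridden)
waveesmfGRD=" "
echo "[${waveesmfGRD:-filled_from_default}]"   # -> [ ]                    (space preserved)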
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.earc b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ecen b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.echgres b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ediag b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.efcs b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.eobs b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.epos b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.esfc b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.eupd b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.fcst b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.fcst.org b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.fcst.org new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.fcst.org @@ -0,0 +1,364 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
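The component-config sourcing loop earlier in this file uses eval plus awk to look up each DO_${component} switch and lowercase the component name. A hedged alternative sketch of the same indirection in pure bash (requires bash 4+ for ${var,,}); the echo stands in for the actual sourcing:

# Illustrative sketch only, not the workflow's implementation.
DO_OCN="YES"
for component in WAVE OCN ICE AERO; do
  control="DO_${component}"
  if [[ "${!control:-NO}" == "YES" ]]; then
    echo "would source config.${component,,}"    # -> config.ocn
  fi
done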
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. 
+ if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." 
+ +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." 
+export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. 
### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." 
+ export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. ### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
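+
+  # Worked example of the restart_interval list built above (values are
+  # illustrative only, not set in this file): with restart_interval_gfs=12,
+  # FHMAX_GFS=48 and IAU_OFFSET=6, the loop starts at xfh=12+(6/2)=15 and
+  # steps by 12, giving restart_interval="15 27 39". With DOIAU="NO" the
+  # offset is zeroed and the list becomes "12 24 36 48".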
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.fcst_test b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.fcst_test new file mode 100644 index 0000000000..315ccef70c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.fcst_test @@ -0,0 +1,437 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
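+
+# Illustrative mapping of the coupling switches above (an example only; the
+# DO_* values are set elsewhere, typically in config.base): a coupled
+# atmosphere-ocean-ice-wave run with DO_COUPLED=DO_OCN=DO_ICE=DO_WAVE=YES and
+# DO_AERO=NO ends up with cpl, cplflx, cplice and cplwav all ".true." and
+# cplchm=".false."; an atmosphere-only run leaves all five at ".false.".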
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".false." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. 
+else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + #JKHexport progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + #jkhexport ncld=2 + #jkhexport FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + #jkhexport ltaerosol=".false." + #jkhexport lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + #jkhexport hord_mt_nh_nonmono=5 + #jkhexport hord_xx_nh_nonmono=5 + #jkhexport vtdm4_nh_nonmono=0.02 + #jkhexport nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. ### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + #jkhexport sedi_semi=.true. + #jkhexport decfl=10 + #jkhif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + #jkh export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + #jkh export ltaerosol=".true." + #jkhelse + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export ltaerosol=".false." + #jkhfi + export lradar=".false." + #jkhexport dt_inner=$((DELTIM/2)) + #jkhif [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" 
+ +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
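+
+  # Note on the DO_AERO cadence check above: the [[ ... =~ "${STEP_GFS}" ]]
+  # test is a plain substring match on the restart list. Illustrative values
+  # only (STEP_GFS is set elsewhere): with STEP_GFS=6 and restart_interval
+  # "12 24" the string "6" is not found, so the list becomes "6 12 24";
+  # with restart_interval "36" the "6" inside "36" already matches, so
+  # nothing is prepended even though hour 6 itself is not in the list.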
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.fit2obs b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.gempak b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.getic b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.gldas b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ice b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.init b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.init @@ -0,0 +1,54 @@ +#! 
/usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.landanl b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.landanlinit b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.landanlrun b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.metp b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.nsst b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocn b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable is determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is time interval for applying increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocnanal b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size reolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocnanalbmat b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b 
--- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocnpost b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.post b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.postsnd b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.prep b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.resources b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs}
+            export layout_y=${layout_y_gfs}
+            export WRITE_GROUP=${WRITE_GROUP_GFS}
+            export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS}
+            ntasks_fv3=${ntasks_fv3_gfs}
+            ntasks_quilt=${ntasks_quilt_gfs}
+            nthreads_fv3=${nthreads_fv3_gfs}
+        fi
+
+        # PETS for the atmosphere dycore
+        (( FV3PETS = ntasks_fv3 * nthreads_fv3 ))
+        echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})"
+
+        # PETS for quilting
+        if [[ "${QUILTING:-}" = ".true." ]]; then
+            (( QUILTPETS = ntasks_quilt * nthreads_fv3 ))
+            (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD ))
+            export WRTTASK_PER_GROUP
+        else
+            QUILTPETS=0
+        fi
+        echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})"
+
+        # Total PETS for the atmosphere component
+        ATMTHREADS=${nthreads_fv3}
+        (( ATMPETS = FV3PETS + QUILTPETS ))
+        export ATMPETS ATMTHREADS
+        echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})"
+
+        # Total PETS for the coupled model (starting w/ the atmosphere)
+        NTASKS_TOT=${ATMPETS}
+
+        # The mediator PETS can overlap with other components; they usually land on the atmosphere tasks.
+        # However, it is suggested to limit the mediator PETS to 300, as larger values may cause slow performance.
+        # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit
+        # TODO: Update reference when moved to ufs-weather-model RTD
+        MEDTHREADS=${nthreads_mediator:-1}
+        MEDPETS=${MEDPETS:-${ATMPETS}}
+        [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300
+        export MEDPETS MEDTHREADS
+        echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})"
+
+        if [[ "${DO_AERO}" = "YES" ]]; then
+            # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks).
+            (( CHMTHREADS = ATMTHREADS ))
+            (( CHMPETS = FV3PETS ))
+            # Do not add to NTASKS_TOT
+            export CHMPETS CHMTHREADS
+            echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})"
+        fi
+
+        if [[ "${DO_WAVE}" = "YES" ]]; then
+            (( WAVPETS = ntasks_ww3 * nthreads_ww3 ))
+            (( WAVTHREADS = nthreads_ww3 ))
+            export WAVPETS WAVTHREADS
+            echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})"
+            (( NTASKS_TOT = NTASKS_TOT + WAVPETS ))
+        fi
+
+        if [[ "${DO_OCN}" = "YES" ]]; then
+            (( OCNPETS = ntasks_mom6 * nthreads_mom6 ))
+            (( OCNTHREADS = nthreads_mom6 ))
+            export OCNPETS OCNTHREADS
+            echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})"
+            (( NTASKS_TOT = NTASKS_TOT + OCNPETS ))
+        fi
+
+        if [[ "${DO_ICE}" = "YES" ]]; then
+            (( ICEPETS = ntasks_cice6 * nthreads_cice6 ))
+            (( ICETHREADS = nthreads_cice6 ))
+            export ICEPETS ICETHREADS
+            echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})"
+            (( NTASKS_TOT = NTASKS_TOT + ICEPETS ))
+        fi
+
+        echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}"
+
+        if [[ "${_CDUMP}" =~ "gfs" ]]; then
+            declare -x "npe_${step}_gfs"="${NTASKS_TOT}"
+            declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model
+            declare -x "npe_node_${step}_gfs"="${npe_node_max}"
+        else
+            declare -x "npe_${step}"="${NTASKS_TOT}"
+            declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model
+            declare -x "npe_node_${step}"="${npe_node_max}"
+        fi
+
+    done
+
+    case "${CASE}" in
+        "C48" | "C96" | "C192")
+            declare -x "wtime_${step}"="00:30:00"
+            declare -x "wtime_${step}_gfs"="03:00:00"
+            ;;
+        "C384" | "C768" | "C1152")
+            declare -x "wtime_${step}"="01:00:00"
+            declare -x "wtime_${step}_gfs"="06:00:00"
+            ;;
+        *)
+            echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}"
+            exit 1
+            ;;
+    esac
+
+    unset _CDUMP _CDUMP_LIST
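+
+    # NOTE (illustrative worked example, using the C384/025/mx025 defaults from the
+    # config.ufs alongside this file, with QUILTING=.true.):
+    #   FV3:   ntasks_fv3   = 6*8*6   = 288,  FV3PETS   = 288*1 = 288
+    #   quilt: ntasks_quilt = 2*(8*6) =  96,  QUILTPETS =  96*1 =  96
+    #   ATMPETS = 288 + 96 = 384; the mediator is capped at 300 PETs, overlaps, and adds nothing
+    #   WW3 mx025: 80*2 = 160   MOM6 025: 220*1 = 220   CICE6 025: 120*1 = 120
+    #   NTASKS_TOT = 384 + 160 + 220 + 120 = 884 PETs for the coupled gdas forecast
+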
+    unset NTASKS_TOT
+
+elif [[ ${step} = "ocnpost" ]]; then
+
+    export wtime_ocnpost="00:30:00"
+    export npe_ocnpost=1
+    export npe_node_ocnpost=1
+    export nth_ocnpost=1
+    export memory_ocnpost="96G"
+    if [[ ${machine} == "JET" ]]; then
+        # JET only has 88GB of requestable memory per node,
+        # so a second node is required to meet the requirement
+        npe_ocnpost=2
+    fi
+
+elif [[ ${step} = "post" ]]; then
+
+    export wtime_post="00:12:00"
+    export wtime_post_gfs="01:00:00"
+    export npe_post=126
+    res=$(echo "${CASE}" | cut -c2-)
+    if (( npe_post > res )); then
+        export npe_post=${res}
+    fi
+    export nth_post=1
+    export npe_node_post=${npe_post}
+    export npe_node_post_gfs=${npe_post}
+    export npe_node_dwn=${npe_node_max}
+    if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi
+    if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi
+    export is_exclusive=True
+
+elif [[ ${step} = "wafs" ]]; then
+
+    export wtime_wafs="00:30:00"
+    export npe_wafs=1
+    export npe_node_wafs=${npe_wafs}
+    export nth_wafs=1
+    export memory_wafs="1GB"
+
+elif [[ ${step} = "wafsgcip" ]]; then
+
+    export wtime_wafsgcip="00:30:00"
+    export npe_wafsgcip=2
+    export nth_wafsgcip=1
+    export npe_node_wafsgcip=1
+    export memory_wafsgcip="50GB"
+
+elif [[ ${step} = "wafsgrib2" ]]; then
+
+    export wtime_wafsgrib2="00:30:00"
+    export npe_wafsgrib2=18
+    export nth_wafsgrib2=1
+    npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc)
+    export npe_node_wafsgrib2
+    export memory_wafsgrib2="80GB"
+
+elif [[ ${step} = "wafsblending" ]]; then
+
+    export wtime_wafsblending="00:30:00"
+    export npe_wafsblending=1
+    export nth_wafsblending=1
+    npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc)
+    export npe_node_wafsblending
+    export memory_wafsblending="15GB"
+
+elif [[ ${step} = "wafsgrib20p25" ]]; then
+
+    export wtime_wafsgrib20p25="00:30:00"
+    export npe_wafsgrib20p25=11
+    export nth_wafsgrib20p25=1
+    npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc)
+    export npe_node_wafsgrib20p25
+    export memory_wafsgrib20p25="80GB"
+
+elif [[ ${step} = "wafsblending0p25" ]]; then
+
+    export wtime_wafsblending0p25="00:30:00"
+    export npe_wafsblending0p25=1
+    export nth_wafsblending0p25=1
+    npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc)
+    export npe_node_wafsblending0p25
+    export memory_wafsblending0p25="15GB"
+
+elif [[ ${step} = "vrfy" ]]; then
+
+    export wtime_vrfy="03:00:00"
+    export wtime_vrfy_gfs="06:00:00"
+    export npe_vrfy=3
+    export nth_vrfy=1
+    export npe_node_vrfy=1
+    export npe_vrfy_gfs=1
+    export npe_node_vrfy_gfs=1
+    if [[ ${machine} == "HERA" ]]; then
+        export memory_vrfy="16384M"
+    fi
+    export is_exclusive=True
+
+elif [[ "${step}" = "fit2obs" ]]; then
+
+    export wtime_fit2obs="00:20:00"
+    export npe_fit2obs=3
+    export nth_fit2obs=1
+    export npe_node_fit2obs=1
+    export memory_fit2obs="20G"
+    if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi
+
+elif [[ "${step}" = "metp" ]]; then
+
+    export nth_metp=1
+    export wtime_metp="03:00:00"
+    export npe_metp=4
+    export npe_node_metp=4
+    export wtime_metp_gfs="06:00:00"
+    export npe_metp_gfs=4
+    export npe_node_metp_gfs=4
+    export is_exclusive=True
+
+elif [[ ${step} = "echgres" ]]; then
+
+    export wtime_echgres="00:10:00"
+    export npe_echgres=3
+    export nth_echgres=${npe_node_max}
+    export npe_node_echgres=1
+    if [[ "${machine}" = "WCOSS2" ]]; then
+        export memory_echgres="200GB"
+    fi
+
+elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
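+ # NOTE (illustrative reading of the values above): eobs would run on
+ # npe_eobs / npe_node_eobs = 480 / 40 = 12 nodes, each node using
+ # 40 tasks * 3 threads = 120 of the npe_node_max=128 cores set at the top of this file.
+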
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.sfcanl b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ufs b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ufs new file mode 100644 index 0000000000..580431cf19 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.ufs @@ -0,0 +1,371 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=8 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=8 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + 
export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + 
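+ # NOTE (illustrative, for reference only): the "025" values above correspond to a
+ # quarter-degree global ocean, i.e. 360 / 0.25 = 1440 longitude points (NX_GLB)
+ # by 1080 latitude points (NY_GLB), stepped at 900 s (dynamics) / 1800 s
+ # (thermodynamics) and decomposed over 220 MPI tasks.
+ 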
MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.vrfy b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wafs b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wafsblending b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wave b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wavegempak b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wavegempak @@ -0,0 +1,13 @@ +#! 
/usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.waveinit b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.waveprep b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/logs/2022111000.log b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/logs/2022111000.log new file mode 100644 index 0000000000..c6a3b7ac57 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/logs/2022111000.log @@ -0,0 +1,50 @@ +2023-05-28 01:31:38 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 01:31:38 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:36854 +2023-05-28 01:33:22 +0000 :: fe3 :: Submission status of previously pending gfsfcst is failure! 
sbatch: error: Batch job submission failed: Invalid qos specification +2023-05-28 01:33:22 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 01:33:23 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28483216 +2023-05-28 01:34:46 +0000 :: fe3 :: Task gfsfcst, jobid=28483216, in state QUEUED (PENDING) +2023-05-28 06:05:19 +0000 :: fe2 :: Submitting gfsfcst +2023-05-28 06:05:22 +0000 :: fe2 :: Submission of gfsfcst succeeded, jobid=28492564 +2023-05-28 06:38:42 +0000 :: fe3 :: Task gfsfcst, jobid=28492564, in state FAILED (FAILED), ran for 219.0 seconds, exit status=11, try=1 (of 2) +2023-05-28 06:38:42 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 06:38:42 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:33495 +2023-05-28 07:30:29 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28493642 +2023-05-28 07:30:30 +0000 :: fe3 :: Task gfsfcst, jobid=28493642, in state DEAD (TIMEOUT), ran for 2404.0 seconds, exit status=15, try=2 (of 2) +2023-05-28 07:34:01 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 07:34:01 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:39381 +2023-05-28 19:08:14 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28495562 +2023-05-28 19:08:14 +0000 :: fe3 :: Task gfsfcst, jobid=28495562, in state FAILED (TIMEOUT), ran for 2413.0 seconds, exit status=255, try=1 (of 2) +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f000-f000 +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f006-f006 +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f012-f012 +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f018-f018 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28521683 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f000-f000 succeeded, jobid=28521684 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f006-f006 succeeded, jobid=28521685 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f012-f012 succeeded, jobid=28521686 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f018-f018 succeeded, jobid=28521687 +2023-05-29 07:52:24 +0000 :: fe3 :: Submitting gfsfcst +2023-05-29 07:52:24 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28547487 +2023-05-29 08:33:19 +0000 :: fe3 :: Task gfsfcst, jobid=28547487, in state FAILED (FAILED), ran for 263.0 seconds, exit status=11, try=1 (of 2) +2023-05-29 08:33:19 +0000 :: fe3 :: Submitting gfsfcst +2023-05-29 08:33:19 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:46395 +2023-05-29 19:11:11 +0000 :: fe2 :: Submission status of previously pending gfsfcst is success, jobid=28549002 +2023-05-29 19:11:11 +0000 :: fe2 :: Task gfsfcst, jobid=28549002, in state DEAD (FAILED), ran for 235.0 seconds, exit status=9, try=2 (of 2) +2023-05-29 19:11:28 +0000 :: fe2 :: Forcibly submitting gfsfcst +2023-05-29 19:11:28 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:35093 +2023-05-29 19:25:59 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28571969 +2023-05-29 19:26:02 +0000 :: fe3 :: Task gfsfcst, jobid=28571969, in state DEAD (FAILED), ran for 419.0 seconds, exit status=35584, try=4 (of 2) +2023-05-29 19:27:44 +0000 :: fe3 :: Forcibly submitting gfsfcst +2023-05-29 19:27:44 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:38705 +2023-05-29 19:36:31 +0000 :: fe3 :: Submission status of previously pending 
gfsfcst is success, jobid=28572285 +2023-05-29 19:36:31 +0000 :: fe3 :: Task gfsfcst, jobid=28572285, in state DEAD (FAILED), ran for 233.0 seconds, exit status=35584, try=6 (of 2) +2023-05-29 19:36:31 +0000 :: fe3 :: Forcibly submitting gfsfcst +2023-05-29 19:36:31 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:43875 +2023-05-30 08:25:36 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28572472 +2023-05-30 08:25:37 +0000 :: fe3 :: Task gfsfcst, jobid=28572472, in state DEAD (FAILED), ran for 400.0 seconds, exit status=11, try=8 (of 2) +2023-05-30 08:25:53 +0000 :: fe3 :: Forcibly submitting gfsfcst +2023-05-30 08:25:53 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:44065 +2023-05-30 08:26:15 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28599398 +2023-05-30 08:26:15 +0000 :: fe3 :: Task gfsfcst, jobid=28599398, in state QUEUED (PENDING) diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/runcmds b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/runcmds new file mode 100644 index 0000000000..fc9fb92625 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_mynn.xml -d v17_p8_mynn.db +rocotostat -w v17_p8_mynn.xml -d v17_p8_mynn.db diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/v17_p8_mynn.crontab b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/v17_p8_mynn.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/v17_p8_mynn.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/v17_p8_mynn.db b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/v17_p8_mynn.db new file mode 100644 index 0000000000..4b00ef94be Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/v17_p8_mynn.db differ diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/v17_p8_mynn.xml b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/v17_p8_mynn.xml new file mode 100644 index 0000000000..27ffcfc501 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/v17_p8_mynn.xml @@ -0,0 +1,153 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + hfv3gfs + + debug + 00:30:00 + 104:ppn=24:tpp=1 + xjet + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + 
CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/v17_p8_mynn_lock.db b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/v17_p8_mynn_lock.db new file mode 100644 index 0000000000..e818581886 Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_mynn_8x8_xjet/v17_p8_mynn_lock.db differ diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/12x12x2wg b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/12x12x2wg new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.aero b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use '*' as the tracer name to set the default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.aeroanl b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.aerosol_init @@ -0,0 +1,10 @@ +#!
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.anal b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # 
Assimilate Metop-C GNSSRO + if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.analcalc b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.analdiag b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.arch b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.atmanl b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.atmensanl b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.awips b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.awips @@ -0,0 +1,17 @@ +#! 
/usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.base b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.base new file mode 100644 index 0000000000..12b668cdd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.base @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="vjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." +export assim_freq=6 +export PSLOT="v17_p8_thompson_12x12_vjet_2wg" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_thompson" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
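+# NOTE: APP selects the coupled configuration assembled below. The case block
+# further down recognizes ATM, ATMA, ATMW, NG-GODAS and the S2S* family
+# (S2S, S2SA, S2SW, S2SWA); any other value hits the "Unrecognized APP" branch
+# and aborts with exit 1.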
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-24} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-24} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-24} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
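+# Illustration of the per-cycle forecast-length indirection above (derived from
+# the settings in this file, not an extra setting): with gfs_cyc=1 only the 00Z
+# gfs cycle runs, so at run time
+#   FHMAX_GFS=$(eval echo \${FHMAX_GFS_00})   # -> 168
+#   STEP_GFS=$(( 24 / gfs_cyc ))              # -> 24 hours between gfs cycles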
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
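+# NOTE: the @...@ tokens in this file (e.g. @MACHINE@, @ACCOUNT@, @CASECTL@,
+# @gfs_cyc@) are template placeholders; the experiment setup step substitutes
+# real values when an experiment directory is generated (compare the filled-in
+# config.base alongside this template).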
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
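ERRSCRIPT, REDOUT, and REDERR defined earlier in this file follow the usual NCO ex-script convention. A minimal sketch of how they are typically consumed, assuming that convention (the executable and log-file names below are placeholders, not code from this repository):

    # Run a program with the configured redirection strings, then apply ERRSCRIPT.
    eval "${FCSTEXEC:-ufs_model.x} ${REDOUT}model.out ${REDERR}model.err"
    export err=$?
    ${ERRSCRIPT} || exit "${err}"   # the default ERRSCRIPT ('eval [[ $err = 0 ]]') succeeds only when err is 0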
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
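As a reading aid, here is what the APP case block above produces for three representative values; these results follow directly from the branches shown and are not new settings:

    # APP=ATM   -> coupling toggles keep their defaults; confignamevarfornems="atm"
    # APP=ATMW  -> DO_COUPLED=YES DO_WAVE=YES WAVE_CDUMP=both; confignamevarfornems="leapfrog_atm_wav"
    # APP=S2SWA -> DO_COUPLED=YES DO_OCN=YES DO_ICE=YES DO_AERO=YES DO_WAVE=YES WAVE_CDUMP=both
    #              confignamevarfornems="cpld_aero_outerwave"  (the "A$" test appends _aero before "^S2SW" appends _outerwave)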
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
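The doubled quoting in INCREMENTS_TO_ZERO and INCVARS_ZERO_STRAT above keeps the inner single quotes intact so the value can be dropped into a Fortran namelist verbatim. A minimal sketch of that substitution; the key name below is illustrative, not the actual utility's namelist:

    # Substituting the value into a namelist fragment (key name is illustrative):
    printf '  incvars_to_zero = %s\n' "${INCREMENTS_TO_ZERO}"
    # -> prints:   incvars_to_zero = 'liq_wat_inc','icmr_inc'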
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.com b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' 
+declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
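The note at the top of this file, that empty variables must include a space, is about the ${VAR:-default} expansions applied afterwards: config.base sources this file inside its S2S* branch before the FHMAX_GFS_* defaults are applied, so the 48-hour values above take precedence for S2S runs. Bash's ':-' operator treats an empty value exactly like an unset one, so a lone space is the smallest value that still survives. A minimal check of that behaviour:

    x=""  ; echo "[${x:-fallback}]"   # prints [fallback]  - empty triggers the default
    x=" " ; echo "[${x:-fallback}]"   # prints [ ]         - a single space keeps the override in force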
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.earc b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ecen b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.echgres b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ediag b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.efcs b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.eobs b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.epos b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.esfc b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.eupd b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.fcst b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
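Earlier in this file, a for-loop sources config.wave, config.ocn, config.ice, or config.aero whenever the matching DO_<COMPONENT> flag is YES, building the lower-case file suffix with eval and awk. The following is an equivalent bash-4 sketch using indirect expansion and case conversion, shown only as an illustration of what that loop does, not as a proposed change:

    for component in WAVE OCN ICE AERO; do
        control="DO_${component}"
        if [[ "${!control:-NO}" == "YES" ]]; then
            . "${EXPDIR}/config.${component,,}"   # ${component,,} lower-cases WAVE -> wave, etc.
        fi
    done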
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." 
+ export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." 
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.fit2obs b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.gempak b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.getic b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.gldas b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ice b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ice @@ -0,0 +1,5 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.init b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.init @@ -0,0 +1,54 @@ +#! /usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.landanl b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.landanlinit b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.landanlrun b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.metp b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.nsst b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocn b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocnanal b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocnanalbmat
b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocnpost b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ocnpost @@ -0,0 +1,16 @@ +#! 
/usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.post b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.postsnd b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.prep b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.resources b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + 
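+  # Illustrative PETS accounting for a coupled forecast (component task counts
+  # below are assumed example values, not recommended settings):
+  #   ntasks_fv3=288, nthreads_fv3=2                -> FV3PETS   = 576
+  #   ntasks_quilt=24 with QUILTING=.true.          -> QUILTPETS = 24 * 2 = 48
+  #   ATMPETS = 576 + 48 = 624 (starting value of NTASKS_TOT)
+  #   DO_OCN=YES, ntasks_mom6=120, nthreads_mom6=1  -> NTASKS_TOT = 624 + 120 = 744
+  #   DO_ICE=YES, ntasks_cice6=48, nthreads_cice6=1 -> NTASKS_TOT = 744 + 48  = 792
+  # GOCART and the mediator reuse existing PETS and do not add to NTASKS_TOT.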
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.sfcanl b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ufs b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ufs new file mode 100644 index 0000000000..ad28ce429b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.ufs @@ -0,0 +1,372 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.vrfy b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wafs b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wafsblending b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wave b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. 
$EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wavegempak b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.waveinit b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.waveprep b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/runcmds b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/runcmds new file mode 100644 index 0000000000..f1a8d2d8cd --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_thompson.xml -d v17_p8_thompson.db +rocotostat -w v17_p8_thompson.xml -d v17_p8_thompson.db diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/v17_p8_thompson.crontab b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/v17_p8_thompson.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/v17_p8_thompson.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/v17_p8_thompson.xml b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/v17_p8_thompson.xml new file mode 100644 index 0000000000..43ace50e9f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_vjet_2wg/v17_p8_thompson.xml @@ -0,0 +1,151 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211090000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 04:30:00 + vjet + 246:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 
_f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + vjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet/logs/2022111000.log b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet/logs/2022111000.log new file mode 100644 index 0000000000..62e3bef904 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet/logs/2022111000.log @@ -0,0 +1,2 @@ +2023-06-01 20:41:00 +0000 :: fe3 :: Submitting gfsfcst +2023-06-01 20:41:00 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28731995 diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/12x12x2wg b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/12x12x2wg new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.aero b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.aeroanl b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.anal b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # 
Assimilate Metop-C GNSSRO + if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assimilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assimilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.analcalc b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.analdiag b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.arch b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.atmanl b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.atmensanl b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.awips b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.awips @@ -0,0 +1,17 @@ +#! 
/usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.base b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.base new file mode 100644 index 0000000000..ca9f4f0994 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.base @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." +export assim_freq=6 +export PSLOT="v17_p8_thompson_12x12_xjet_2wg" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_thompson" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
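# Illustrative sketch (example values only, not part of the workflow files): how the
# per-cycle forecast length and the GFS cycle interval defined above resolve at run time.
cyc="06"; gfs_cyc=1; FHMAX_GFS_06=24
FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}})    # picks up FHMAX_GFS_06 -> 24
name="FHMAX_GFS_${cyc}"; echo "${!name}"       # same result via indirect expansion, without eval
STEP_GFS=$(( 24 / gfs_cyc ))                   # hours between GFS forecast cycles -> 24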
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
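# Illustrative restatement (hypothetical helper, not used anywhere in the workflow):
# the compound test above zeroes IAU_OFFSET and IAU_FHROT whenever there is no prior
# cycle to provide increments -- a cold-started first cycle, DOIAU turned off, or a
# cold-started forecast-only run.
iau_should_be_off() {
  [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && "${EXP_WARM_START}" = ".false." ]] ||
  [[ "${DOIAU}" = "NO" ]] ||
  [[ "${MODE}" = "forecast-only" && "${EXP_WARM_START}" = ".false." ]]
}
if iau_should_be_off; then echo "IAU offsets would be zeroed"; fi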
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
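# Illustrative sketch (hypothetical command and made-up values, not the actual
# mechanism): the @...@ tokens in this template (@MACHINE@, @SDATE@, @PSLOT@, ...)
# are filled in when an experiment is generated by the workflow setup scripts;
# conceptually the substitution amounts to something like:
sed -e "s;@MACHINE@;HERA;g" \
    -e "s;@SDATE@;2022111000;g" \
    -e "s;@PSLOT@;my_experiment;g" \
    config.base.emc.dyn_EMC > config.base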
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
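# Illustrative sketch (example value, not part of the workflow files): how the
# pattern matches in the S2S* branch above classify an APP string such as "S2SWA".
APP="S2SWA"
[[ "${APP}" =~ A$ ]]    && echo "ends in A   -> DO_AERO=YES"   # true for S2SWA
[[ "${APP}" =~ ^S2SW ]] && echo "starts S2SW -> DO_WAVE=YES"   # true for S2SWA
# A plain "S2S" matches neither pattern, so only the coupled ocean/ice defaults apply.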
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
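# Illustrative sketch (not part of the workflow files): IAU_FHROT above is taken as
# the first character of the comma-separated IAUFHRS list, i.e. the first increment
# hour; note that cut -c1 only works while that first entry is a single digit.
IAUFHRS="3,6,9"
echo "${IAUFHRS}" | cut -c1    # -> 3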
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
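# Illustrative sketch (example values, not part of the workflow files): with a quoted
# right-hand side, the [[ =~ ]] tests above do literal substring matching, so the
# ensemble dumps inherit the same FHCYC as their deterministic counterparts.
for CDUMP in gdas enkfgdas gfs enkfgfs; do
  if [[ "${CDUMP}" =~ "gdas" ]]; then
    echo "${CDUMP}: FHCYC=1"
  elif [[ "${CDUMP}" =~ "gfs" ]]; then
    echo "${CDUMP}: FHCYC=24"
  fi
done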
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.com b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' 
+declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
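# Illustrative sketch (not part of the workflow files): the note at the top of this
# file about empty values needing a space presumably reflects ${var:-default}-style
# fallbacks downstream, where ":-" treats an empty string like an unset variable
# while a single space survives, e.g. for the space-valued variables below such as
# waveesmfGRD:
demo=""
echo "[${demo:-some_default}]"   # -> [some_default]  (empty value gets overwritten)
demo=" "
echo "[${demo:-some_default}]"   # -> [ ]             (single space is kept)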
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.earc b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ecen b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.echgres b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ediag b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.efcs b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.eobs b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.epos b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.esfc b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.eupd b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.fcst b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.fcst new file mode 100644 index 0000000000..6c579000b3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.fcst @@ -0,0 +1,429 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." 
+ export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".false." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF +#JKH export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +#JKH export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." 
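+# Worked example (illustrative): the tbf/tbp suffixes set above are appended to
+# the field_table file names chosen in the microphysics branches below. With
+# satmedmf=".true." and progsigma=".true." (tbf="_satmedmf", tbp="_progsigma"),
+# the Zhao-Carr branch would select:
+#   FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr_satmedmf_progsigma"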
+ +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + #JKH ??? export dt_inner=40. ### JKH - 10dec + #JKH export sedi_semi=??? + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + #JKHif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_thompson" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export ltaerosol=".true." + progsigma=.false. + tbp="" + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke${tbp}" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + #JKHexport FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
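+  # Worked example (illustrative values): with restart_interval_gfs=12, FHMAX_GFS=120,
+  # DOIAU="YES" and IAU_OFFSET=6, the loop above starts at xfh = 12 + 6/2 = 15 and
+  # steps by 12, so the GFS forecast writes restarts at hours
+  # 15 27 39 51 63 75 87 99 111 (i.e. restart_interval="15 27 39 51 63 75 87 99 111").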
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.fcst.org b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.fcst.org new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.fcst.org @@ -0,0 +1,364 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
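+# Worked example (illustrative): in the component loop near the top of this file,
+# component=OCN with DO_OCN="YES" gives control="DO_OCN", the eval test succeeds,
+# and awk lowercases the name so the loop sources "$EXPDIR/config.ocn". Similarly,
+# ${CDUMP/enkf} in the wave case statement drops the "enkf" prefix, so a hypothetical
+# CDUMP="enkfgdas" matches WAVE_CDUMP="gdas" and waves stay on for the ensemble.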
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. 
+ if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." 
+ +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." 
+export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. 
### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." 
+ export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. ### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.fit2obs b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.gempak b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.getic b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.gldas b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ice b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ice @@ -0,0 +1,5 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.init b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.init @@ -0,0 +1,54 @@ +#! /usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.landanl b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.landanlinit b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.landanlrun b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.metp b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.nsst b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocn b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable is determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is time interval for applying increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocnanal b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size reolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocnanalbmat 
b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocnpost b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ocnpost @@ -0,0 +1,16 @@ +#! 
/usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.post b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.postsnd b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.prep b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.resources b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
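+ # For reference, a sketch of how this file is meant to be called (the step name
+ # "fcst" below is only an example; any task listed in the usage message works):
+ #   . ${EXPDIR}/config.resources fcst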
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components; they usually land on the atmosphere tasks. + # However, it is suggested to limit the mediator PETS to 300, as a larger count may cause slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-${ATMPETS}} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + 
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
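+ # For reference, how these numbers combine: npe_eobs=480 tasks at
+ # npe_node_eobs=40 tasks per node occupies 12 nodes, and 40 tasks x nth_eobs=3
+ # threads uses 120 of the npe_node_max=128 cores on each node.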
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.sfcanl b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ufs b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ufs new file mode 100644 index 0000000000..9220bf44e8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.ufs @@ -0,0 +1,372 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
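+ # For reference, a hypothetical call built from the options listed below
+ # (arguments come as flag/value pairs, so at least two are required):
+ #   . ${EXPDIR}/config.ufs --fv3 C768 --mom6 025 --cice6 025 --ww3 mx025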
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #export WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.vrfy b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wafs b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wafsblending b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wave b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. 
$EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wavegempak b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.waveinit b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.waveprep b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/jkhINFO b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/jkhINFO new file mode 100644 index 0000000000..17b3dad19c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/jkhINFO @@ -0,0 +1,5 @@ + +May29 + + need v17p8_thompson SDF and will need to recompile!! + need namelist parameters... diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/logs/2022111000.log b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/logs/2022111000.log new file mode 100644 index 0000000000..617bbde595 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/logs/2022111000.log @@ -0,0 +1,84 @@ +2023-05-28 01:31:38 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 01:31:38 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:36854 +2023-05-28 01:33:22 +0000 :: fe3 :: Submission status of previously pending gfsfcst is failure! 
sbatch: error: Batch job submission failed: Invalid qos specification +2023-05-28 01:33:22 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 01:33:23 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28483216 +2023-05-28 01:34:46 +0000 :: fe3 :: Task gfsfcst, jobid=28483216, in state QUEUED (PENDING) +2023-05-28 06:05:19 +0000 :: fe2 :: Submitting gfsfcst +2023-05-28 06:05:22 +0000 :: fe2 :: Submission of gfsfcst succeeded, jobid=28492564 +2023-05-28 06:38:42 +0000 :: fe3 :: Task gfsfcst, jobid=28492564, in state FAILED (FAILED), ran for 219.0 seconds, exit status=11, try=1 (of 2) +2023-05-28 06:38:42 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 06:38:42 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:33495 +2023-05-28 07:30:29 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28493642 +2023-05-28 07:30:30 +0000 :: fe3 :: Task gfsfcst, jobid=28493642, in state DEAD (TIMEOUT), ran for 2404.0 seconds, exit status=15, try=2 (of 2) +2023-05-28 07:34:01 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 07:34:01 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:39381 +2023-05-28 19:08:14 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28495562 +2023-05-28 19:08:14 +0000 :: fe3 :: Task gfsfcst, jobid=28495562, in state FAILED (TIMEOUT), ran for 2413.0 seconds, exit status=255, try=1 (of 2) +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f000-f000 +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f006-f006 +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f012-f012 +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f018-f018 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28521683 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f000-f000 succeeded, jobid=28521684 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f006-f006 succeeded, jobid=28521685 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f012-f012 succeeded, jobid=28521686 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f018-f018 succeeded, jobid=28521687 +2023-05-29 07:52:24 +0000 :: fe3 :: Submitting gfsfcst +2023-05-29 07:52:24 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28547487 +2023-05-29 08:33:19 +0000 :: fe3 :: Task gfsfcst, jobid=28547487, in state FAILED (FAILED), ran for 263.0 seconds, exit status=11, try=1 (of 2) +2023-05-29 08:33:19 +0000 :: fe3 :: Submitting gfsfcst +2023-05-29 08:33:19 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:46395 +2023-05-29 22:41:40 +0000 :: fe5 :: Submitting gfsfcst +2023-05-29 22:41:45 +0000 :: fe5 :: Submission of gfsfcst succeeded, jobid=28578743 +2023-05-29 22:47:52 +0000 :: fe5 :: Task gfsfcst, jobid=28578743, in state FAILED (CANCELLED), ran for 0.0 seconds, exit status=255, try=1 (of 2) +2023-05-29 22:47:52 +0000 :: fe5 :: Submitting gfsfcst +2023-05-29 22:47:52 +0000 :: fe5 :: Submission status of gfsfcst is pending at druby://fe5:35701 +2023-05-29 22:50:42 +0000 :: fe5 :: Submission status of previously pending gfsfcst is success, jobid=28578994 +2023-05-29 22:50:42 +0000 :: fe5 :: Task gfsfcst, jobid=28578994, in state RUNNING (RUNNING) +2023-06-01 01:41:26 +0000 :: fe5 :: Task gfsfcst, jobid=28578994, in state DEAD (FAILED), ran for 183.0 seconds, exit status=11, try=2 (of 2) +2023-06-01 01:43:09 +0000 :: fe5 :: Forcibly submitting gfsfcst +2023-06-01 01:43:09 +0000 :: fe5 :: Submission status of 
gfsfcst is pending at druby://fe5:36952 +2023-06-01 18:39:57 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28691806 +2023-06-01 18:40:01 +0000 :: fe3 :: Task gfsfcst, jobid=28691806, in state UNKNOWN (CANCELLED by 1254) +2023-06-01 18:40:24 +0000 :: fe3 :: Task gfsfcst, jobid=28691806, in state UNKNOWN (CANCELLED by 1254) +2023-06-01 18:40:30 +0000 :: fe3 :: Submitting gfsfcst +2023-06-01 18:40:30 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28727434 +2023-06-01 18:44:19 +0000 :: fe3 :: Task gfsfcst, jobid=28727434, in state FAILED (CANCELLED), ran for 111.0 seconds, exit status=15, try=1 (of 2) +2023-06-01 18:44:19 +0000 :: fe3 :: Submitting gfsfcst +2023-06-01 18:44:19 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:36686 +2023-06-01 20:52:49 +0000 :: fe3 :: Task gfsfcst, jobid=28731995, in state FAILED (FAILED), ran for 6.0 seconds, exit status=256, try=1 (of 2) +2023-06-01 20:53:09 +0000 :: fe3 :: Forcibly submitting gfsfcst +2023-06-01 20:53:09 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:42184 +2023-06-01 20:54:45 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28732399 +2023-06-01 20:54:45 +0000 :: fe3 :: Task gfsfcst, jobid=28732399, in state DEAD (FAILED), ran for 15.0 seconds, exit status=256, try=3 (of 2) +2023-06-01 20:55:04 +0000 :: fe3 :: Submitting gfsfcst +2023-06-01 20:55:04 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:34879 +2023-06-01 20:56:21 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28732419 +2023-06-01 20:56:21 +0000 :: fe3 :: Task gfsfcst, jobid=28732419, in state FAILED (FAILED), ran for 40.0 seconds, exit status=35072, try=1 (of 2) +2023-06-01 20:56:22 +0000 :: fe3 :: Submitting gfsfcst +2023-06-01 20:56:22 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:43742 +2023-06-01 21:51:49 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28732425 +2023-06-01 21:51:49 +0000 :: fe3 :: Task gfsfcst, jobid=28732425, in state SUCCEEDED (COMPLETED), ran for 2614.0 seconds, exit status=0, try=2 (of 2) +2023-06-01 21:51:49 +0000 :: fe3 :: Submitting gfspost_f000-f000 +2023-06-01 21:51:49 +0000 :: fe3 :: Submitting gfspost_f006-f006 +2023-06-01 21:51:49 +0000 :: fe3 :: Submitting gfspost_f012-f012 +2023-06-01 21:51:49 +0000 :: fe3 :: Submitting gfspost_f018-f018 +2023-06-01 21:51:49 +0000 :: fe3 :: Submitting gfspost_f024-f024 +2023-06-01 21:51:49 +0000 :: fe3 :: Submission of gfspost_f000-f000 succeeded, jobid=28734659 +2023-06-01 21:51:49 +0000 :: fe3 :: Submission of gfspost_f006-f006 succeeded, jobid=28734660 +2023-06-01 21:51:49 +0000 :: fe3 :: Submission of gfspost_f012-f012 succeeded, jobid=28734661 +2023-06-01 21:51:49 +0000 :: fe3 :: Submission of gfspost_f018-f018 succeeded, jobid=28734662 +2023-06-01 21:51:49 +0000 :: fe3 :: Submission of gfspost_f024-f024 succeeded, jobid=28734663 +2023-06-02 16:34:52 +0000 :: fe6 :: Task gfspost_f000-f000, jobid=28734659, in state FAILED (FAILED), ran for 119.0 seconds, exit status=1, try=1 (of 2) +2023-06-02 16:34:52 +0000 :: fe6 :: Task gfspost_f006-f006, jobid=28734660, in state SUCCEEDED (COMPLETED), ran for 205.0 seconds, exit status=0, try=1 (of 2) +2023-06-02 16:34:52 +0000 :: fe6 :: Task gfspost_f012-f012, jobid=28734661, in state SUCCEEDED (COMPLETED), ran for 176.0 seconds, exit status=0, try=1 (of 2) +2023-06-02 16:34:52 +0000 :: fe6 :: Task gfspost_f018-f018, jobid=28734662, in 
state SUCCEEDED (COMPLETED), ran for 174.0 seconds, exit status=0, try=1 (of 2) +2023-06-02 16:34:52 +0000 :: fe6 :: Task gfspost_f024-f024, jobid=28734663, in state SUCCEEDED (COMPLETED), ran for 174.0 seconds, exit status=0, try=1 (of 2) +2023-06-02 16:34:52 +0000 :: fe6 :: Submitting gfspost_f000-f000 +2023-06-02 16:34:52 +0000 :: fe6 :: Submission status of gfspost_f000-f000 is pending at druby://fe6:44351 +2023-06-02 17:55:23 +0000 :: fe5 :: Submission status of previously pending gfspost_f000-f000 is success, jobid=28777153 +2023-06-02 17:55:24 +0000 :: fe5 :: Task gfspost_f000-f000, jobid=28777153, in state SUCCEEDED (COMPLETED), ran for 118.0 seconds, exit status=0, try=2 (of 2) +2023-06-02 17:55:24 +0000 :: fe5 :: Submitting gfsvrfy +2023-06-02 17:55:24 +0000 :: fe5 :: Submission status of gfsvrfy is pending at druby://fe5:36235 diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/runcmds b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/runcmds new file mode 100644 index 0000000000..f1a8d2d8cd --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_thompson.xml -d v17_p8_thompson.db +rocotostat -w v17_p8_thompson.xml -d v17_p8_thompson.db diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/v17_p8_thompson.crontab b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/v17_p8_thompson.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/v17_p8_thompson.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/v17_p8_thompson.db b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/v17_p8_thompson.db new file mode 100644 index 0000000000..a69c5aba88 Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/v17_p8_thompson.db differ diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/v17_p8_thompson.xml b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/v17_p8_thompson.xml new file mode 100644 index 0000000000..d74b9c5b49 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/v17_p8_thompson.xml @@ -0,0 +1,155 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + hfv3gfs + + batch + 01:40:00 + 164:ppn=24:tpp=1 + xjet + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + 
ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/v17_p8_thompson_lock.db b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/v17_p8_thompson_lock.db new file mode 100644 index 0000000000..8f8f3e76ed Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg/v17_p8_thompson_lock.db differ diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/12x12x2wgx12wt b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/12x12x2wgx12wt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aero b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanl b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.anal b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi 
+ + # Assimilate Metop-C GNSSRO + if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assimilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assimilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.analcalc b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.analdiag b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.arch b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanl b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanl b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.awips b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.base b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.base new file mode 100644 index 0000000000..dcdffae411 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.base @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." +export assim_freq=6 +export PSLOT="v17_p8_thompson_12x12_xjet_2wg_12wt" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_thompson" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
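Many of the exports just above (ERRSCRIPT, SENDECF, SENDCOM, DBNROOT, and similar) use the bash ${VAR:-default} expansion, so a value already exported by the calling job card or environment wins over the value written in this config. A minimal standalone sketch of that idiom, reusing SENDCOM from the file above (illustrative only, not part of the original config file):

    unset SENDCOM
    export SENDCOM=${SENDCOM:-"YES"}   # SENDCOM was unset, so it takes the default "YES"
    export SENDCOM=${SENDCOM:-"NO"}    # SENDCOM is already "YES"; the "NO" default is ignored
    echo "${SENDCOM}"                  # prints YES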
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-168} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-168} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-168} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-168} +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
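Earlier in config.base above, the per-cycle forecast length is selected with FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}), i.e. the cycle hour picks one of the FHMAX_GFS_00/06/12/18 values. A minimal standalone sketch of that lookup, assuming a 12Z cycle and the 24-hour settings used above; bash indirect expansion (${!var}) gives the same result as the eval-echo form:

    cyc=12
    FHMAX_GFS_00=24; FHMAX_GFS_06=24; FHMAX_GFS_12=24; FHMAX_GFS_18=24
    var="FHMAX_GFS_${cyc}"           # name of the per-cycle variable, here FHMAX_GFS_12
    FHMAX_GFS="${!var}"              # indirect expansion; equivalent to: eval echo \${FHMAX_GFS_${cyc}}
    echo "FHMAX_GFS=${FHMAX_GFS}"    # prints FHMAX_GFS=24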
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling: when DO_GLDAS is set, FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# Turn on NSST in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# the script defaults to binary diagnostic files. Set diagnostic file +# variables here since they are used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
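# [Editor's sketch, not part of the patch] RUN above falls back through two
# levels of ${var:-default}; the same chain evaluated standalone:
unset RUN CDUMP
echo "${RUN:-${CDUMP:-gfs}}"   # -> gfs      (neither RUN nor CDUMP set)
CDUMP="gdas"
echo "${RUN:-${CDUMP:-gfs}}"   # -> gdas     (CDUMP supplies the fallback)
RUN="enkfgdas"
echo "${RUN:-${CDUMP:-gfs}}"   # -> enkfgdas (an explicit RUN wins)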
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
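# [Editor's sketch, not part of the patch] FHMAX_GFS above resolves the
# per-cycle variable name at run time via eval; bash indirect expansion
# (${!name}) is an equivalent form, shown here only for illustration.
cyc=06
FHMAX_GFS_06=120
FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}})   # form used in the config -> 120
name="FHMAX_GFS_${cyc}"
echo "${FHMAX_GFS} ${!name}"                  # -> 120 120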
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
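# [Editor's sketch, not part of the patch] Flags such as l4densvar hold the
# literal strings ".true."/".false." that end up in Fortran namelists, so the
# EnKF output-frequency test above is a plain string comparison, not a shell
# boolean.
l4densvar=".true."
if [ ${l4densvar} = ".true." ]; then
  FHOUT=1; FHOUT_ENKF=1      # hourly output across the 4D ensemble window
else
  FHOUT_ENKF=3
fi
echo "FHOUT_ENKF=${FHOUT_ENKF}"   # -> 1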
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
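# [Editor's sketch, not part of the patch] CDATE is a ten-digit YYYYMMDDHH
# string, so the -ge/-le pair above acts as a simple date-window test for
# selecting the "p" dump suffix; the example date is made up.
CDATE=2019100500
DUMP_SUFFIX=""
if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then
  DUMP_SUFFIX="p"   # inside the NCO GFS v15.3 parallel window
fi
echo "DUMP_SUFFIX='${DUMP_SUFFIX}'"   # -> 'p' for this date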
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
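# [Editor's sketch, not part of the patch] gfs_cyc counts GFS forecast cycles
# per day, so STEP_GFS above is simply the spacing between them in hours.
for gfs_cyc in 1 2 4; do
  echo "gfs_cyc=${gfs_cyc} -> STEP_GFS=$(( 24 / gfs_cyc ))"   # 24, 12, 6
done
# gfs_cyc=0 (no GFS cycle) skips the division and sets STEP_GFS=0 instead.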
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
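# [Editor's sketch, not part of the patch] ERRSCRIPT is stored single-quoted so
# that $err is evaluated when the check is eventually run, not when config.base
# is sourced; how the workflow invokes it is assumed here for illustration.
ERRSCRIPT='eval [[ $err = 0 ]]'
err=0; ${ERRSCRIPT} && echo "step ok"
err=1; ${ERRSCRIPT} || echo "step failed (err=${err})"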
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
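# [Editor's sketch, not part of the patch] In the S2S* branch above, a trailing
# "A" in APP switches on aerosols and a leading "S2SW" switches on waves; the
# loop below only demonstrates the two regex tests.
for APP in S2S S2SA S2SW S2SWA; do
  opts=""
  [[ "${APP}" =~ A$ ]]    && opts="${opts} +aero"
  [[ "${APP}" =~ ^S2SW ]] && opts="${opts} +wave"
  echo "${APP}:${opts:- coupled base}"
done
# -> S2S: coupled base, S2SA: +aero, S2SW: +wave, S2SWA: +aero +wave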
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.com b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx 
COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
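# [Editor's sketch, not part of the patch] The config.com templates above are
# single-quoted on purpose: the ${...} tokens stay literal until a later job
# expands them (the workflow uses the generate_com() helper in ush/preamble.sh;
# gettext's envsubst and the example values below are used only to illustrate
# the substitution step).
COM_ATMOS_ANALYSIS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}/analysis/atmos'
export ROTDIR="/scratch/expt" RUN="gfs" YMD="20231201" HH="00" MEMDIR="mem001"
echo "${COM_ATMOS_ANALYSIS_TMPL}" | envsubst
# -> /scratch/expt/gfs.20231201/00/mem001/analysis/atmos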
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.earc b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ecen b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.echgres b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ediag b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.efcs b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.eobs b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.epos b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.esfc b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.eupd b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.fcst b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
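# [Editor's sketch, not part of the patch] The WAVE_CDUMP test at the top of
# config.fcst_gsl above uses ${CDUMP/enkf} to drop the "enkf" prefix, so
# ensemble members (enkfgdas) follow the same wave on/off choice as their
# deterministic CDUMP.
WAVE_CDUMP="gdas"
for CDUMP in gfs gdas enkfgdas; do
  case ${WAVE_CDUMP} in
    both | ${CDUMP/enkf} ) echo "${CDUMP}: waves stay on" ;;
    *)                     echo "${CDUMP}: DO_WAVE=NO"    ;;
  esac
done
# -> gfs: DO_WAVE=NO; gdas and enkfgdas keep waves on when WAVE_CDUMP="gdas"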
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." 
+ export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." 
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
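# [Editor's note, not part of the patch: a worked example of the restart_interval_gfs loop
#  above, using hypothetical values. With restart_interval_gfs=12, DOIAU="YES" and
#  IAU_OFFSET=6, the loop starts at xfh = 12 + 6/2 = 15 and, for FHMAX_GFS=48, builds
#  restart_interval="15 27 39":
#    xfh=$((12 + 6/2)); rst_list=""
#    while [ $xfh -le 48 ]; do rst_list="$rst_list $xfh"; xfh=$((xfh+12)); done
#    # -> rst_list=" 15 27 39"]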
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.fit2obs b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.gempak b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.getic b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.gldas b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ice b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ice @@ -0,0 +1,5 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.init b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.init @@ -0,0 +1,54 @@ +#! /usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.landanl b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlinit b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlrun b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.metp b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.nsst b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash
+
+########## config.nsst ##########
+# NSST specific
+
+echo "BEGIN: config.nsst"
+
+# NSST parameters contained within nstf_name
+
+# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled
+export NST_MODEL=2
+
+# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON,
+export NST_SPINUP=0
+if [[ "$CDATE" -lt "2017072000" ]]; then
+  export NST_SPINUP=1
+fi
+
+# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON
+export NST_RESV=0
+
+# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction)
+export ZSEA1=0
+export ZSEA2=0
+
+export NST_GSI=3   # default 0: No NST info at all;
+                   # 1: Input NST info but not used in GSI;
+                   # 2: Input NST info, used in CRTM simulation, no Tr analysis
+                   # 3: Input NST info, used in both CRTM simulation and Tr analysis
+export NSTINFO=0   # number of elements added in obs. data array (default = 0)
+if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi
+
+echo "END: config.nsst"
diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocn b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocn
new file mode 100644
index 0000000000..7d14e3dd52
--- /dev/null
+++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocn
@@ -0,0 +1,23 @@
+#! /usr/bin/env bash
+
+echo "BEGIN: config.ocn"
+
+# MOM_input template to use
+export MOM_INPUT="MOM_input_template_${OCNRES}"
+
+export DO_OCN_SPPT="NO"       # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False)
+export DO_OCN_PERT_EPBL="NO"  # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False)
+
+# Templated variables in MOM_input_template
+export MOM6_USE_LI2016="True" # set to False for restart reproducibility
+export MOM6_THERMO_SPAN="False"
+export MOM6_ALLOW_LANDMASK_CHANGES="False"
+
+if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then
+  export ODA_INCUPD="True"
+else
+  export ODA_INCUPD="False"
+fi
+export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment
+
+echo "END: config.ocn"
diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanal b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanal
new file mode 100644
index 0000000000..f5925809fc
--- /dev/null
+++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanal
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+########## config.ocnanal ##########
+# configuration common to all ocean analysis tasks
+
+echo "BEGIN: config.ocnanal"
+
+export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config
+export OBS_LIST=
+[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml
+export OBS_YAML=${OBS_LIST}
+export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml
+export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25
+export SOCA_VARS=tocn,socn,ssh
+export SABER_BLOCKS_YAML=
+export SOCA_NINNER=50
+export CASE_ANL=C48
+export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent
+export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin
+
+# R2D2
+export R2D2_OBS_DB=shared
+export R2D2_OBS_DUMP=s2s_v1
+export R2D2_OBS_SRC=gdas_marine
+export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed
+export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2
+
+# NICAS
+export NICAS_RESOL=1
+export NICAS_GRID_SIZE=15000
+
+echo "END: config.ocnanal"
diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalbmat
b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnpost b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnpost @@ -0,0 +1,16 @@ +#! 
/usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.post b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.postsnd b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.prep b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.resources b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components; usually they land on the atmosphere tasks. + # However, it is suggested to limit the mediator PETS to 300, as larger counts may cause slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-${ATMPETS}} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST +
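# Worked example (illustrative only, not executed by the workflow): with this experiment's
# config.ufs C768 gfs settings (12x12 layout, nthreads_fv3_gfs=4, 2 write groups with 12 write
# tasks per group per thread per tile), quilting enabled (QUILTING=.true.), and no wave, ocean,
# or ice components, the arithmetic above gives:
#   ntasks_fv3_gfs   = 12 * 12 * 6  = 864 forecast tasks
#   FV3PETS          = 864 * 4      = 3456
#   ntasks_quilt_gfs = 2 * (12 * 6) = 144 write tasks
#   QUILTPETS        = 144 * 4      = 576
#   ATMPETS = NTASKS_TOT = 3456 + 576 = 4032  -> npe_fcst_gfs
# which, at 24 tasks per xjet node, is consistent with the 168:ppn=24 request in the
# accompanying rocoto XML.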
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
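# A minimal sketch (illustrative only; _ex_step is a hypothetical name, unused by the workflow)
# of the dynamic variable naming used throughout this file: "eval" builds the per-step export
# at run time, and the non-static config.resources achieves the same with "declare -x".
_ex_step="arch"
eval "export wtime_${_ex_step}='06:00:00'"   # expands to: export wtime_arch='06:00:00'
declare -x "npe_${_ex_step}"=1               # equivalent to: export npe_arch=1
unset _ex_step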
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.sfcanl b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ufs b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ufs new file mode 100644 index 0000000000..6c48881832 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ufs @@ -0,0 +1,373 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.vrfy b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafs b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsblending b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. 
$EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wave b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wave @@ -0,0 +1,159 @@ +#! /usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a 
single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! 
/usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavegempak b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.waveinit b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostsbs @@ -0,0 +1,24 @@ +#! 
/usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.waveprep b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/runcmds b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/runcmds new file mode 100644 index 0000000000..f1a8d2d8cd --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_thompson.xml -d v17_p8_thompson.db +rocotostat -w v17_p8_thompson.xml -d v17_p8_thompson.db diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.crontab b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.db_progsigma=T b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.db_progsigma=T new file mode 100644 index 0000000000..0ac1cc8503 Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.db_progsigma=T differ diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.xml b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.xml new file mode 100644 index 0000000000..f834bf6d3c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.xml @@ -0,0 +1,153 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + 
&JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 01:00:00 + xjet + 168:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/12x12x2wgx12wt b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/12x12x2wgx12wt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.aero b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.aeroanl b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. 
$EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.aerosol_init @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.anal b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. 
in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. 
+ # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.analcalc b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.analdiag b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. $EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.arch b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. 
$EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.atmanl b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.atmensanl b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.awips b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. 
of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.base b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.base new file mode 100644 index 0000000000..431e74111b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.base @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." +export assim_freq=6 +export PSLOT="v17_p8_thompson_12x12_xjet_2wg_12wt_168h" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_thompson" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-24} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-24} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-24} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
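The per-cycle forecast length above is selected by expanding the variable name FHMAX_GFS_${cyc} indirectly; bash's ${!name} indirection gives the same result without eval. A small self-contained sketch (values copied from this config, cycle chosen arbitrarily):

#!/usr/bin/env bash
cyc=00
FHMAX_GFS_00=168; FHMAX_GFS_06=168; FHMAX_GFS_12=168; FHMAX_GFS_18=168

# Form used in config.base: build the variable name, then eval-echo it.
FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}})

# Equivalent bash indirection, avoiding eval.
varname="FHMAX_GFS_${cyc}"
FHMAX_GFS_ALT="${!varname}"

gfs_cyc=1
STEP_GFS=$(( 24 / gfs_cyc ))   # hours between GFS cycles: 1 -> 24, 2 -> 12, 4 -> 6
echo "FHMAX_GFS=${FHMAX_GFS} (${FHMAX_GFS_ALT}), STEP_GFS=${STEP_GFS}"   # 168 (168), 24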
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling: when DO_GLDAS is set, FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# Turn on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# scripts default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh.
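For orientation, each task-level config.* file in this directory pulls in its resource settings with ". $EXPDIR/config.resources <task>" after config.base has been sourced. A rough, hypothetical sketch of that layering for a single task follows; the real job scripts under jobs/rocoto do this through shared helpers and also require the rocoto envars (CDATE, PDY, cyc, CDUMP, etc.) to be exported first.

#!/usr/bin/env bash
# Hypothetical illustration only: how config.base and a task config layer together.
# Assumes EXPDIR, CDATE, PDY, cyc and CDUMP are already exported (the rocoto envars do this).
task="wavepostsbs"

source "${EXPDIR}/config.base"        # experiment-wide settings (this file)
source "${EXPDIR}/config.${task}"     # task settings; itself runs ". $EXPDIR/config.resources ${task}"

echo "Running ${task} with FHMAX_GFS=${FHMAX_GFS} and FHOUT_GFS=${FHOUT_GFS}"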
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
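Unlike the filled-in config.base earlier in this directory, this config.base.emc.dyn_EMC copy is the template form: the @MACHINE@, @ACCOUNT@, @SDATE@ and similar tokens are substituted when an experiment is generated. The workflow's own setup tooling performs that substitution; purely as an illustration, a hand-rolled equivalent for a few tokens (values taken from the filled config above) might look like:

#!/usr/bin/env bash
# Hypothetical stand-in for the experiment setup step: fill a few @...@ tokens.
sed -e 's|@MACHINE@|JET|g' \
    -e 's|@ACCOUNT@|gsd-fv3-dev|g' \
    -e 's|@SDATE@|2022111000|g' \
    -e 's|@PSLOT@|v17_p8_thompson_12x12_xjet_2wg_12wt_168h|g' \
    config.base.emc.dyn_EMC > config.base.filled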
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
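+
+# The DO_* defaults above are switched on by the APP case block that follows.
+# Side note on the nested ${var:-default} fallbacks used in this file (e.g. the
+# RUN/CDUMP line earlier); illustration only:
+#
+#   unset RUN CDUMP
+#   echo "${RUN:-${CDUMP:-gfs}}"   # -> "gfs"  (both unset, innermost default wins)
+#   CDUMP="gdas"
+#   echo "${RUN:-${CDUMP:-gfs}}"   # -> "gdas" (RUN unset, falls back to CDUMP)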
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
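+
+# On the per-cycle forecast length set earlier in this file: the eval expands
+# the indirect name, e.g. cyc=12 -> ${FHMAX_GFS_12} -> 168.  An equivalent form
+# using bash indirect expansion (illustration only, not what this file uses):
+#
+#   fhmax_var="FHMAX_GFS_${cyc}"
+#   export FHMAX_GFS="${!fhmax_var}"
+#
+# Likewise STEP_GFS is just 24/gfs_cyc, e.g. gfs_cyc=4 -> STEP_GFS=6.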
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
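+
+# In plain terms, the IAU cold-start check earlier in this section zeroes
+# IAU_OFFSET/IAU_FHROT whenever no prior cycle can supply increments: the first
+# cycle of a cold-started cycled run (SDATE == CDATE with EXP_WARM_START=.false.),
+# any run with DOIAU=NO, or a cold-started forecast-only run.  For example
+# (dates are illustrative only):
+#
+#   MODE=cycled  SDATE=2021032100 CDATE=2021032100 EXP_WARM_START=".false."  -> IAU_OFFSET=0, IAU_FHROT=0
+#   MODE=cycled  SDATE=2021032100 CDATE=2021032206 EXP_WARM_START=".false."  -> IAU settings above are kept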
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.com b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx 
COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
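+
+# Illustration of the "empty variables must include a space" note at the top of
+# this file: downstream configs presumably apply defaults with ${var:-default},
+# which treats an empty string the same as unset, so a lone space is used to
+# mean "intentionally blank".  For example (hypothetical default shown):
+#
+#   waveesmfGRD=""
+#   echo "${waveesmfGRD:-some_default}"   # -> "some_default"  (empty value gets replaced)
+#   waveesmfGRD=" "
+#   echo "${waveesmfGRD:-some_default}"   # -> " "             (the space survives)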
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.earc b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ecen b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.echgres b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ediag b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.efcs b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.eobs b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.epos b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.esfc b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.eupd b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. 
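+
+# Quick consistency sketch (illustration only, not executed by the workflow):
+# model-space localization only works when the observer writes out Jacobians,
+# so the two flags in this file must agree, e.g.:
+#
+#   if [[ ${modelspace_vloc} = ".true." && ${lobsdiag_forenkf} != ".true." ]]; then
+#     echo "FATAL ERROR: modelspace_vloc requires lobsdiag_forenkf=.true."
+#     exit 1
+#   fi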
+export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." # Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.fcst b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.fcst new file mode 120000 index 0000000000..62b79d93fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.fcst @@ -0,0 +1 @@ +../../parm/config/gfs/config.fcst \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. 
while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." 
+ if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." 
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.fit2obs b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.gempak b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.getic b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.gldas b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ice b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ice @@ -0,0 +1,5 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.init b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.init @@ -0,0 +1,54 @@ +#! /usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.landanl b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.landanlinit b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.landanlrun b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.metp b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.nsst b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocn b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocnanal b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git
a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocnanalbmat b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocnpost b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.post b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.postsnd b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.prep b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.resources b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
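+  # As elsewhere in config.resources, tasks per node are derived as
+  # npe_node_max / nth_<step>. Illustrative example (machine dependent): on an
+  # xjet node npe_node_max=24, so a single-threaded step gets 24 tasks per node
+  # and a 4-thread step gets 6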
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + 
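+  # Worked example of the PETS bookkeeping above (illustrative counts, not this
+  # experiment's settings): ntasks_fv3=144 and nthreads_fv3=2 give FV3PETS=288;
+  # with QUILTING=.true. and ntasks_quilt=48, QUILTPETS=96 and ATMPETS=384; adding
+  # a wave component with ntasks_ww3=100 and nthreads_ww3=1 gives NTASKS_TOT=484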
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
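# Note (illustrative, not part of the original file): the stanzas in this script all
# derive tasks-per-node the same way, by integer division of the cores per node by the
# threads per task. For example, with npe_node_max=128 and nth_eupd=14 as set below:
#   npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc)   # 128 / 14 -> 9 tasks per node
# bc truncates the quotient, so nth_eupd * npe_node_eupd (14 * 9 = 126) never exceeds
# the 128 cores available on a node.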
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.sfcanl b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ufs b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ufs new file mode 100644 index 0000000000..6c48881832 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.ufs @@ -0,0 +1,373 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling 
+ export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.vrfy b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wafs b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wafsblending b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. 
$EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wave b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wave @@ -0,0 +1,159 @@ +#! /usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to 
DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! 
/usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wavegempak b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.waveinit b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.waveprep b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/runcmds b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/runcmds new file mode 100644 index 0000000000..f1a8d2d8cd --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_thompson.xml -d v17_p8_thompson.db +rocotostat -w v17_p8_thompson.xml -d v17_p8_thompson.db diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/v17_p8_thompson.crontab b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/v17_p8_thompson.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/v17_p8_thompson.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/v17_p8_thompson.db_progsigma=T b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/v17_p8_thompson.db_progsigma=T new file mode 100644 index 0000000000..0ac1cc8503 Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/v17_p8_thompson.db_progsigma=T differ 
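A minimal sketch of how the runcmds and crontab entries above are typically driven by hand when debugging a cycle; rocotoboot/rocotorewind, the cycle 202211100000, and the task name gfsfcst are illustrative assumptions rather than values taken from this experiment:

rocotorun    -w v17_p8_thompson.xml -d v17_p8_thompson.db                               # advance the workflow one iteration
rocotostat   -w v17_p8_thompson.xml -d v17_p8_thompson.db                               # report task states per cycle
rocotoboot   -w v17_p8_thompson.xml -d v17_p8_thompson.db -c 202211100000 -t gfsfcst    # force-submit one task (hypothetical task name)
rocotorewind -w v17_p8_thompson.xml -d v17_p8_thompson.db -c 202211100000 -t gfsfcst    # reset that task so it reruns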
diff --git a/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/v17_p8_thompson.xml b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/v17_p8_thompson.xml new file mode 100644 index 0000000000..a87b6ecc0e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_12x12_xjet_2wg_12wt_168h/v17_p8_thompson.xml @@ -0,0 +1,158 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + xjet + 05:00:00 + 168:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 _f126-f126 _f132-f132 _f138-f138 _f144-f144 _f150-f150 _f156-f156 _f162-f162 _f168-f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/8x8 b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/8x8 new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.aero b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. 
+AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: '<tracer_name>:<factor>'. Use <tracer_name> = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.aeroanl b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. 
$EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.aerosol_init @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.anal b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. 
in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ "${CDATE}" -ge "2020091612" && "${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. 
+ # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assimilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assimilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.analcalc b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.analdiag b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. $EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.arch b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. 
$EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.atmanl b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.atmensanl b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.atmensanl @@ -0,0 +1,22 @@ +#! 
/usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.awips b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.base b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.base new file mode 100644 index 0000000000..d1b8d44838 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.base @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." 
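SDATE and EDATE above are 10-digit YYYYMMDDHH stamps; because the format is fixed-width, plain integer comparison orders them chronologically, which is the same property the DUMP_SUFFIX window and cold-start checks further down rely on. A minimal, hypothetical sanity check in that spirit (not part of the committed config):

SDATE=2022111000
EDATE=2022111000
if [[ "${SDATE}" -gt "${EDATE}" ]]; then
  echo "FATAL: SDATE (${SDATE}) is later than EDATE (${EDATE})" >&2
  exit 1
fi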
+export assim_freq=6 +export PSLOT="v17_p8_thompson_8x8_xjet" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_thompson" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is 
defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
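The gfs_cyc setting above controls how many GFS forecast cycles run per day; just below, STEP_GFS is derived as 24 / gfs_cyc (with STEP_GFS forced to 0 when gfs_cyc is 0). A stand-alone sketch of that arithmetic, for illustration only:

for gfs_cyc in 1 2 4; do
  STEP_GFS=$(( 24 / gfs_cyc ))
  echo "gfs_cyc=${gfs_cyc} -> a GFS forecast every ${STEP_GFS} hours"
done
# gfs_cyc=1 -> every 24 hours (00Z only)
# gfs_cyc=2 -> every 12 hours (00Z and 12Z)
# gfs_cyc=4 -> every  6 hours (all four cycles)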
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
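IAU_FHROT above is taken as the first character of IAUFHRS via cut -c1, which is adequate for the single-digit lists used here ("3,6,9" gives 3). A prefix-strip parameter expansion would also cope with a multi-digit first entry; a small sketch of that alternative, offered only as an illustration:

IAUFHRS="3,6,9"
IAU_FHROT=${IAUFHRS%%,*}   # everything before the first comma -> "3"
echo "IAU_FHROT=${IAU_FHROT}"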
+ +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. 
+export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
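The RUN export above chains two ${var:-default} expansions: RUN keeps any value already set by the job card, otherwise it falls back to CDUMP, otherwise to "gfs". A quick stand-alone demonstration of that resolution order (illustration only; the sample values are arbitrary):

unset RUN CDUMP
RUN=${RUN:-${CDUMP:-"gfs"}};  echo "neither set     -> RUN=${RUN}"
unset RUN; CDUMP="gdas"
RUN=${RUN:-${CDUMP:-"gfs"}};  echo "only CDUMP set  -> RUN=${RUN}"
RUN="enkfgdas"
RUN=${RUN:-${CDUMP:-"gfs"}};  echo "RUN already set -> RUN=${RUN}"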
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
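FHMAX_GFS above is resolved with eval so that the current cycle hour selects one of FHMAX_GFS_00/06/12/18. Bash indirect expansion performs the same lookup without eval; a minimal equivalent, shown purely as an illustration:

cyc="06"
FHMAX_GFS_00=120; FHMAX_GFS_06=120; FHMAX_GFS_12=120; FHMAX_GFS_18=120
varname="FHMAX_GFS_${cyc}"
FHMAX_GFS=${!varname}   # indirect expansion: the value of the variable named by varname
echo "cyc=${cyc} -> FHMAX_GFS=${FHMAX_GFS}"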
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
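The imp_physics codes listed above (99 Zhao-Carr, 8 Thompson, 6 WSM6, 10 MG, 11 GFDL) select the model's microphysics scheme; the main config.base earlier in this diff sets 8, matching the Thompson suite named in the PSLOT. A small lookup sketch of that mapping, for illustration only:

imp_physics=8
case "${imp_physics}" in
  99) scheme="Zhao-Carr" ;;
   8) scheme="Thompson" ;;
   6) scheme="WSM6" ;;
  10) scheme="MG (Morrison-Gettelman)" ;;
  11) scheme="GFDL" ;;
   *) scheme="unknown" ;;
esac
echo "imp_physics=${imp_physics} -> ${scheme} microphysics"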
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
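In the CASE-to-OCNRES mapping above, OCNRES (and ICERES, which copies it) encodes the ocean/ice grid spacing in hundredths of a degree, so 500, 100, 050, and 025 correspond to 5, 1, 0.5, and 0.25 degree grids. A small decoding sketch, offered only as an illustration:

for OCNRES in 500 100 050 025; do
  # awk treats the leading-zero strings as decimal, sidestepping bash's octal arithmetic
  deg=$(awk -v res="${OCNRES}" 'BEGIN {printf "%.2f", res / 100}')
  echo "OCNRES=${OCNRES} -> ${deg} degree ocean/ice grid"
done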
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.com b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx 
COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." 
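The comment at the top of config.defaults.s2sw warns that empty variables must contain a space or they will be overwritten; that behaviour is what you would expect if the downstream configs apply ${var:-default}-style defaulting, which treats an empty string as unset but leaves a single space alone. Several wave variables just below (waveesmfGRD, wavepostGRD) carry a lone space for exactly that reason. A stand-alone demonstration (the fallback value here is hypothetical):

wavepostGRD=""
echo "empty string -> [${wavepostGRD:-some_default_grid}]"   # the default wins
wavepostGRD=" "
echo "single space -> [${wavepostGRD:-some_default_grid}]"   # the space survives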
+ +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' + +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.earc b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ecen b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.echgres b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ediag b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.efcs b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.eobs b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.epos b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.esfc b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.eupd b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.fcst b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.fcst new file mode 100644 index 0000000000..6c579000b3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.fcst @@ -0,0 +1,429 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." 
+ export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".false." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF +#JKH export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +#JKH export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." 
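As a quick worked check of the launch_level arithmetic in the gravity-wave-drag blocks above: bc runs at its default scale of 0, so the division truncates to an integer. A sketch using LEVS=128 and LEVS=65 as example inputs:

#! /usr/bin/env bash
# Illustrative only: integer truncation in the launch_level computation.
for LEVS in 128 65; do
  launch_level=$(echo "${LEVS}/2.35" | bc)
  echo "LEVS=${LEVS} -> launch_level=${launch_level}"   # 128 -> 54, 65 -> 27
done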
+ +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + #JKH ??? export dt_inner=40. ### JKH - 10dec + #JKH export sedi_semi=??? + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + #JKHif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_thompson" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export ltaerosol=".true." + progsigma=.false. + tbp="" + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke${tbp}" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + #JKHexport FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
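For reference, a worked trace of the GFS restart-hour list assembled just above, with restart_interval_gfs=12, IAU_OFFSET=6, and FHMAX_GFS=48 chosen purely for illustration (when restart_interval_gfs is 0 or negative the block instead collapses to a single restart at FHMAX_GFS):

#! /usr/bin/env bash
# Illustrative trace only; values are assumptions for the example.
restart_interval_gfs=12
IAU_OFFSET=6
FHMAX_GFS=48
rst_list=""
xfh=$((restart_interval_gfs + (IAU_OFFSET / 2)))   # 12 + 3 = 15
while [ "${xfh}" -le "${FHMAX_GFS}" ]; do
  rst_list="${rst_list} ${xfh}"
  xfh=$((xfh + restart_interval_gfs))
done
echo "restart_interval=${rst_list}"   # restart_interval= 15 27 39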
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.fcst.org b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.fcst.org new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.fcst.org @@ -0,0 +1,364 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
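Each of these forecast configs reads its DO_&lt;COMPONENT&gt; switches indirectly inside the component-sourcing loop near the top of the file; a standalone sketch of that idiom with hypothetical values, showing bash's native ${!var} as an equivalent to the eval form used above:

#! /usr/bin/env bash
# Sketch only: indirect lookup of the DO_* component switches.
DO_WAVE="YES"; DO_OCN="NO"; DO_ICE="NO"; DO_AERO="NO"
for component in WAVE OCN ICE AERO; do
  control="DO_${component}"
  value=$(eval echo \$$control)   # eval-based indirection, as in the configs
  # value="${!control}"           # bash-native indirect expansion, same result
  if [[ "${value}" == "YES" ]]; then
    echo "would source config.$(echo "${component}" | awk '{ print tolower($1) }')"
  fi
done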
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. 
+ if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." 
+ +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." 
+export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd="true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. 
### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." 
+ export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. ### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.fit2obs b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.gempak b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.getic b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.gldas b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ice b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.init b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.init @@ -0,0 +1,54 @@ +#! 
/usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.landanl b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.landanlinit b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.landanlinit @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.landanlrun b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.metp b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
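The model_file_format entry above mixes two templating layers: bash expands ${CDUMP} when the config is sourced, while the {lead?...} and {init?...} tokens are left for METplus to fill in per verification time. A small sketch (CDUMP and the example date/lead are assumptions for illustration only):

#! /usr/bin/env bash
# Sketch only: what is expanded at config time vs. left for METplus.
CDUMP="gfs"   # assumed value
model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2"
echo "${model_file_format}"
# -> pgbf{lead?fmt=%2H}.gfs.{init?fmt=%Y%m%d%H}.grib2
# METplus would later resolve this to something like pgbf24.gfs.2023060100.grib2
# for a 2023060100 initialization at forecast hour 24.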
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.nsst b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocn b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocnanal b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocnanalbmat b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocnanalbmat new file mode 
100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocnpost b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.post b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.post @@ -0,0 +1,36 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.postsnd b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.prep b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.resources b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export 
NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif 
[[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + 
export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export 
layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components; they usually land on the atmosphere tasks. + # However, it is suggested to limit the mediator PETS to 300, as more may cause slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-${ATMPETS}} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + 
unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif 
[[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif 
[[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
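+ # Illustrative sketch of how these settings combine (comment only): with
+ # npe_node_max=128 as set above, 40 tasks/node x 3 threads = 120 cores per
+ # node, so the 480-task eobs job fits on 12 nodes with cores to spare.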
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.sfcanl b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ufs b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ufs new file mode 100644 index 0000000000..580431cf19 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.ufs @@ -0,0 +1,371 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=8 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=8 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + 
export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + 
MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.vrfy b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wafs b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wafsblending b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wave b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not a GEFS coupled run +# (for a GFS coupled run, RUNMEM would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wavegempak b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wavegempak @@ -0,0 +1,13 @@ +#! 
/usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.waveinit b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.waveprep b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/jkhINFO b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/jkhINFO new file mode 100644 index 0000000000..17b3dad19c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/jkhINFO @@ -0,0 +1,5 @@ + +May29 + + need v17p8_thompson SDF and will need to recompile!! + need namelist parameters... diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/logs/2022111000.log b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/logs/2022111000.log new file mode 100644 index 0000000000..c066eca3bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/logs/2022111000.log @@ -0,0 +1,50 @@ +2023-05-28 01:31:38 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 01:31:38 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:36854 +2023-05-28 01:33:22 +0000 :: fe3 :: Submission status of previously pending gfsfcst is failure! 
sbatch: error: Batch job submission failed: Invalid qos specification +2023-05-28 01:33:22 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 01:33:23 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28483216 +2023-05-28 01:34:46 +0000 :: fe3 :: Task gfsfcst, jobid=28483216, in state QUEUED (PENDING) +2023-05-28 06:05:19 +0000 :: fe2 :: Submitting gfsfcst +2023-05-28 06:05:22 +0000 :: fe2 :: Submission of gfsfcst succeeded, jobid=28492564 +2023-05-28 06:38:42 +0000 :: fe3 :: Task gfsfcst, jobid=28492564, in state FAILED (FAILED), ran for 219.0 seconds, exit status=11, try=1 (of 2) +2023-05-28 06:38:42 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 06:38:42 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:33495 +2023-05-28 07:30:29 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28493642 +2023-05-28 07:30:30 +0000 :: fe3 :: Task gfsfcst, jobid=28493642, in state DEAD (TIMEOUT), ran for 2404.0 seconds, exit status=15, try=2 (of 2) +2023-05-28 07:34:01 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 07:34:01 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:39381 +2023-05-28 19:08:14 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28495562 +2023-05-28 19:08:14 +0000 :: fe3 :: Task gfsfcst, jobid=28495562, in state FAILED (TIMEOUT), ran for 2413.0 seconds, exit status=255, try=1 (of 2) +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f000-f000 +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f006-f006 +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f012-f012 +2023-05-28 19:08:15 +0000 :: fe3 :: Submitting gfspost_f018-f018 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28521683 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f000-f000 succeeded, jobid=28521684 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f006-f006 succeeded, jobid=28521685 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f012-f012 succeeded, jobid=28521686 +2023-05-28 19:08:15 +0000 :: fe3 :: Submission of gfspost_f018-f018 succeeded, jobid=28521687 +2023-05-29 07:52:24 +0000 :: fe3 :: Submitting gfsfcst +2023-05-29 07:52:24 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28547487 +2023-05-29 08:33:19 +0000 :: fe3 :: Task gfsfcst, jobid=28547487, in state FAILED (FAILED), ran for 263.0 seconds, exit status=11, try=1 (of 2) +2023-05-29 08:33:19 +0000 :: fe3 :: Submitting gfsfcst +2023-05-29 08:33:19 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:46395 +2023-05-29 22:41:40 +0000 :: fe5 :: Submitting gfsfcst +2023-05-29 22:41:45 +0000 :: fe5 :: Submission of gfsfcst succeeded, jobid=28578743 +2023-05-29 22:47:52 +0000 :: fe5 :: Task gfsfcst, jobid=28578743, in state FAILED (CANCELLED), ran for 0.0 seconds, exit status=255, try=1 (of 2) +2023-05-29 22:47:52 +0000 :: fe5 :: Submitting gfsfcst +2023-05-29 22:47:52 +0000 :: fe5 :: Submission status of gfsfcst is pending at druby://fe5:35701 +2023-05-29 22:50:42 +0000 :: fe5 :: Submission status of previously pending gfsfcst is success, jobid=28578994 +2023-05-29 22:50:42 +0000 :: fe5 :: Task gfsfcst, jobid=28578994, in state RUNNING (RUNNING) +2023-06-01 01:41:26 +0000 :: fe5 :: Task gfsfcst, jobid=28578994, in state DEAD (FAILED), ran for 183.0 seconds, exit status=11, try=2 (of 2) +2023-06-01 01:43:09 +0000 :: fe5 :: Forcibly submitting gfsfcst +2023-06-01 01:43:09 +0000 :: fe5 :: Submission status of 
gfsfcst is pending at druby://fe5:36952 +2023-06-01 18:39:57 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28691806 +2023-06-01 18:40:01 +0000 :: fe3 :: Task gfsfcst, jobid=28691806, in state UNKNOWN (CANCELLED by 1254) +2023-06-01 18:40:24 +0000 :: fe3 :: Task gfsfcst, jobid=28691806, in state UNKNOWN (CANCELLED by 1254) +2023-06-01 18:40:30 +0000 :: fe3 :: Submitting gfsfcst +2023-06-01 18:40:30 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28727434 +2023-06-01 18:44:19 +0000 :: fe3 :: Task gfsfcst, jobid=28727434, in state FAILED (CANCELLED), ran for 111.0 seconds, exit status=15, try=1 (of 2) +2023-06-01 18:44:19 +0000 :: fe3 :: Submitting gfsfcst +2023-06-01 18:44:19 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:36686 diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/runcmds b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/runcmds new file mode 100644 index 0000000000..f1a8d2d8cd --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_thompson.xml -d v17_p8_thompson.db +rocotostat -w v17_p8_thompson.xml -d v17_p8_thompson.db diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/v17_p8_thompson.crontab b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/v17_p8_thompson.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/v17_p8_thompson.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/v17_p8_thompson.db b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/v17_p8_thompson.db new file mode 100644 index 0000000000..53a5f5f75b Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/v17_p8_thompson.db differ diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/v17_p8_thompson.xml b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/v17_p8_thompson.xml new file mode 100644 index 0000000000..caedede99e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/v17_p8_thompson.xml @@ -0,0 +1,154 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + hfv3gfs + + batch + 01:40:00 + 104:ppn=24:tpp=1 + xjet + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + 
DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/v17_p8_thompson_lock.db b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/v17_p8_thompson_lock.db new file mode 100644 index 0000000000..aea8cbe212 Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_thompson_8x8_xjet/v17_p8_thompson_lock.db differ diff --git a/FV3GFSwfm/testing/v17_p8_xjet/8x8 b/FV3GFSwfm/testing/v17_p8_xjet/8x8 new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.aero b/FV3GFSwfm/testing/v17_p8_xjet/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.aeroanl b/FV3GFSwfm/testing/v17_p8_xjet/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.aeroanlfinal b/FV3GFSwfm/testing/v17_p8_xjet/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.aeroanlinit b/FV3GFSwfm/testing/v17_p8_xjet/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.aeroanlrun b/FV3GFSwfm/testing/v17_p8_xjet/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.aerosol_init b/FV3GFSwfm/testing/v17_p8_xjet/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.anal b/FV3GFSwfm/testing/v17_p8_xjet/config.anal new file mode 100644 index 0000000000..018bab9597 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=56,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${CDATE}" -ge "2020040718" && "${CDATE}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${CDATE}" -ge "2020082412" && "${CDATE}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ "${CDATE}" -ge "2020091612" && 
"${CDATE}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${CDATE}" -ge "2021031712" && "${CDATE}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${CDATE}" -ge "2020011600" && "${CDATE}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${CDATE}" -ge "2019021900" && "${CDATE}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${CDATE}" -ge "2019110706" && "${CDATE}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${CDATE}" -ge "2020022012" && "${CDATE}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${CDATE}" -ge "2021052118" && "${CDATE}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${CDATE}" -ge "2021092206" && "${CDATE}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.analcalc b/FV3GFSwfm/testing/v17_p8_xjet/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.analdiag b/FV3GFSwfm/testing/v17_p8_xjet/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.arch b/FV3GFSwfm/testing/v17_p8_xjet/config.arch new file mode 100644 index 0000000000..c705e0b7ed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.arch @@ -0,0 +1,29 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. $EXPDIR/config.resources arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +#--keep forcing data for running gldas step +if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then + [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 +fi + +echo "END: config.arch" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.atmanl b/FV3GFSwfm/testing/v17_p8_xjet/config.atmanl new file mode 100644 index 0000000000..c0cd9e6733 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.atmanl @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.atmanlfinal b/FV3GFSwfm/testing/v17_p8_xjet/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.atmanlinit b/FV3GFSwfm/testing/v17_p8_xjet/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.atmanlrun b/FV3GFSwfm/testing/v17_p8_xjet/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.atmensanl b/FV3GFSwfm/testing/v17_p8_xjet/config.atmensanl new file mode 100755 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.atmensanlfinal b/FV3GFSwfm/testing/v17_p8_xjet/config.atmensanlfinal new file mode 100755 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.atmensanlinit b/FV3GFSwfm/testing/v17_p8_xjet/config.atmensanlinit new file mode 100755 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.atmensanlrun b/FV3GFSwfm/testing/v17_p8_xjet/config.atmensanlrun new file mode 100755 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.awips b/FV3GFSwfm/testing/v17_p8_xjet/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.base b/FV3GFSwfm/testing/v17_p8_xjet/config.base new file mode 100644 index 0000000000..ad17ca90ec --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.base @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." 
+export assim_freq=6 +export PSLOT="v17_p8_xjet" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.base.emc.dyn_EMC b/FV3GFSwfm/testing/v17_p8_xjet/config.base.emc.dyn_EMC new file mode 100755 index 0000000000..0af91daaed --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.base.emc.dyn_EMC @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=emc-global + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is 
defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
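+# Note: gfs_cyc also determines STEP_GFS below (24 / gfs_cyc), e.g. gfs_cyc=1
+# gives one GFS forecast every 24 hours and gfs_cyc=4 gives one every 6 hours.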
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
+ +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.base.emc.dyn_hera b/FV3GFSwfm/testing/v17_p8_xjet/config.base.emc.dyn_hera new file mode 100755 index 0000000000..b3a41848b7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.base.emc.dyn_hera @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. 
+export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
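+# Note: the APP case block below overrides these defaults, e.g. APP=S2SW turns
+# on DO_COUPLED, DO_OCN, DO_ICE, and DO_WAVE.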
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.base.emc.dyn_jet b/FV3GFSwfm/testing/v17_p8_xjet/config.base.emc.dyn_jet new file mode 100755 index 0000000000..3ed164af37 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.base.emc.dyn_jet @@ -0,0 +1,389 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
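+# IAU note: IAU_FHROT above is taken from the first character of IAUFHRS, so the
+# default IAUFHRS="3,6,9" resolves to IAU_FHROT=3.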
+ +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.com b/FV3GFSwfm/testing/v17_p8_xjet/config.com new file mode 100644 index 0000000000..40cba6da5a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.com @@ -0,0 +1,92 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. +# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx 
COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.coupled_ic b/FV3GFSwfm/testing/v17_p8_xjet/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.defaults.s2sw b/FV3GFSwfm/testing/v17_p8_xjet/config.defaults.s2sw new file mode 100644 index 0000000000..7f751e02bc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.defaults.s2sw @@ -0,0 +1,41 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." + +# config.wave + +waveGRD='mx025' +waveinterpGRD='reg025' +waveuoutpGRD='mx025' +MESH_WAV='mesh.mx025.nc' + +waveesmfGRD=' ' +wavepostGRD=' ' +waveGRDN="1" +waveGRDG="10" +USE_WAV_RMP="NO" +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' +waveMULTIGRID='.false.' 
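+# Note: the single mx025 grid above replaces the multi-grid waveGRD default from
+# config.base; config.base sources this file for S2S* apps.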
+ +# config.waveprep +IOSRV=1 + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.earc b/FV3GFSwfm/testing/v17_p8_xjet/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.ecen b/FV3GFSwfm/testing/v17_p8_xjet/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.echgres b/FV3GFSwfm/testing/v17_p8_xjet/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.ediag b/FV3GFSwfm/testing/v17_p8_xjet/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.efcs b/FV3GFSwfm/testing/v17_p8_xjet/config.efcs new file mode 100644 index 0000000000..a9b410e416 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.efcs @@ -0,0 +1,97 @@ +#! 
/usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 $CASE_ENKF" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "$CASE_ENKF" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.eobs b/FV3GFSwfm/testing/v17_p8_xjet/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. 
$EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.epos b/FV3GFSwfm/testing/v17_p8_xjet/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.esfc b/FV3GFSwfm/testing/v17_p8_xjet/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.eupd b/FV3GFSwfm/testing/v17_p8_xjet/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." 
# Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.fcst b/FV3GFSwfm/testing/v17_p8_xjet/config.fcst new file mode 120000 index 0000000000..4adec594e8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.fcst @@ -0,0 +1 @@ +config.fcst_orig \ No newline at end of file diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.fcst_gsl b/FV3GFSwfm/testing/v17_p8_xjet/config.fcst_gsl new file mode 100644 index 0000000000..7f4d5d72e1 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.fcst_gsl @@ -0,0 +1,481 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
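+
+# For illustration only (assumed values, not defaults): with CASE=C384, OCNRES=025, ICERES=025,
+# waveGRD="gnh_10m aoc_9km gsh_15m" and all DO_* switches set to "YES", the argument string
+# assembled near the top of this file makes that earlier call equivalent to
+#   source $EXPDIR/config.ufs --fv3 C384 --mom6 025 --cice6 025 --ww3 "gnh_10m;aoc_9km;gsh_15m"
+# (the ${waveGRD// /;} expansion swaps spaces for semicolons so the wave grids pass as one argument).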
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not used. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) + + #--used for GSL suite +##JKH export knob_ugwp_version=0 +##JKH export do_ugwp=".false." +##JKH export do_tofd=".true." +##JKH export do_ugwp_v0=".true." +##JKH export do_ugwp_v1=".false." +##JKH export do_ugwp_v0_orog_only=".false." +##JKH export do_ugwp_v0_nst_only=".false." +##JKH export do_gsl_drag_ls_bl=".false." +##JKH export do_gsl_drag_ss=".true." +##JKH export do_gsl_drag_tofd=".false." +##JKH export do_ugwp_v1_orog_only=".false." +##JKH export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +### JKH +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then + export d2_bg_k2=0.15 ### JKH - 10dec + export dz_min=2 + export dt_inner=40. ### JKH - 10dec +else + export d2_bg_k2=0.04 + export dz_min=6 +fi +if [[ ${LEVS} = "128" ]]; then export n_sponge=42; fi #127 layer +if [[ ${LEVS} = "65" ]]; then + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then + export n_sponge=23 + else + export n_sponge=42 + fi +fi +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export shal_cnv=".false." + else + export shal_cnv=".true." + fi + export do_mynnedmf=".true." + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then + export do_mynnsfclay=".true." + else + export do_mynnsfclay=".false." + fi + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true."
+ export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [[ "$progsigma" = ".true." ]]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=-1 ## JKH - no shallow GF + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 + export progsigma=.false. +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + export progsigma=.false. + else + export imfshalcnv=2 + fi +fi + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + export dt_inner=40. 
### JKH - 10dec + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + export sedi_semi=.true. + export decfl=10 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=".true." + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + export ltaerosol=".false." + fi + export lradar=".false." + export dt_inner=$((DELTIM/2)) + if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [ $imp_physics -eq 11 ]; then # GFDL + export ncld=5 + if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" + else + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + fi + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" + else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + fi + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.fcst_orig b/FV3GFSwfm/testing/v17_p8_xjet/config.fcst_orig new file mode 100644 index 0000000000..e8560be4ca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.fcst_orig @@ -0,0 +1,364 @@ +#! 
/usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." 
+ export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +export satmedmf=".true." +export isatmedmf=1 +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +#Convection schemes +export progsigma=".true." +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export ltaerosol=".false." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export dddmp=0.1 + export d4_bg=0.12 +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." 
]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
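+
+  # Worked example for the restart list assembled above (illustrative values only, not the defaults):
+  # with restart_interval_gfs=12 (so the else branch runs), FHMAX_GFS=120, DOIAU="YES" and IAU_OFFSET=6,
+  # the loop starts at xfh=12+(6/2)=15 and steps by 12, so it exports
+  #   restart_interval="15 27 39 51 63 75 87 99 111"
+  # i.e. a restart every restart_interval_gfs hours, shifted by half the IAU offset.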
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.fit2obs b/FV3GFSwfm/testing/v17_p8_xjet/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.gempak b/FV3GFSwfm/testing/v17_p8_xjet/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.getic b/FV3GFSwfm/testing/v17_p8_xjet/config.getic new file mode 100644 index 0000000000..fce3f9ecf6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. 
$EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDASINIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.gldas b/FV3GFSwfm/testing/v17_p8_xjet/config.gldas new file mode 100644 index 0000000000..c51829d9fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.gldas @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.gldas ########## +# GDAS gldas step specific + +echo "BEGIN: config.gldas" + +# Get task specific resources +. $EXPDIR/config.resources gldas + +export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh +export gldas_spinup_hours=72 +export CPCGAUGE=$DMPDIR +export FINDDATE=$USHgfs/finddate.sh + +echo "END: config.gldas" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.ice b/FV3GFSwfm/testing/v17_p8_xjet/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.init b/FV3GFSwfm/testing/v17_p8_xjet/config.init new file mode 100644 index 0000000000..3e016fb248 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.init @@ -0,0 +1,54 @@ +#! /usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. 
$EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDASINIT_DIR=${UFS_DIR}/util/gdas_init + +export CRES_HIRES=$CASE +export CRES_ENKF=$CASE_ENKF +export FRAC_ORO="yes" + +export RUNICSH=${GDASINIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDASINIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDASINIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDASINIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.landanl b/FV3GFSwfm/testing/v17_p8_xjet/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.landanlfinal b/FV3GFSwfm/testing/v17_p8_xjet/config.landanlfinal new file mode 100755 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.landanlinit b/FV3GFSwfm/testing/v17_p8_xjet/config.landanlinit new file mode 100755 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.landanlrun b/FV3GFSwfm/testing/v17_p8_xjet/config.landanlrun new file mode 100755 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.metp b/FV3GFSwfm/testing/v17_p8_xjet/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.nsst b/FV3GFSwfm/testing/v17_p8_xjet/config.nsst new file mode 100644 index 0000000000..b4c58eedb3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "$CDATE" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.ocn b/FV3GFSwfm/testing/v17_p8_xjet/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable is determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is time interval for applying increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.ocnanal b/FV3GFSwfm/testing/v17_p8_xjet/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size reolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.ocnanalbmat b/FV3GFSwfm/testing/v17_p8_xjet/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.ocnanalbmat @@ 
-0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.ocnanalchkpt b/FV3GFSwfm/testing/v17_p8_xjet/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.ocnanalpost b/FV3GFSwfm/testing/v17_p8_xjet/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.ocnanalprep b/FV3GFSwfm/testing/v17_p8_xjet/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.ocnanalrun b/FV3GFSwfm/testing/v17_p8_xjet/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.ocnanalvrfy b/FV3GFSwfm/testing/v17_p8_xjet/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.ocnpost b/FV3GFSwfm/testing/v17_p8_xjet/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.post b/FV3GFSwfm/testing/v17_p8_xjet/config.post new file mode 100644 index 0000000000..71b0fe79d2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.post @@ -0,0 +1,36 @@ +#! 
/usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +export npe_dwn=24 + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.postsnd b/FV3GFSwfm/testing/v17_p8_xjet/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.prep b/FV3GFSwfm/testing/v17_p8_xjet/config.prep new file mode 100644 index 0000000000..3e1cf8c32f --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. $EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. 
Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "$CDATE" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.resources b/FV3GFSwfm/testing/v17_p8_xjet/config.resources new file mode 100644 index 0000000000..1e5b747982 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.resources @@ -0,0 +1,971 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" + echo "argument can be any one of the following:" + echo "getic init coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag gldas fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export 
wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + 
npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif [[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" 
+ exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export 
nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ ${step} = "gldas" ]]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + npe_node_gldas=$(echo "${npe_node_max} / ${nth_gldas}" | bc) + export npe_node_gldas + export npe_gaussian=96 + export nth_gaussian=1 + npe_node_gaussian=$(echo "${npe_node_max} / ${nth_gaussian}" | bc) + export npe_node_gaussian + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. 
+ # However, it is suggested to limit the mediator PETS to 300, as larger values may degrade performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + 
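# How these per-step stanzas are consumed -- a minimal sketch only: the sourcing pattern below
# mirrors the config.<step> files later in this change set (e.g. config.wafs), while the echo
# line is a hypothetical caller added here purely for illustration:
#   . "${EXPDIR}/config.resources" wafs
#   echo "wafs job: ${npe_wafs} tasks x ${nth_wafs} threads, walltime ${wtime_wafs}, memory ${memory_wafs}"
# Each branch of this if/elif chain exports wtime_<step>, npe_<step>, nth_<step> and
# npe_node_<step>, which the workflow job cards read after sourcing this file with the step name.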
+elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif [[ ${step} = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" || ${step} = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export 
nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + 
npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.resources.nco.static b/FV3GFSwfm/testing/v17_p8_xjet/config.resources.nco.static new file mode 100644 index 0000000000..e6cd2ef73e --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.resources.nco.static @@ -0,0 +1,354 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "gldas" ]; then + + export wtime_gldas="00:10:00" + export npe_gldas=112 + export nth_gldas=1 + export npe_node_gldas=$npe_gldas + export npe_gaussian=96 + export nth_gaussian=1 + export npe_node_gaussian=$(echo "$npe_node_max / $nth_gaussian" | bc) + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + 
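# A worked check of the eobs stanza above (illustrative; the nodes-vs-tasks relationship is
# assumed from how npe_*, npe_node_* and nth_* are used throughout this file):
#   nodes needed   = npe_eobs / npe_node_eobs = 480 / 40 = 12
#   cores per node = npe_node_eobs * nth_eobs = 40 * 3   = 120  (<= npe_node_max = 128)
# i.e. tasks-per-node times threads-per-task should stay within the cores available on a node.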
+elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.sfcanl b/FV3GFSwfm/testing/v17_p8_xjet/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.ufs b/FV3GFSwfm/testing/v17_p8_xjet/config.ufs new file mode 100644 index 0000000000..580431cf19 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.ufs @@ -0,0 +1,371 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|mx050|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=8 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=8 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + 
export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + 
MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.vrfy b/FV3GFSwfm/testing/v17_p8_xjet/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.wafs b/FV3GFSwfm/testing/v17_p8_xjet/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.wafsblending b/FV3GFSwfm/testing/v17_p8_xjet/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.wafsblending0p25 b/FV3GFSwfm/testing/v17_p8_xjet/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.wafsgcip b/FV3GFSwfm/testing/v17_p8_xjet/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.wafsgrib2 b/FV3GFSwfm/testing/v17_p8_xjet/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.wafsgrib20p25 b/FV3GFSwfm/testing/v17_p8_xjet/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.wave b/FV3GFSwfm/testing/v17_p8_xjet/config.wave new file mode 100644 index 0000000000..658c4b40ae --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.wave @@ -0,0 +1,159 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +# GFSv16 +export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} +export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi +export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi +export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients +export waveMULTIGRID=${waveMULTIGRID:-'.true.'} +export MESH_WAV=${MESH_WAV:-'mesh.gwes_30m.nc'} + +# ESMF input grid +export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-'points'} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing 
restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'3'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.waveawipsbulls b/FV3GFSwfm/testing/v17_p8_xjet/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.waveawipsgridded b/FV3GFSwfm/testing/v17_p8_xjet/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.wavegempak b/FV3GFSwfm/testing/v17_p8_xjet/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.wavegempak @@ -0,0 +1,13 @@ +#! 
/usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.waveinit b/FV3GFSwfm/testing/v17_p8_xjet/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.wavepostbndpnt b/FV3GFSwfm/testing/v17_p8_xjet/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.wavepostbndpntbll b/FV3GFSwfm/testing/v17_p8_xjet/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.wavepostpnt b/FV3GFSwfm/testing/v17_p8_xjet/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.wavepostsbs b/FV3GFSwfm/testing/v17_p8_xjet/config.wavepostsbs new file mode 100644 index 0000000000..4faa4f5ebe --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.wavepostsbs @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +export DOGRI_WAV='YES' # Create interpolated grids +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/config.waveprep b/FV3GFSwfm/testing/v17_p8_xjet/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/config.waveprep @@ -0,0 +1,27 @@ +#! 
/usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/testing/v17_p8_xjet/logs/2022111000.log b/FV3GFSwfm/testing/v17_p8_xjet/logs/2022111000.log new file mode 100644 index 0000000000..3ac53a2810 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/logs/2022111000.log @@ -0,0 +1,51 @@ +2023-05-28 01:31:38 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 01:31:38 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:36854 +2023-05-28 01:33:22 +0000 :: fe3 :: Submission status of previously pending gfsfcst is failure! sbatch: error: Batch job submission failed: Invalid qos specification +2023-05-28 01:33:22 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 01:33:23 +0000 :: fe3 :: Submission of gfsfcst succeeded, jobid=28483216 +2023-05-28 01:34:46 +0000 :: fe3 :: Task gfsfcst, jobid=28483216, in state QUEUED (PENDING) +2023-05-28 06:05:19 +0000 :: fe2 :: Submitting gfsfcst +2023-05-28 06:05:22 +0000 :: fe2 :: Submission of gfsfcst succeeded, jobid=28492564 +2023-05-28 06:38:42 +0000 :: fe3 :: Task gfsfcst, jobid=28492564, in state FAILED (FAILED), ran for 219.0 seconds, exit status=11, try=1 (of 2) +2023-05-28 06:38:42 +0000 :: fe3 :: Submitting gfsfcst +2023-05-28 06:38:42 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:33495 +2023-05-29 16:48:27 +0000 :: fe2 :: Task gfsfcst, jobid=28566454, in state FAILED (CANCELLED), ran for 20.0 seconds, exit status=15, try=1 (of 2) +2023-05-29 16:48:27 +0000 :: fe2 :: Task gfspost_f000-f000, jobid=28566455, in state FAILED (CANCELLED), ran for 20.0 seconds, exit status=15, try=1 (of 2) +2023-05-29 16:48:27 +0000 :: fe2 :: Task gfspost_f006-f006, jobid=28566457, in state FAILED (CANCELLED), ran for 20.0 seconds, exit status=15, try=1 (of 2) +2023-05-29 16:48:27 +0000 :: fe2 :: Task gfspost_f012-f012, jobid=28566459, in state FAILED (CANCELLED), ran for 20.0 seconds, exit status=15, try=1 (of 2) +2023-05-29 16:48:27 +0000 :: fe2 :: Submitting gfsfcst +2023-05-29 16:48:27 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:32912 +2023-05-29 16:48:50 +0000 :: fe2 :: Submission status of previously pending gfsfcst is success, jobid=28566687 +2023-05-29 16:48:50 +0000 :: fe2 :: Task gfsfcst, jobid=28566687, in state RUNNING (RUNNING) +2023-05-29 16:49:10 +0000 :: fe2 :: Submitting gfsfcst +2023-05-29 16:49:10 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:38302 +2023-05-29 17:17:08 +0000 :: fe2 :: Submission status of previously pending gfsfcst is success, jobid=28566693 +2023-05-29 17:17:08 +0000 :: fe2 :: Task gfsfcst, jobid=28566693, in state FAILED (FAILED), ran for 178.0 seconds, exit status=11, try=1 (of 2) +2023-05-29 17:17:08 +0000 :: fe2 :: Submitting gfsfcst +2023-05-29 17:17:08 +0000 :: fe2 :: Submission status of gfsfcst is pending at druby://fe2:39108 +2023-05-29 18:15:18 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28567880 +2023-05-29 
18:15:26 +0000 :: fe3 :: Task gfsfcst, jobid=28567880, in state DEAD (TIMEOUT), ran for 2416.0 seconds, exit status=255, try=2 (of 2) +2023-05-29 18:15:26 +0000 :: fe3 :: Submitting gfspost_f000-f000 +2023-05-29 18:15:26 +0000 :: fe3 :: Submitting gfspost_f006-f006 +2023-05-29 18:15:26 +0000 :: fe3 :: Submitting gfspost_f012-f012 +2023-05-29 18:15:26 +0000 :: fe3 :: Submission of gfspost_f000-f000 succeeded, jobid=28570041 +2023-05-29 18:15:26 +0000 :: fe3 :: Submission of gfspost_f006-f006 succeeded, jobid=28570042 +2023-05-29 18:15:26 +0000 :: fe3 :: Submission of gfspost_f012-f012 succeeded, jobid=28570043 +2023-05-29 18:15:49 +0000 :: fe3 :: Task gfspost_f000-f000, jobid=28570041, in state RUNNING (RUNNING) +2023-05-29 18:15:49 +0000 :: fe3 :: Task gfspost_f006-f006, jobid=28570042, in state RUNNING (RUNNING) +2023-05-29 18:15:49 +0000 :: fe3 :: Task gfspost_f012-f012, jobid=28570043, in state RUNNING (RUNNING) +2023-05-29 18:15:49 +0000 :: fe3 :: Forcibly submitting gfsfcst +2023-05-29 18:15:49 +0000 :: fe3 :: Submission status of gfsfcst is pending at druby://fe3:46659 +2023-05-29 20:14:07 +0000 :: fe3 :: Submission status of previously pending gfsfcst is success, jobid=28570044 +2023-05-29 20:14:08 +0000 :: fe3 :: Task gfsfcst, jobid=28570044, in state SUCCEEDED (COMPLETED), ran for 4494.0 seconds, exit status=0, try=4 (of 2) +2023-05-29 20:14:08 +0000 :: fe3 :: Task gfspost_f000-f000, jobid=28570041, in state SUCCEEDED (COMPLETED), ran for 136.0 seconds, exit status=0, try=1 (of 2) +2023-05-29 20:14:08 +0000 :: fe3 :: Task gfspost_f006-f006, jobid=28570042, in state SUCCEEDED (COMPLETED), ran for 154.0 seconds, exit status=0, try=1 (of 2) +2023-05-29 20:14:08 +0000 :: fe3 :: Task gfspost_f012-f012, jobid=28570043, in state SUCCEEDED (COMPLETED), ran for 157.0 seconds, exit status=0, try=1 (of 2) +2023-05-29 20:14:08 +0000 :: fe3 :: Submitting gfspost_f018-f018 +2023-05-29 20:14:09 +0000 :: fe3 :: Submitting gfspost_f024-f024 +2023-05-29 20:14:09 +0000 :: fe3 :: Submission of gfspost_f018-f018 succeeded, jobid=28574064 +2023-05-29 20:14:09 +0000 :: fe3 :: Submission of gfspost_f024-f024 succeeded, jobid=28574065 +2023-05-29 20:22:08 +0000 :: fe3 :: Task gfspost_f018-f018, jobid=28574064, in state SUCCEEDED (COMPLETED), ran for 144.0 seconds, exit status=0, try=1 (of 2) +2023-05-29 20:22:09 +0000 :: fe3 :: Task gfspost_f024-f024, jobid=28574065, in state FAILED (FAILED), ran for 97.0 seconds, exit status=256, try=1 (of 2) +2023-05-29 20:22:09 +0000 :: fe3 :: Submitting gfspost_f024-f024 +2023-05-29 20:22:09 +0000 :: fe3 :: Submission status of gfspost_f024-f024 is pending at druby://fe3:40089 diff --git a/FV3GFSwfm/testing/v17_p8_xjet/runcmds b/FV3GFSwfm/testing/v17_p8_xjet/runcmds new file mode 100644 index 0000000000..331bec8558 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8.xml -d v17_p8.db +rocotostat -w v17_p8.xml -d v17_p8.db diff --git a/FV3GFSwfm/testing/v17_p8_xjet/v17_p8.crontab b/FV3GFSwfm/testing/v17_p8_xjet/v17_p8.crontab new file mode 100644 index 0000000000..678c56bd66 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/v17_p8.crontab @@ -0,0 +1,5 @@ + +#################### v17_p8 #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_01may23/FV3GFSwfm/v17_p8/v17_p8.xml +################################################################# diff --git 
a/FV3GFSwfm/testing/v17_p8_xjet/v17_p8.db b/FV3GFSwfm/testing/v17_p8_xjet/v17_p8.db new file mode 100644 index 0000000000..533e0f18a7 Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_xjet/v17_p8.db differ diff --git a/FV3GFSwfm/testing/v17_p8_xjet/v17_p8.xml b/FV3GFSwfm/testing/v17_p8_xjet/v17_p8.xml new file mode 100644 index 0000000000..2220ff8663 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/v17_p8.xml @@ -0,0 +1,151 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + batch + 01:40:00 + xjet + 104:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 01:00:00 + 4:ppn=24:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_xjet/v17_p8_ALL.xml b/FV3GFSwfm/testing/v17_p8_xjet/v17_p8_ALL.xml new file mode 100644 index 0000000000..1310a1eab7 --- /dev/null +++ b/FV3GFSwfm/testing/v17_p8_xjet/v17_p8_ALL.xml @@ -0,0 +1,195 @@ + + + + + + + + + + + + + +]> + + + + &HOMEgfs;/FV3GFSwfm/&PSLOT;/logs/@Y@m@d@H.log + + + 202211100000 202211100000 24:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3-dev + batch + vjet + 00:02:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + + debug + 00:30:00 + vjet + 232:ppn=16:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + 
&ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + vjet + 01:00:00 + 4:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + vjet + 06:00:00 + 1:ppn=1:tpp=1 + 16384M + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&HOMEgfs;/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + + + + diff --git a/FV3GFSwfm/testing/v17_p8_xjet/v17_p8_lock.db b/FV3GFSwfm/testing/v17_p8_xjet/v17_p8_lock.db new file mode 100644 index 0000000000..ea13b3df99 Binary files /dev/null and b/FV3GFSwfm/testing/v17_p8_xjet/v17_p8_lock.db differ diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/12x12x1wgx14wt b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/12x12x1wgx14wt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/1node_post b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/1node_post new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.aero b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.aeroanl b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.aeroanlfinal b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.aeroanlinit b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.aeroanlrun b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.aerosol_init b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.anal b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.anal new file mode 100644 index 0000000000..e3a17f9c6a --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=45,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${PDY}${cyc}" -ge "2020052612" && "${PDY}${cyc}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${PDY}${cyc}" -ge "2020082412" && "${PDY}${cyc}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C 
GNSSRO + if [[ "${PDY}${cyc}" -ge "2020091612" && "${PDY}${cyc}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${PDY}${cyc}" -ge "2021031712" && "${PDY}${cyc}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${PDY}${cyc}" -ge "2020011600" && "${PDY}${cyc}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${PDY}${cyc}" -ge "2020022012" && "${PDY}${cyc}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${PDY}${cyc}" -ge "2021052118" && "${PDY}${cyc}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${PDY}${cyc}" -ge "2021092206" && "${PDY}${cyc}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.analcalc b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.analdiag b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.arch b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.arch new file mode 100644 index 0000000000..6a0f6306a8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.arch @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. "${EXPDIR}/config.resources" arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +echo "END: config.arch" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.atmanl b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.atmanl new file mode 100644 index 0000000000..c045704fa2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.atmanl @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.atmanlfinal b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.atmanlinit b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.atmanlrun b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.atmensanl b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.atmensanl new file mode 100644 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.atmensanlfinal b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.atmensanlfinal new file mode 100644 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.atmensanlinit b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.atmensanlinit new file mode 100644 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.atmensanlrun b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.atmensanlrun new file mode 100644 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.awips b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.base b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.base new file mode 100644 index 0000000000..a0d81ddf51 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.base @@ -0,0 +1,384 @@ +#! 
/usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="HERA" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="hera" +export PARTITION_POST_BATCH="hera" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT="fim" + +# Directories relative to installation areas: +export HOMEgfs=/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_19may23 +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/scratch1/NCEPDEV/global/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/scratch1/NCEPDEV/global/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/scratch1/NCEPDEV/global/glopara/dump" + +# USER specific paths +export HOMEDIR="/scratch1/BMC/gsd-fv3-dev/NCEPDEV/global/${USER}" +export STMP="${ROTDIR}/.." +export PTMP="${ROTDIR}/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/scratch1/NCEPDEV/global/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022110900 +export EDATE=2022110900 +export EXP_WARM_START=".false." 
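The SDATE/EDATE values above and the "${PDY}${cyc}" date-window checks used throughout these config files rely on 10-digit YYYYMMDDHH stamps, the same @Y@m@d@H, @Y@m@d, and @H fields the rocoto XML passes in as CDATE, PDY, and cyc. A minimal sketch with illustrative values only (the exact variable handling lives elsewhere in the workflow scripts):

CDATE=2022110900              # YYYYMMDDHH, i.e. @Y@m@d@H from the rocoto XML
PDY=${CDATE:0:8}              # 20221109   (@Y@m@d)
cyc=${CDATE:8:2}              # 00         (@H)
# Zero-padded stamps compare cleanly as integers, which is how the retrospective
# date windows (e.g. the DUMP_SUFFIX check just below) are written.
if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then
  echo "inside the NCO GFS v15.3 parallel dump window"
fi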
+export assim_freq=6 +export PSLOT="v17_p8_12x12_2wg_12wt" +export EXPDIR="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_19may23/FV3GFSwfm/${PSLOT}" +export ROTDIR="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_19may23/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='mx025' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#JKHexport FHMAX_GFS_00=${FHMAX_GFS_00:-24} +#JKHexport FHMAX_GFS_06=${FHMAX_GFS_06:-24} +#JKHexport FHMAX_GFS_12=${FHMAX_GFS_12:-24} +#JKHexport FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ### JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="NO" +export DO_JEDIATMENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.com b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.com new file mode 100644 index 0000000000..6a824012c6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.com @@ -0,0 +1,93 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. 
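As a concrete illustration of the template convention described above: the single quotes keep each template literal when this file is sourced, and the path only materializes once the runtime variables are set. The snippet below is only a sketch of that deferred expansion with made-up values; the real substitution is performed by the generate_com() helper in ush/preamble.sh, which is not shown in this hunk.

ROTDIR=/path/to/FV3GFSrun/v17_p8           # placeholder value for illustration
RUN=gfs
YMD=20221110
HH=00
MEMDIR=""                                  # deterministic run: no member subdirectory
COM_ATMOS_HISTORY_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}/model_data/atmos/history'
# Stand-in for the deferred expansion that generate_com() performs:
eval "COM_ATMOS_HISTORY=${COM_ATMOS_HISTORY_TMPL}"
echo "${COM_ATMOS_HISTORY}"
# -> /path/to/FV3GFSrun/v17_p8/gfs.20221110/00//model_data/atmos/history
# (an empty MEMDIR leaves two slashes in a row, consistent with the double slash
#  seen after the cycle hour in the rocoto XML paths)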
+# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_LAND_ANALYSIS_TMPL=${COM_BASE}'/analysis/land' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.coupled_ic b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.coupled_ic @@ -0,0 +1,43 @@ +#! 
/usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.defaults.s2sw b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.defaults.s2sw new file mode 100644 index 0000000000..1b0becefec --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.defaults.s2sw @@ -0,0 +1,26 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." + +# config.wave +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.earc b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ecen b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.echgres b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.echgres @@ -0,0 +1,11 @@ +#! 
/usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ediag b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.efcs b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.efcs new file mode 100644 index 0000000000..95c2cb58de --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.efcs @@ -0,0 +1,97 @@ +#! /usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE_ENS}" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. 
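The `source ${EXPDIR}/config.ufs ${string}` construction near the top of this file (and again in config.fcst later in this patch) builds its argument list piece by piece. A short sketch of how that string comes together for a hypothetical coupled ensemble; the two-grid waveGRD value is invented purely to show the space-to-semicolon substitution, since the default in this experiment is a single mx025 grid:

CASE_ENS="C384"; OCNRES="025"; ICERES="025"
DO_OCN="YES"; DO_ICE="YES"; DO_WAVE="YES"
waveGRD="gnh_10m gsh_15m"                      # hypothetical multi-grid list
string="--fv3 ${CASE_ENS}"
[[ ${DO_OCN} == "YES" ]] && string="${string} --mom6 ${OCNRES}"
[[ ${DO_ICE} == "YES" ]] && string="${string} --cice6 ${ICERES}"
[[ ${DO_WAVE} == "YES" ]] && string="${string} --ww3 ${waveGRD// /;}"   # spaces -> ';' so the grid list stays a single argument
echo "${string}"
# -> --fv3 C384 --mom6 025 --cice6 025 --ww3 gnh_10m;gsh_15m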
+ +# ocean model resolution +case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.eobs b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. $EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.epos b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.esfc b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.eupd b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." 
# model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." # Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.fcst b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.fcst new file mode 100644 index 0000000000..e0943c33ce --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.fcst @@ -0,0 +1,431 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. 
while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export FCSTEXEC="ufs_model_haiqin.x" +else + export FCSTEXEC="ufs_model.x" +fi + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".false." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +export progsigma=".true." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export imfdeepcnv=5 + export imfshalcnv=5 + export progsigma=.false. 
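+  # Note (added comment; scheme names inferred from the suite names and the
+  # "2-SASAS, 3-GF" note above): imfdeepcnv/imfshalcnv=5 selects the convection scheme
+  # paired with the FV3_GFS_v17_p8_c3 suite, with the prognostic-sigma closure turned
+  # off for that combination; the branches below fall back to GF (3) or SAS (2).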
+elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + else + export imfshalcnv=2 + fi +fi + +#Convection schemes ### JKH - affects field table name +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + #JKH ??? export dt_inner=40. ### JKH - 10dec + #JKH export sedi_semi=??? + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + #JKHif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_thompson" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export ltaerosol=".true." + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke${tbp}" + else + export ltaerosol=".false." + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + #JKHexport FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" 
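+  # Added comment: this branch only prints the message; it does not exit, so sourcing
+  # continues without a FIELD_TABLE being set for the unrecognized imp_physics value.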
+ +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
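+
+  # Worked example (illustrative numbers, not original settings): with
+  # restart_interval_gfs=12, DOIAU=YES (IAU_OFFSET=6) and FHMAX_GFS=120, the loop above
+  # starts at xfh = 12 + 6/2 = 15 and steps by 12, giving
+  #   restart_interval="15 27 39 51 63 75 87 99 111"
+  # i.e. restart times shifted by half the IAU window relative to the nominal cadence.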
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.fit2obs b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.gempak b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ice b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.landanl b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.landanl @@ -0,0 +1,23 @@ +#! 
/usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.landanlfinal b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.landanlfinal new file mode 100644 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.landanlinit b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.landanlinit new file mode 100644 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.landanlrun b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.landanlrun new file mode 100644 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.metp b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
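+# Note (added comment; the expanded name is an illustration, not an original setting):
+# the {init?...} and {lead?...} tokens in the *_file_format strings are METplus
+# filename templates, e.g. a 24-hour gfs forecast initialized at 2021010100 would be
+# matched by model_file_format as pgbf24.gfs.2021010100.grib2.
+# make_met_data_by="VALID" below organizes the generated verification data by valid
+# time rather than by initialization time.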
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.nsst b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.nsst new file mode 100644 index 0000000000..235c91f08b --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "${PDY}${cyc}" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocn b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable is determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is time interval for applying increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocnanal b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size reolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocnanalbmat b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ 
b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocnanalchkpt b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocnanalpost b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocnanalprep b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocnanalrun b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocnanalvrfy b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocnpost b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.post b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.post new file mode 100644 index 0000000000..3fca0a7b65 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.post @@ -0,0 +1,44 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_dwn=16 +elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_dwn=24 +elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_dwn=16 +else + export npe_dwn=24 +fi + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.postsnd b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.prep b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.prep new file mode 100644 index 0000000000..b05b82a43e --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "${PDY}${cyc}" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.preplandobs b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.preplandobs new file mode 100644 index 0000000000..d69b0f7f59 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.preplandobs @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.preplandobs ########## +# Land Obs Prep specific + +echo "BEGIN: config.preplandobs" + +# Get task specific resources +. "${EXPDIR}/config.resources" preplandobs + +export FIMS_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/fims.nml.j2" +export IMS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/prep_ims.yaml" +export CALCFIMSEXE="${HOMEgfs}/exec/calcfIMS.exe" +export IMS2IODACONV="${HOMEgfs}/ush/imsfv3_scf2ioda.py" + +echo "END: config.preplandobs" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.resources b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.resources new file mode 100644 index 0000000000..38efea7882 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.resources @@ -0,0 +1,972 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "coupled_ic aerosol_init" + echo "prep preplandobs" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_node_max=24 + elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ "${machine}" = "HERA" ]]; then + export npe_node_max=40 +elif [[ "${machine}" = "S4" ]]; then + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_node_max=32 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ "${machine}" = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ "${step}" = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "preplandobs" ]]; then + export wtime_preplandobs="00:05:00" + npe_preplandobs=1 + export npe_preplandobs + export nth_preplandobs=1 + npe_node_preplandobs=1 + export npe_node_preplandobs + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ "${step}" = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ "${step}" = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ "${step}" = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export 
memory_wavepostsbs_gfs="10GB" + +elif [[ "${step}" = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ "${step}" = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ "${step}" = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ "${step}" = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ 
"${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif [[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / 
${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ "${step}" = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ "${CASE}" = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ "${machine}" = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ "${machine}" = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ "${step}" = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ "${step}" = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export layout_x=${layout_x_gfs} + export 
layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case ${CASE} in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + #JKHdeclare -x "wtime_${step}_gfs"="06:00:00" + declare -x "wtime_${step}_gfs"="04:00:00" ## JKH - make walltime smaller + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + 
;; + esac + + unset _CDUMP _CDUMP_LIST + unset NTASKS_TOT + +elif [[ "${step}" = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ "${machine}" == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requiremtn + npe_ocnpost=2 + fi + +elif [[ "${step}" = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="00:20:00" ## JKH - make walltime smaller + #JKH export wtime_post_gfs="01:00:00" + #JKH export npe_post=126 + export npe_post=${npe_node_max} ## JKH - change to use 1 node for post + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ "${step}" = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ "${step}" = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ "${step}" = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ "${step}" = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ "${step}" = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ "${step}" = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ "${step}" = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ "${machine}" == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ "${machine}" == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ "${step}" = "echgres" ]]; 
then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif [[ "${step}" = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ "${step}" = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ "${step}" = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ "${step}" = "arch" || "${step}" = "earc" || "${step}" = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ "${step}" = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ "${step}" = "eobs" || "${step}" = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eobs=200 + elif [[ "${CASE}" = "C384" ]]; then + export npe_eobs=100 + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ "${machine}" = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ "${step}" = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ "${step}" = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if 
[[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ "${CASE}" = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ "${machine}" = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ "${step}" = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ "${CASE}" = "C384" || "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ "${step}" = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ "${step}" = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ "${step}" = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ "${step}" = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.resources.nco.static b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.resources.nco.static new file mode 100644 index 0000000000..d98e985b95 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.resources.nco.static @@ -0,0 +1,344 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + +elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + 
export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.sfcanl b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ufs b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ufs new file mode 100644 index 0000000000..5d3a16b329 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.ufs @@ -0,0 +1,373 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" + echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" 
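# Illustrative invocations (a sketch; the exact call site is not shown in this
# patch): the named flags handled by the case statement above are passed when
# this file is sourced, e.g.
#   . "${EXPDIR}/config.ufs" --fv3 C768
# or, for a coupled setup,
#   . "${EXPDIR}/config.ufs" --fv3 C384 --mom6 025 --cice6 025 --ww3 mx025
# Any unrecognized flag falls through to this case arm and aborts.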
+ exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + 
export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" 
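# Worked example (annotation only) of the task-count arithmetic earlier in this
# file, using the C768 gfs values set above (layout_x_gfs=12, layout_y_gfs=12,
# WRITE_GROUP_GFS=2, WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12):
#   ntasks_fv3_gfs                   = 12 * 12 * 6 = 864
#   WRTTASK_PER_GROUP_PER_THREAD_GFS = 12 * 6      = 72
#   ntasks_quilt_gfs                 = 2 * 72      = 144
# i.e. 864 compute tasks plus 144 write/quilt tasks for the gfs forecast, which
# appears to be the 12x12 / 2wg / 12wt decomposition this experiment directory
# is named for.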
+ exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "glo_025") + ntasks_ww3=262 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.vrfy b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export 
HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wafs b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. $EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wafsblending b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wafsblending0p25 b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wafsgcip b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wafsgrib2 b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! 
/usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wafsgrib20p25 b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wave b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wave new file mode 100644 index 0000000000..ba7b7ad259 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wave @@ -0,0 +1,193 @@ +#! /usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +export waveGRD=${waveGRD:-'mx025'} + +#grid dependent variable defaults +export waveGRDN='1' # grid number for ww3_multi +export waveGRDG='10' # grid group for ww3_multi +export USE_WAV_RMP='NO' # YES/NO rmp grid remapping pre-processed coefficients +export waveMULTIGRID='.false.' # .true./.false. for multi or shel +export MESH_WAV="mesh.${waveGRD}.nc" # Mesh grid for wave model for CMEPS +export waveesmfGRD=' ' # input grid for multigrid + +#Grid dependent variables for various grids +case "${waveGRD}" in + "gnh_10m;aoc_9km;gsh_15m") + #GFSv16 settings: + export waveGRDN='1 2 3' + export waveGRDG='10 20 30' + export USE_WAV_RMP='YES' + export waveMULTIGRID='.true.' 
+ export IOSRV='3' + export MESH_WAV=' ' + export waveesmfGRD='glox_10m' + export waveuoutpGRD='points' + export waveinterpGRD='glo_15mxt at_10m ep_10m wc_10m glo_30m' + export wavepostGRD='gnh_10m aoc_9km gsh_15m' + ;; + "gwes_30m") + #Grid used for P8 + export waveinterpGRD=' ' + export wavepostGRD='gwes_30m' + ;; + "mx025") + #Grid used for HR1 (tripolar 1/4 deg) + export waveinterpGRD='reg025' + export wavepostGRD=' ' + ;; + "glo_025") + #GEFSv13 regular lat/lon 1/4 deg grid + export waveinterpGRD=' ' + export wavepostGRD='glo_025' + ;; + *) + echo "No grid specific wave config values" + ;; +esac + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-${waveGRD}} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-${waveGRD}} # Native computational grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if 
ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_shel/multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'1'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.waveawipsbulls b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.waveawipsgridded b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wavegempak b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.waveinit b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. 
$EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wavepostbndpnt b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wavepostbndpntbll b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wavepostpnt b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wavepostsbs b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wavepostsbs new file mode 100644 index 0000000000..f9f8c81d44 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.wavepostsbs @@ -0,0 +1,28 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +if [[ -z ${waveinterpGRD} ]]; then + export DOGRI_WAV='YES' # Create interpolated grids +else + export DOGRI_WAV='NO' # Do not create interpolated grids +fi +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.waveprep b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. 
$EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/runcmds b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/runcmds new file mode 100644 index 0000000000..ac5942694a --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/runcmds @@ -0,0 +1,7 @@ + +rocotorun -w test-p8.xml -d test-p8.db +rocotostat -w test-p8.xml -d test-p8.db + +rocotorun -w storms-p8.xml -d storms-p8.db +rocotostat -w storms-p8.xml -d storms-p8.db + diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/storms-p8.xml b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/storms-p8.xml new file mode 100644 index 0000000000..15916e26f9 --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/storms-p8.xml @@ -0,0 +1,161 @@ + + + + + + + + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + 201908281200 201909241200 27:00:00:00 + 201909141800 201910060000 21:06:00:00 + 201907020600 201912221200 173:06:00:00 + 201907110000 201909141200 65:12:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3 + batch + hera + 00:02:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_19may23/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3 + batch + hera + 02:30:00 + 101:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_19may23/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3 + batch + hera + 00:10:00 + 1:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_19may23/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + 
FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + diff --git a/FV3GFSwfm/v17_p8_12x12_2wg_12wt/test-p8.xml b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/test-p8.xml new file mode 100644 index 0000000000..f1160db34b --- /dev/null +++ b/FV3GFSwfm/v17_p8_12x12_2wg_12wt/test-p8.xml @@ -0,0 +1,157 @@ + + + + + + + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3 + batch + hera + 00:02:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3 + batch + hera + 04:00:00 + 101:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3 + batch + hera + 00:20:00 + 1:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/12x12x1wgx14wt b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/12x12x1wgx14wt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/1node_post b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/1node_post new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.aero b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. 
+AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.aeroanl b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.aeroanlfinal b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.aeroanlinit b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. 
$EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.aeroanlrun b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.aerosol_init b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.aerosol_init @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.anal b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.anal new file mode 100644 index 0000000000..e3a17f9c6a --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=45,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. 
in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${PDY}${cyc}" -ge "2020052612" && "${PDY}${cyc}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${PDY}${cyc}" -ge "2020082412" && "${PDY}${cyc}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ "${PDY}${cyc}" -ge "2020091612" && "${PDY}${cyc}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${PDY}${cyc}" -ge "2021031712" && "${PDY}${cyc}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. 
+ # Assimilate COSMIC-2 GPS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${PDY}${cyc}" -ge "2020011600" && "${PDY}${cyc}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${PDY}${cyc}" -ge "2020022012" && "${PDY}${cyc}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${PDY}${cyc}" -ge "2021052118" && "${PDY}${cyc}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${PDY}${cyc}" -ge "2021092206" && "${PDY}${cyc}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.analcalc b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.analdiag b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. $EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.arch b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.arch new file mode 100644 index 0000000000..6a0f6306a8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.arch @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +echo "END: config.arch" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.atmanl b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.atmanl new file mode 100644 index 0000000000..c045704fa2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.atmanl @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.atmanlfinal b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.atmanlinit b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.atmanlrun b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.atmensanl b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.atmensanl new file mode 100644 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.atmensanl @@ -0,0 +1,22 @@ +#! 
/usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.atmensanlfinal b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.atmensanlfinal new file mode 100644 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.atmensanlinit b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.atmensanlinit new file mode 100644 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.atmensanlrun b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.atmensanlrun new file mode 100644 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.awips b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.base b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.base new file mode 100644 index 0000000000..b5375c1b6d --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.base @@ -0,0 +1,384 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="HERA" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="gsd-fv3" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="hera" +export PARTITION_POST_BATCH="hera" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT="fim" + +# Directories relative to installation areas: +export HOMEgfs=/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_19may23 +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/scratch1/NCEPDEV/global/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/scratch1/NCEPDEV/global/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/scratch1/NCEPDEV/global/glopara/dump" + +# USER specific paths +export HOMEDIR="/scratch1/BMC/gsd-fv3-dev/NCEPDEV/global/${USER}" +export STMP="${ROTDIR}/.." +export PTMP="${ROTDIR}/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/scratch1/NCEPDEV/global/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022110900 +export EDATE=2022110900 +export EXP_WARM_START=".false." 
+export assim_freq=6 +export PSLOT="v17_p8_c3_12x12_1wg_14wt" +export EXPDIR="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_19may23/FV3GFSwfm/${PSLOT}" +export ROTDIR="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_19may23/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_c3" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='mx025' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
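+      # For reference: APP is matched against these patterns in order, so a
+      # hypothetical APP=S2SWA would land in this S2S* branch and the two
+      # regex checks ("=~ A$" and "=~ ^S2SW") would switch on aerosols and
+      # waves on top of the coupled defaults. This experiment sets APP=ATM
+      # above, so only the plain atmosphere branch is active.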
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#JKHexport FHMAX_GFS_00=${FHMAX_GFS_00:-24} +#JKHexport FHMAX_GFS_06=${FHMAX_GFS_06:-24} +#JKHexport FHMAX_GFS_12=${FHMAX_GFS_12:-24} +#JKHexport FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ### JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="NO" +export DO_JEDIATMENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.com b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.com new file mode 100644 index 0000000000..6a824012c6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.com @@ -0,0 +1,93 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. 
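+#
+# As an illustration of the templates defined below: COM_ATMOS_ANALYSIS_TMPL
+# combines COM_BASE ('${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}') with
+# '/analysis/atmos', so a call such as
+#   YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS
+# would resolve to something like ${ROTDIR}/gfs.20221109/00/analysis/atmos
+# for this experiment's gfs run at cycle 2022110900 (MEMDIR empty for the
+# deterministic run), and declare the result read-only and exported.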
+# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_LAND_ANALYSIS_TMPL=${COM_BASE}'/analysis/land' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.coupled_ic b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.coupled_ic @@ -0,0 +1,43 @@ +#! 
/usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.defaults.s2sw b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.defaults.s2sw new file mode 100644 index 0000000000..1b0becefec --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.defaults.s2sw @@ -0,0 +1,26 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." + +# config.wave +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.earc b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ecen b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) 
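+  # For reference, with the usual IAUFHRS_ENKF="3,6,9" the pipeline above
+  # prints one ',' per line and counts 2 of them; the increment below then
+  # gives NECENGRP=3, i.e. one ecen group per IAU forecast hour.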
+ ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.echgres b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ediag b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.efcs b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.efcs new file mode 100644 index 0000000000..95c2cb58de --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.efcs @@ -0,0 +1,97 @@ +#! /usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE_ENS}" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." 
-a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.eobs b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. $EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.epos b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.esfc b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.eupd b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.eupd @@ -0,0 +1,34 @@ +#! 
/usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." # Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.fcst b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.fcst new file mode 100644 index 0000000000..e0943c33ce --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.fcst @@ -0,0 +1,431 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." 
+fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export FCSTEXEC="ufs_model_haiqin.x" +else + export FCSTEXEC="ufs_model.x" +fi + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".false." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. 
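+# For reference on the gwd/PBL settings above: with LEVS=128 (from
+# config.base), launch_level=$(echo "128/2.35" | bc) evaluates to 54 (bc
+# truncates at its default scale of 0), and since
+# CCPP_SUITE="FV3_GFS_v17_p8_c3" is not one of the MYNN suites the PBL block
+# keeps satmedmf=".true." (isatmedmf=1), so tbf="_satmedmf".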
+ +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +export progsigma=".true." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export imfdeepcnv=5 + export imfshalcnv=5 + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + else + export imfshalcnv=2 + fi +fi + +#Convection schemes ### JKH - affects field table name +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + #JKH ??? export dt_inner=40. ### JKH - 10dec + #JKH export sedi_semi=??? + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + #JKHif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_thompson" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export ltaerosol=".true." + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke${tbp}" + else + export ltaerosol=".false." + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + #JKHexport FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." 
+ export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
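+  # For reference, a worked example of the restart_interval list built above
+  # (hypothetical values; this experiment keeps restart_interval_gfs=0, so
+  # the first branch simply sets restart_interval="$FHMAX_GFS"):
+  #   restart_interval_gfs=12, FHMAX_GFS=48, DOIAU=YES, IAU_OFFSET=6
+  #   xfh starts at 12+(6/2)=15, then 27, 39; 51 > 48 ends the loop
+  #   => restart_interval="15 27 39"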
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.fit2obs b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.gempak b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ice b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.landanl b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.landanl @@ -0,0 +1,23 @@ +#! 
/usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.landanlfinal b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.landanlfinal new file mode 100644 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.landanlinit b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.landanlinit new file mode 100644 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.landanlrun b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.landanlrun new file mode 100644 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.metp b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
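+# For reference, the model_file_format template in the input settings above
+# uses METplus filename templating: for a 24 h forecast from this
+# experiment's 2022110900 cycle with CDUMP=gfs it would resolve to
+# pgbf24.gfs.2022110900.grib2 (assuming the usual {lead?fmt=%2H} and
+# {init?fmt=%Y%m%d%H} expansions).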
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.nsst b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.nsst new file mode 100644 index 0000000000..235c91f08b --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "${PDY}${cyc}" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocn b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocnanal b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocnanalbmat b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++
b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocnanalchkpt b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocnanalpost b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocnanalprep b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocnanalrun b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocnanalvrfy b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocnpost b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.post b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.post new file mode 100644 index 0000000000..3fca0a7b65 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.post @@ -0,0 +1,44 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_dwn=16 +elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_dwn=24 +elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_dwn=16 +else + export npe_dwn=24 +fi + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.postsnd b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.prep b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.prep new file mode 100644 index 0000000000..b05b82a43e --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "${PDY}${cyc}" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.preplandobs b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.preplandobs new file mode 100644 index 0000000000..d69b0f7f59 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.preplandobs @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.preplandobs ########## +# Land Obs Prep specific + +echo "BEGIN: config.preplandobs" + +# Get task specific resources +. "${EXPDIR}/config.resources" preplandobs + +export FIMS_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/fims.nml.j2" +export IMS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/prep_ims.yaml" +export CALCFIMSEXE="${HOMEgfs}/exec/calcfIMS.exe" +export IMS2IODACONV="${HOMEgfs}/ush/imsfv3_scf2ioda.py" + +echo "END: config.preplandobs" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.resources b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.resources new file mode 100644 index 0000000000..38efea7882 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.resources @@ -0,0 +1,972 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "coupled_ic aerosol_init" + echo "prep preplandobs" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_node_max=24 + elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ "${machine}" = "HERA" ]]; then + export npe_node_max=40 +elif [[ "${machine}" = "S4" ]]; then + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_node_max=32 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ "${machine}" = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ "${step}" = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "preplandobs" ]]; then + export wtime_preplandobs="00:05:00" + npe_preplandobs=1 + export npe_preplandobs + export nth_preplandobs=1 + npe_node_preplandobs=1 + export npe_node_preplandobs + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ "${step}" = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ "${step}" = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ "${step}" = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export 
memory_wavepostsbs_gfs="10GB" + +elif [[ "${step}" = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ "${step}" = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ "${step}" = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ "${step}" = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ 
"${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif [[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / 
${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ "${step}" = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ "${CASE}" = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ "${machine}" = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ "${machine}" = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ "${step}" = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ "${step}" = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export layout_x=${layout_x_gfs} + export 
layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components; they usually land on the atmosphere tasks. + # However, it is suggested to limit the mediator PETS to 300, since a larger count may slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case ${CASE} in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + #JKHdeclare -x "wtime_${step}_gfs"="06:00:00" + declare -x "wtime_${step}_gfs"="04:00:00" ## JKH - make walltime smaller + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 +
;; + esac + + unset _CDUMP _CDUMP_LIST + unset NTASKS_TOT + +elif [[ "${step}" = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ "${machine}" == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ "${step}" = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="00:20:00" ## JKH - make walltime smaller + #JKH export wtime_post_gfs="01:00:00" + #JKH export npe_post=126 + export npe_post=${npe_node_max} ## JKH - change to use 1 node for post + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ "${step}" = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ "${step}" = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ "${step}" = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ "${step}" = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ "${step}" = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ "${step}" = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ "${step}" = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ "${machine}" == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ "${machine}" == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ "${step}" = "echgres" ]];
then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif [[ "${step}" = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ "${step}" = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ "${step}" = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ "${step}" = "arch" || "${step}" = "earc" || "${step}" = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ "${step}" = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ "${step}" = "eobs" || "${step}" = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eobs=200 + elif [[ "${CASE}" = "C384" ]]; then + export npe_eobs=100 + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ "${machine}" = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ "${step}" = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ "${step}" = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if 
[[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ "${CASE}" = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ "${machine}" = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ "${step}" = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ "${CASE}" = "C384" || "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ "${step}" = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ "${step}" = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ "${step}" = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ "${step}" = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.resources.nco.static b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.resources.nco.static new file mode 100644 index 0000000000..d98e985b95 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.resources.nco.static @@ -0,0 +1,344 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + +elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + 
export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.sfcanl b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ufs b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ufs new file mode 100644 index 0000000000..7566ab32ae --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.ufs @@ -0,0 +1,378 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" + echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" 
+ exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=1 + if [[ "${machine}" == "HERA" ]] ; then + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=14 + elif [[ "${PARTITION_BATCH}" == "vjet" ]] ; then + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + elif [[ "${PARTITION_BATCH}" == "xjet" ]] ; then + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=14 + fi + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain 
blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" 
+ exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "glo_025") + ntasks_ww3=262 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.vrfy b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wafs b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wafsblending b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wafsblending0p25 b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wafsgcip b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wafsgrib2 b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wafsgrib20p25 b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wave b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wave new file mode 100644 index 0000000000..ba7b7ad259 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wave @@ -0,0 +1,193 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +export waveGRD=${waveGRD:-'mx025'} + +#grid dependent variable defaults +export waveGRDN='1' # grid number for ww3_multi +export waveGRDG='10' # grid group for ww3_multi +export USE_WAV_RMP='NO' # YES/NO rmp grid remapping pre-processed coefficients +export waveMULTIGRID='.false.' # .true./.false. for multi or shel +export MESH_WAV="mesh.${waveGRD}.nc" # Mesh grid for wave model for CMEPS +export waveesmfGRD=' ' # input grid for multigrid + +#Grid dependent variables for various grids +case "${waveGRD}" in + "gnh_10m;aoc_9km;gsh_15m") + #GFSv16 settings: + export waveGRDN='1 2 3' + export waveGRDG='10 20 30' + export USE_WAV_RMP='YES' + export waveMULTIGRID='.true.' + export IOSRV='3' + export MESH_WAV=' ' + export waveesmfGRD='glox_10m' + export waveuoutpGRD='points' + export waveinterpGRD='glo_15mxt at_10m ep_10m wc_10m glo_30m' + export wavepostGRD='gnh_10m aoc_9km gsh_15m' + ;; + "gwes_30m") + #Grid used for P8 + export waveinterpGRD=' ' + export wavepostGRD='gwes_30m' + ;; + "mx025") + #Grid used for HR1 (tripolar 1/4 deg) + export waveinterpGRD='reg025' + export wavepostGRD=' ' + ;; + "glo_025") + #GEFSv13 regular lat/lon 1/4 deg grid + export waveinterpGRD=' ' + export wavepostGRD='glo_025' + ;; + *) + echo "No grid specific wave config values" + ;; +esac + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-${waveGRD}} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-${waveGRD}} # Native computational grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days 
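# Editor's note (summary of the branch above/below, no new settings): a gdas run
# cycles the wave model every ${assim_freq} (6) hours and only needs 48 h of RTOFS
# currents; a cycled gfs run (gfs_cyc != 0) also cycles 6-hourly but carries
# currents out to 192 h; the else branch below is the free-forecast case (WAVHCYC=0).
# The output rates above likewise reduce to simple numbers, e.g. with the default
# FHOUT_HF_WAV=1 and DTPNT_WAV=3600: DTFLD_WAV = 1*3600 = 3600 s and FHINCP_WAV = 1 h.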
+else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_shel/multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'1'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.waveawipsbulls b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. 
$EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.waveawipsgridded b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wavegempak b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.waveinit b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wavepostbndpnt b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wavepostbndpntbll b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wavepostpnt b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wavepostsbs b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wavepostsbs new file mode 100644 index 0000000000..f9f8c81d44 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.wavepostsbs @@ -0,0 +1,28 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +if [[ -z ${waveinterpGRD} ]]; then + export DOGRI_WAV='YES' # Create interpolated grids +else + export DOGRI_WAV='NO' # Do not create interpolated grids +fi +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.waveprep b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/runcmds b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/runcmds new file mode 100644 index 0000000000..e15e58e10b --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/runcmds @@ -0,0 +1,6 @@ + +rocotorun -w storms-c3.xml -d storms-c3.db +rocotostat -w storms-c3.xml -d storms-c3.db + +rocotorun -w v17_p8_c3.xml -d v17_p8_c3.db +rocotostat -w v17_p8_c3.xml -d v17_p8_c3.db diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/storms-c3.xml b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/storms-c3.xml new file mode 100644 index 0000000000..dccb34dcac --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/storms-c3.xml @@ -0,0 +1,159 @@ + + + + + + + + + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + 201908281200 201909241200 27:00:00:00 + 201909141800 201910060000 21:06:00:00 + 201907020600 201912221200 173:06:00:00 + 201907110000 201909141200 65:12:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3 + batch + hera + 00:02:00 + 1:ppn=1:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + 
&JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3 + batch + hera + 02:40:00 + 95:ppn=40:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3 + batch + hera + 00:20:00 + 1:ppn=40:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + diff --git a/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/v17_p8_c3.xml b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/v17_p8_c3.xml new file mode 100644 index 0000000000..d40901525a --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_1wg_14wt/v17_p8_c3.xml @@ -0,0 +1,155 @@ + + + + + + + + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3 + batch + hera + 00:02:00 + 1:ppn=1:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3 + batch + hera + 02:40:00 + 95:ppn=40:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3 + batch + hera 
+ 00:20:00 + 1:ppn=40:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/12x12x2wgx12wt b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/12x12x2wgx12wt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/1node_post b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/1node_post new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.aero b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.aeroanl b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.aeroanlfinal b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.aeroanlinit b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.aeroanlrun b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.aerosol_init b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.anal b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.anal new file mode 100644 index 0000000000..e3a17f9c6a --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=45,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${PDY}${cyc}" -ge "2020052612" && "${PDY}${cyc}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${PDY}${cyc}" -ge "2020082412" && "${PDY}${cyc}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate 
Metop-C GNSSRO + if [[ "${PDY}${cyc}" -ge "2020091612" && "${PDY}${cyc}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${PDY}${cyc}" -ge "2021031712" && "${PDY}${cyc}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${PDY}${cyc}" -ge "2020011600" && "${PDY}${cyc}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${PDY}${cyc}" -ge "2020022012" && "${PDY}${cyc}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${PDY}${cyc}" -ge "2021052118" && "${PDY}${cyc}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${PDY}${cyc}" -ge "2021092206" && "${PDY}${cyc}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.analcalc b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.analdiag b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.arch b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.arch new file mode 100644 index 0000000000..6a0f6306a8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.arch @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. "${EXPDIR}/config.resources" arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +echo "END: config.arch" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.atmanl b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.atmanl new file mode 100644 index 0000000000..c045704fa2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.atmanl @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.atmanlfinal b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.atmanlinit b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.atmanlrun b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.atmensanl b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.atmensanl new file mode 100644 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.atmensanlfinal b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.atmensanlfinal new file mode 100644 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.atmensanlinit b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.atmensanlinit new file mode 100644 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.atmensanlrun b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.atmensanlrun new file mode 100644 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.awips b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.base b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.base new file mode 100644 index 0000000000..8163cd368d --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.base @@ -0,0 +1,383 @@ +#! 
/usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="HERA" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="hera" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT="fim" + +# Directories relative to installation areas: +export HOMEgfs=/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/scratch1/NCEPDEV/global/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/scratch1/NCEPDEV/global/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/scratch1/NCEPDEV/global/glopara/dump" + +# USER specific paths +export HOMEDIR="/scratch1/BMC/gsd-fv3-dev/NCEPDEV/global/${USER}" +export STMP="${ROTDIR}/.." +export PTMP="${ROTDIR}/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/scratch1/NCEPDEV/global/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022110900 +export EDATE=2022110900 +export EXP_WARM_START=".false." 
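# Editor's note: with SDATE and EDATE both 2022110900, MODE="forecast-only", and
# EXP_WARM_START=".false." above, this experiment is a single cold-started
# forecast-only cycle; the data-assimilation/cycling settings further below are
# retained for completeness but are generally not exercised in this mode.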
+export assim_freq=6 +export PSLOT="v17_p8_c3_12x12_2wg_12wt" +export EXPDIR="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/${PSLOT}" +export ROTDIR="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_c3" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='mx025' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
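# Editor's note (worked example of the suffix logic in this branch): APP="S2SWA"
# would pass both pattern tests above, so confignamevarfornems is built up as
# "cpld" -> "cpld_aero" -> "cpld_aero_outerwave" by the appends in this case block.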
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#JKHexport FHMAX_GFS_00=${FHMAX_GFS_00:-24} +#JKHexport FHMAX_GFS_06=${FHMAX_GFS_06:-24} +#JKHexport FHMAX_GFS_12=${FHMAX_GFS_12:-24} +#JKHexport FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ### JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="NO" +export DO_JEDIATMENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.com b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.com new file mode 100644 index 0000000000..6a824012c6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.com @@ -0,0 +1,93 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. 
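# Editor's sketch (hypothetical values, for illustration only): given
#   ROTDIR=/path/to/comrot  RUN=gfs  YMD=20221109  HH=00  MEMDIR=""
# the template declared below,
#   COM_ATMOS_HISTORY_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}/model_data/atmos/history'
# resolves to
#   /path/to/comrot/gfs.20221109/00//model_data/atmos/history
# after generate_com() (defined in ush/preamble.sh) performs the substitution; the
# empty ${MEMDIR} segment is also why the rocoto XML dependencies above contain a
# double slash (e.g. ".../@H//model_data/atmos/...").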
+# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_LAND_ANALYSIS_TMPL=${COM_BASE}'/analysis/land' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.coupled_ic b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.coupled_ic @@ -0,0 +1,43 @@ +#! 
/usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.defaults.s2sw b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.defaults.s2sw new file mode 100644 index 0000000000..1b0becefec --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.defaults.s2sw @@ -0,0 +1,26 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." + +# config.wave +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.earc b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ecen b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) 
+ ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.echgres b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ediag b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.efcs b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.efcs new file mode 100644 index 0000000000..95c2cb58de --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.efcs @@ -0,0 +1,97 @@ +#! /usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE_ENS}" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." 
-a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.eobs b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. $EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.epos b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.esfc b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.eupd b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.eupd @@ -0,0 +1,34 @@ +#! 
/usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." # Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.fcst b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.fcst new file mode 100644 index 0000000000..e0943c33ce --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.fcst @@ -0,0 +1,431 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." 
+fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export FCSTEXEC="ufs_model_haiqin.x" +else + export FCSTEXEC="ufs_model.x" +fi + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".false." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. 
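For reference, a worked example of the launch_level arithmetic used in both gwd_opt branches above. This is a standalone sketch, not part of config.fcst; LEVS=128 is an assumed value (the 128-level case is the one these configs special-case elsewhere).

# Illustration only: launch_level for an assumed LEVS=128
LEVS=128
launch_level=$(echo "${LEVS}/2.35" | bc)   # bc truncates at its default scale of 0: 128/2.35 -> 54
echo "${launch_level}"                     # prints: 54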
+ +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +export progsigma=".true." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export imfdeepcnv=5 + export imfshalcnv=5 + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + else + export imfshalcnv=2 + fi +fi + +#Convection schemes ### JKH - affects field table name +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + #JKH ??? export dt_inner=40. ### JKH - 10dec + #JKH export sedi_semi=??? + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + #JKHif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_thompson" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export ltaerosol=".true." + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke${tbp}" + else + export ltaerosol=".false." + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + #JKHexport FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." 
+ export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." 
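A worked run of the rst_list loop a few lines above (standalone sketch, not part of config.fcst). The inputs are hypothetical: this config sets restart_interval_gfs=0, which instead collapses to restart_interval="$FHMAX_GFS", so the loop only matters when a user overrides it, e.g. restart_interval_gfs=12 with IAU_OFFSET=6 and FHMAX_GFS=120.

# Illustration only: build the GFS restart-hour list for assumed inputs
restart_interval_gfs=12; IAU_OFFSET=6; FHMAX_GFS=120
rst_list=""
xfh=$((restart_interval_gfs + (IAU_OFFSET / 2)))   # 12 + 3 = 15
while [ ${xfh} -le ${FHMAX_GFS} ]; do
  rst_list="${rst_list} ${xfh}"
  xfh=$((xfh + restart_interval_gfs))
done
echo "${rst_list}"   # prints " 15 27 39 51 63 75 87 99 111" (leading space is harmless)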
+ + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.fit2obs b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.gempak b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ice b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.landanl b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.landanl @@ -0,0 +1,23 @@ +#! 
/usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.landanlfinal b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.landanlfinal new file mode 100644 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.landanlinit b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.landanlinit new file mode 100644 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.landanlrun b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.landanlrun new file mode 100644 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.metp b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
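A hedged illustration of what the model_file_format template above resolves to. The {lead?fmt=%2H} and {init?fmt=%Y%m%d%H} tokens are filled in by METplus/verif-global at run time; the bash below only mimics the resulting file name for assumed values (CDUMP=gfs, init 2023010100, 24-h lead) and is not how METplus itself performs the substitution.

# Illustration only: expected resolved name for the pgbf template
CDUMP=gfs; init=2023010100; lead=24
printf 'pgbf%02d.%s.%s.grib2\n' "${lead}" "${CDUMP}" "${init}"   # -> pgbf24.gfs.2023010100.grib2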
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.nsst b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.nsst new file mode 100644 index 0000000000..235c91f08b --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash
+
+########## config.nsst ##########
+# NSST specific
+
+echo "BEGIN: config.nsst"
+
+# NSST parameters contained within nstf_name
+
+# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled
+export NST_MODEL=2
+
+# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON,
+export NST_SPINUP=0
+if [[ "${PDY}${cyc}" -lt "2017072000" ]]; then
+ export NST_SPINUP=1
+fi
+
+# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON
+export NST_RESV=0
+
+# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction)
+export ZSEA1=0
+export ZSEA2=0
+
+export NST_GSI=3 # default 0: No NST info at all;
+ # 1: Input NST info but not used in GSI;
+ # 2: Input NST info, used in CRTM simulation, no Tr analysis
+ # 3: Input NST info, used in both CRTM simulation and Tr analysis
+export NSTINFO=0 # number of elements added in obs. data array (default = 0)
+if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi
+
+echo "END: config.nsst"
diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocn b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocn
new file mode 100644
index 0000000000..7d14e3dd52
--- /dev/null
+++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocn
@@ -0,0 +1,23 @@
+#! /usr/bin/env bash
+
+echo "BEGIN: config.ocn"
+
+# MOM_input template to use
+export MOM_INPUT="MOM_input_template_${OCNRES}"
+
+export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False)
+export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False)
+
+# Templated variables in MOM_input_template
+export MOM6_USE_LI2016="True" # set to False for restart reproducibility
+export MOM6_THERMO_SPAN="False"
+export MOM6_ALLOW_LANDMASK_CHANGES="False"
+
+if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then
+ export ODA_INCUPD="True"
+else
+ export ODA_INCUPD="False"
+fi
+export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment
+
+echo "END: config.ocn"
diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocnanal b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocnanal
new file mode 100644
index 0000000000..f5925809fc
--- /dev/null
+++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocnanal
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+########## config.ocnanal ##########
+# configuration common to all ocean analysis tasks
+
+echo "BEGIN: config.ocnanal"
+
+export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config
+export OBS_LIST=
+[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml
+export OBS_YAML=${OBS_LIST}
+export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml
+export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25
+export SOCA_VARS=tocn,socn,ssh
+export SABER_BLOCKS_YAML=
+export SOCA_NINNER=50
+export CASE_ANL=C48
+export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent
+export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin
+
+# R2D2
+export R2D2_OBS_DB=shared
+export R2D2_OBS_DUMP=s2s_v1
+export R2D2_OBS_SRC=gdas_marine
+export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed
+export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2
+
+# NICAS
+export NICAS_RESOL=1
+export NICAS_GRID_SIZE=15000
+
+echo "END: config.ocnanal"
diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocnanalbmat b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocnanalbmat
new file mode 100644
index 0000000000..024da5f51b
--- /dev/null
+++
b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocnanalchkpt b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocnanalpost b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocnanalprep b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocnanalrun b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocnanalvrfy b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocnpost b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.post b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.post new file mode 100644 index 0000000000..3fca0a7b65 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.post @@ -0,0 +1,44 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_dwn=16 +elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_dwn=24 +elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_dwn=16 +else + export npe_dwn=24 +fi + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.postsnd b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.prep b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.prep new file mode 100644 index 0000000000..b05b82a43e --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "${PDY}${cyc}" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.preplandobs b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.preplandobs new file mode 100644 index 0000000000..d69b0f7f59 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.preplandobs @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.preplandobs ########## +# Land Obs Prep specific + +echo "BEGIN: config.preplandobs" + +# Get task specific resources +. "${EXPDIR}/config.resources" preplandobs + +export FIMS_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/fims.nml.j2" +export IMS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/prep_ims.yaml" +export CALCFIMSEXE="${HOMEgfs}/exec/calcfIMS.exe" +export IMS2IODACONV="${HOMEgfs}/ush/imsfv3_scf2ioda.py" + +echo "END: config.preplandobs" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.resources b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.resources new file mode 100644 index 0000000000..38efea7882 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.resources @@ -0,0 +1,972 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "coupled_ic aerosol_init" + echo "prep preplandobs" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_node_max=24 + elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ "${machine}" = "HERA" ]]; then + export npe_node_max=40 +elif [[ "${machine}" = "S4" ]]; then + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_node_max=32 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ "${machine}" = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ "${step}" = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "preplandobs" ]]; then + export wtime_preplandobs="00:05:00" + npe_preplandobs=1 + export npe_preplandobs + export nth_preplandobs=1 + npe_node_preplandobs=1 + export npe_node_preplandobs + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ "${step}" = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ "${step}" = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ "${step}" = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export 
memory_wavepostsbs_gfs="10GB" + +elif [[ "${step}" = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ "${step}" = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ "${step}" = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ "${step}" = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ 
"${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif [[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / 
${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ "${step}" = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ "${CASE}" = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ "${machine}" = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ "${machine}" = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ "${step}" = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ "${step}" = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export layout_x=${layout_x_gfs} + export 
layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested to limit the mediator PETS to 300, as using more may cause slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-${ATMPETS}} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case ${CASE} in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + #JKHdeclare -x "wtime_${step}_gfs"="06:00:00" + declare -x "wtime_${step}_gfs"="04:00:00" ## JKH - make walltime smaller + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + unset NTASKS_TOT +
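# For reference, a worked example of the PETS arithmetic above, assuming the C768 gfs settings
# that config.ufs in this experiment directory selects on HERA (layout_x_gfs=12, layout_y_gfs=12,
# nthreads_fv3_gfs=4, WRITE_GROUP_GFS=2, WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12) and an
# atmosphere-only forecast with quilting (no wave, ocean, ice, or aerosol PETS added):
#   ntasks_fv3_gfs   = 12 * 12 * 6 = 864    ->  FV3PETS   = 864 * 4 = 3456
#   ntasks_quilt_gfs = 2 * (12 * 6) = 144   ->  QUILTPETS = 144 * 4 = 576
#   ATMPETS = NTASKS_TOT = 3456 + 576 = 4032, giving npe_fcst_gfs=4032 and npe_node_fcst_gfs=40,
#   i.e. 101 nodes, which matches the 101:ppn=40 fcst request in v17_p8_c3.xml.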
+elif [[ "${step}" = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ "${machine}" == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ "${step}" = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="00:20:00" ## JKH - make walltime smaller + #JKH export wtime_post_gfs="01:00:00" + #JKH export npe_post=126 + export npe_post=${npe_node_max} ## JKH - change to use 1 node for post + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ "${step}" = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ "${step}" = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ "${step}" = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ "${step}" = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ "${step}" = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ "${step}" = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ "${step}" = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ "${machine}" == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ "${machine}" == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ "${step}" = "echgres" ]]; 
then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif [[ "${step}" = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ "${step}" = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ "${step}" = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ "${step}" = "arch" || "${step}" = "earc" || "${step}" = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ "${step}" = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ "${step}" = "eobs" || "${step}" = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eobs=200 + elif [[ "${CASE}" = "C384" ]]; then + export npe_eobs=100 + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ "${machine}" = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ "${step}" = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ "${step}" = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if 
[[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ "${CASE}" = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ "${machine}" = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ "${step}" = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ "${CASE}" = "C384" || "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ "${step}" = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ "${step}" = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ "${step}" = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ "${step}" = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.resources.nco.static b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.resources.nco.static new file mode 100644 index 0000000000..d98e985b95 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.resources.nco.static @@ -0,0 +1,344 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + +elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + 
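# Note: where npe_node_* is computed as npe_node_max / nth_*, bc performs integer division.
# For example, with npe_node_max=128 and nth_eupd=14 below, npe_node_eupd = 128 / 14 = 9 tasks
# per node (9 * 14 = 126 of the 128 cores used), so the 315 eupd tasks fit on 35 nodes.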
export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.sfcanl b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ufs b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ufs new file mode 100644 index 0000000000..cf4ca93683 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.ufs @@ -0,0 +1,378 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" + echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" 
+ exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + if [[ "${machine}" == "HERA" ]] ; then + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + elif [[ "${PARTITION_BATCH}" == "vjet" ]] ; then + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + elif [[ "${PARTITION_BATCH}" == "xjet" ]] ; then + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=14 + fi + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain 
blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" 
+ exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "glo_025") + ntasks_ww3=262 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.vrfy b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wafs b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wafsblending b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wafsblending0p25 b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wafsgcip b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wafsgrib2 b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wafsgrib20p25 b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wave b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wave new file mode 100644 index 0000000000..ba7b7ad259 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wave @@ -0,0 +1,193 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +export waveGRD=${waveGRD:-'mx025'} + +#grid dependent variable defaults +export waveGRDN='1' # grid number for ww3_multi +export waveGRDG='10' # grid group for ww3_multi +export USE_WAV_RMP='NO' # YES/NO rmp grid remapping pre-processed coefficients +export waveMULTIGRID='.false.' # .true./.false. for multi or shel +export MESH_WAV="mesh.${waveGRD}.nc" # Mesh grid for wave model for CMEPS +export waveesmfGRD=' ' # input grid for multigrid + +#Grid dependent variables for various grids +case "${waveGRD}" in + "gnh_10m;aoc_9km;gsh_15m") + #GFSv16 settings: + export waveGRDN='1 2 3' + export waveGRDG='10 20 30' + export USE_WAV_RMP='YES' + export waveMULTIGRID='.true.' + export IOSRV='3' + export MESH_WAV=' ' + export waveesmfGRD='glox_10m' + export waveuoutpGRD='points' + export waveinterpGRD='glo_15mxt at_10m ep_10m wc_10m glo_30m' + export wavepostGRD='gnh_10m aoc_9km gsh_15m' + ;; + "gwes_30m") + #Grid used for P8 + export waveinterpGRD=' ' + export wavepostGRD='gwes_30m' + ;; + "mx025") + #Grid used for HR1 (tripolar 1/4 deg) + export waveinterpGRD='reg025' + export wavepostGRD=' ' + ;; + "glo_025") + #GEFSv13 regular lat/lon 1/4 deg grid + export waveinterpGRD=' ' + export wavepostGRD='glo_025' + ;; + *) + echo "No grid specific wave config values" + ;; +esac + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-${waveGRD}} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-${waveGRD}} # Native computational grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days 
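# For reference, with the defaults above (FHOUT_HF_WAV=1, DTPNT_WAV=3600) the high-frequency
# gridded output interval works out to DTFLD_WAV = 1 * 3600 = 3600 s and the point output
# stride to FHINCP_WAV = 3600 / 3600 = 1 h; a gdas cycle defaults to FHMAX_WAV=9 h of wave
# forecast while gfs cycles run out to FHMAX_GFS.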
+else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEM would be unset, so this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_shel/multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'1'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.waveawipsbulls b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. 
$EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.waveawipsgridded b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wavegempak b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.waveinit b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wavepostbndpnt b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wavepostbndpntbll b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wavepostpnt b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wavepostsbs b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wavepostsbs new file mode 100644 index 0000000000..f9f8c81d44 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.wavepostsbs @@ -0,0 +1,28 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +if [[ -z ${waveinterpGRD} ]]; then + export DOGRI_WAV='YES' # Create interpolated grids +else + export DOGRI_WAV='NO' # Do not create interpolated grids +fi +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.waveprep b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/runcmds b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/runcmds new file mode 100644 index 0000000000..714bc3036c --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_c3.xml -d v17_p8_c3.db +rocotostat -w v17_p8_c3.xml -d v17_p8_c3.db diff --git a/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/v17_p8_c3.xml b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/v17_p8_c3.xml new file mode 100644 index 0000000000..83cc120779 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_2wg_12wt/v17_p8_c3.xml @@ -0,0 +1,163 @@ + + + + + + + + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3 + batch + hera + 00:02:00 + 1:ppn=1:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3 + batch + hera + 04:00:00 + + 101:ppn=40:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + 
RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3 + batch + hera + 00:20:00 + 1:ppn=40:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.aero b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanl b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlfinal b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlinit b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlrun b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.aerosol_init b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.anal b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.anal new file mode 100644 index 0000000000..e3a17f9c6a --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=45,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${PDY}${cyc}" -ge "2020052612" && "${PDY}${cyc}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${PDY}${cyc}" -ge "2020082412" && "${PDY}${cyc}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + 
+ # Assimilate Metop-C GNSSRO + if [[ "${PDY}${cyc}" -ge "2020091612" && "${PDY}${cyc}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${PDY}${cyc}" -ge "2021031712" && "${PDY}${cyc}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${PDY}${cyc}" -ge "2020011600" && "${PDY}${cyc}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${PDY}${cyc}" -ge "2020022012" && "${PDY}${cyc}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${PDY}${cyc}" -ge "2021052118" && "${PDY}${cyc}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${PDY}${cyc}" -ge "2021092206" && "${PDY}${cyc}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.analcalc b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.analdiag b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.arch b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.arch new file mode 100644 index 0000000000..6a0f6306a8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.arch @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. "${EXPDIR}/config.resources" arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +echo "END: config.arch" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmanl b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmanl new file mode 100644 index 0000000000..c045704fa2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmanl @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlfinal b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlinit b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlrun b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanl b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanl new file mode 100644 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlfinal b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlfinal new file mode 100644 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlinit b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlinit new file mode 100644 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlrun b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlrun new file mode 100644 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.awips b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.base b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.base new file mode 100644 index 0000000000..1fc46c3189 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.base @@ -0,0 +1,379 @@ +#! 
/usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT="fim" + +# Directories relative to installation areas: +export HOMEgfs=/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23 +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs1/BMC/gsd-fv3-test/NCEPDEV/global/$USER" +export STMP="/home/Judy.K.Henderson/scratch1-test/gw_19may23/FV3GFSrun" +export PTMP="/home/Judy.K.Henderson/scratch1-test/gw_19may23/FV3GFSrun" +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022110900 +export EDATE=2022110900 +export EXP_WARM_START=".false." 
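Most of the per-task files added in this change (config.waveinit, config.analdiag, config.atmanlrun, and the like) follow the same minimal skeleton: announce BEGIN, source the task's entry in config.resources, set any step-specific exports, announce END. As a hedged sketch only, a hypothetical new task config would look like the following (config.mytask and MYTASK_OPTION are illustrative names, not files or variables in this change):

#! /usr/bin/env bash

########## config.mytask ##########
# Hypothetical step, shown only to illustrate the shared skeleton

echo "BEGIN: config.mytask"

# Get task specific resources
. "${EXPDIR}/config.resources" mytask

# Step-specific exports go here
export MYTASK_OPTION="value"   # placeholder setting

echo "END: config.mytask"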
+export assim_freq=6 +export PSLOT="v17_p8_c3_12x12_xjet_1wg_14wt" +export EXPDIR="/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/${PSLOT}" +export ROTDIR="/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_c3" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='mx025' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="NO" +export DO_JEDIATMENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.com b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.com new file mode 100644 index 0000000000..6a824012c6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.com @@ -0,0 +1,93 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. 
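The comment block above documents the single-quoted COM templates and the generate_com() helper defined in ush/preamble.sh. As a rough illustration of what the runtime substitution yields (this is not the actual generate_com() implementation; ROTDIR below is a placeholder path, and the template string mirrors COM_ATMOS_HISTORY_TMPL defined later in config.com):

# Illustration only: expand a single-quoted COM template once the context
# variables are set, mimicking the effect described in the comment above.
ROTDIR="/path/to/comrot/PSLOT"          # placeholder experiment ROTDIR
RUN="gfs" YMD="20221109" HH="00" MEMDIR="mem001"
COM_ATMOS_HISTORY_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}/model_data/atmos/history'
COM_ATMOS_HISTORY=$(eval echo "${COM_ATMOS_HISTORY_TMPL}")
echo "${COM_ATMOS_HISTORY}"
# -> /path/to/comrot/PSLOT/gfs.20221109/00/mem001/model_data/atmos/history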
+# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_LAND_ANALYSIS_TMPL=${COM_BASE}'/analysis/land' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.coupled_ic b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.coupled_ic @@ -0,0 +1,43 @@ +#! 
/usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.defaults.s2sw b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.defaults.s2sw new file mode 100644 index 0000000000..1b0becefec --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.defaults.s2sw @@ -0,0 +1,26 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." + +# config.wave +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.earc b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ecen b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) 
+ ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.echgres b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ediag b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.efcs b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.efcs new file mode 100644 index 0000000000..95c2cb58de --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.efcs @@ -0,0 +1,97 @@ +#! /usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE_ENS}" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." 
-a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.eobs b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. $EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.epos b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.esfc b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. 
+ +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.eupd b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." # Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.fcst b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.fcst new file mode 100644 index 0000000000..e0943c33ce --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.fcst @@ -0,0 +1,431 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." 
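The component-sourcing loop in config.fcst above uses eval and awk to read each DO_<COMPONENT> toggle and lower-case the component name. A behavior-equivalent sketch using bash (4+) indirect expansion and case conversion, shown only to illustrate what the loop does (EXPDIR and the DO_* switches are assumed to come from config.base):

# Sketch only: same toggle-driven sourcing as the loop in config.fcst,
# written with ${!var} indirection and ${var,,} lower-casing (bash >= 4).
for component in WAVE OCN ICE AERO; do
  control="DO_${component}"
  if [[ "${!control:-NO}" == "YES" ]]; then
    source "${EXPDIR}/config.${component,,}"   # e.g. config.wave, config.aero
  fi
done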
+ +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export FCSTEXEC="ufs_model_haiqin.x" +else + export FCSTEXEC="ufs_model.x" +fi + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".false." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." 
]]; then tbf="_satmedmf" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +export progsigma=".true." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export imfdeepcnv=5 + export imfshalcnv=5 + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + else + export imfshalcnv=2 + fi +fi + +#Convection schemes ### JKH - affects field table name +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + #JKH ??? export dt_inner=40. ### JKH - 10dec + #JKH export sedi_semi=??? + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + #JKHif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_thompson" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export ltaerosol=".true." + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke${tbp}" + else + export ltaerosol=".false." 
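# Illustrative note: the ${tbf}/${tbp} suffixes set earlier select suite-specific field
# tables. For example, with progsigma=".true." the Thompson table below resolves to
#   field_table_thompson_noaero_tke_progsigma
# while suites that set progsigma=.false. (e.g. FV3_GFS_v17_p8_c3 above) leave tbp empty
# and use field_table_thompson_noaero_tke.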
+ export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + #JKHexport FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." 
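# Illustrative examples of the restart_interval forms documented above (hour values
# hypothetical; interpretation follows the comments in this block):
#   restart_interval=6        -> restarts every 6 hours and at the end of the forecast
#   restart_interval="12 -1"  -> a single restart at forecast hour 12 only
#   restart_interval="3 6"    -> restarts at exactly f003 and f006, the pair selected
#                                above when DOIAU="YES" so the cycle can also warm-start
#                                from the beginning of the IAU window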
+ +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.fit2obs b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.gempak b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. 
$EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ice b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.landanl b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.landanlfinal b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.landanlfinal new file mode 100644 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.landanlinit b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.landanlinit new file mode 100644 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.landanlrun b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.landanlrun new file mode 100644 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.metp b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. +export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export 
precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.nsst b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.nsst new file mode 100644 index 0000000000..235c91f08b --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.nsst @@ -0,0 +1,33 @@ +#! /usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "${PDY}${cyc}" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocn b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocn @@ -0,0 +1,23 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable is determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is time interval for applying increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanal b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size reolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalbmat b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalchkpt b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalpost b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalprep b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalrun b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalvrfy b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnpost b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.post b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.post new file mode 100644 index 0000000000..3fca0a7b65 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.post @@ -0,0 +1,44 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_dwn=16 +elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_dwn=24 +elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_dwn=16 +else + export npe_dwn=24 +fi + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.postsnd b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.prep b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.prep new file mode 100644 index 0000000000..b05b82a43e --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. $EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. 
Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "${PDY}${cyc}" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.preplandobs b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.preplandobs new file mode 100644 index 0000000000..d69b0f7f59 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.preplandobs @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.preplandobs ########## +# Land Obs Prep specific + +echo "BEGIN: config.preplandobs" + +# Get task specific resources +. "${EXPDIR}/config.resources" preplandobs + +export FIMS_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/fims.nml.j2" +export IMS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/prep_ims.yaml" +export CALCFIMSEXE="${HOMEgfs}/exec/calcfIMS.exe" +export IMS2IODACONV="${HOMEgfs}/ush/imsfv3_scf2ioda.py" + +echo "END: config.preplandobs" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.resources b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.resources new file mode 100644 index 0000000000..38efea7882 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.resources @@ -0,0 +1,972 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "coupled_ic aerosol_init" + echo "prep preplandobs" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_node_max=24 + elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ "${machine}" = "HERA" ]]; then + export npe_node_max=40 +elif [[ "${machine}" = "S4" ]]; then + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_node_max=32 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ "${machine}" = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ "${step}" = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "preplandobs" ]]; then + export wtime_preplandobs="00:05:00" + npe_preplandobs=1 + export npe_preplandobs + export nth_preplandobs=1 + npe_node_preplandobs=1 + export npe_node_preplandobs + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ "${step}" = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ "${step}" = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ "${step}" = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export 
memory_wavepostsbs_gfs="10GB" + +elif [[ "${step}" = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ "${step}" = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ "${step}" = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ "${step}" = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ 
"${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif [[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / 
${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ "${step}" = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ "${CASE}" = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ "${machine}" = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ "${machine}" = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ "${step}" = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ "${step}" = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export layout_x=${layout_x_gfs} + export 
layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case ${CASE} in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + #JKHdeclare -x "wtime_${step}_gfs"="06:00:00" + declare -x "wtime_${step}_gfs"="04:00:00" ## JKH - make walltime smaller + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + 
;; + esac + + unset _CDUMP _CDUMP_LIST + unset NTASKS_TOT + +elif [[ "${step}" = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ "${machine}" == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requiremtn + npe_ocnpost=2 + fi + +elif [[ "${step}" = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="00:20:00" ## JKH - make walltime smaller + #JKH export wtime_post_gfs="01:00:00" + #JKH export npe_post=126 + export npe_post=${npe_node_max} ## JKH - change to use 1 node for post + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ "${step}" = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ "${step}" = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ "${step}" = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ "${step}" = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ "${step}" = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ "${step}" = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ "${step}" = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ "${machine}" == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ "${machine}" == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ "${step}" = "echgres" ]]; 
then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif [[ "${step}" = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ "${step}" = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ "${step}" = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ "${step}" = "arch" || "${step}" = "earc" || "${step}" = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ "${step}" = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ "${step}" = "eobs" || "${step}" = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eobs=200 + elif [[ "${CASE}" = "C384" ]]; then + export npe_eobs=100 + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ "${machine}" = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ "${step}" = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ "${step}" = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if 
[[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ "${CASE}" = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ "${machine}" = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ "${step}" = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ "${CASE}" = "C384" || "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ "${step}" = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ "${step}" = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ "${step}" = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ "${step}" = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.resources.nco.static b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.resources.nco.static new file mode 100644 index 0000000000..d98e985b95 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.resources.nco.static @@ -0,0 +1,344 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + +elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + 
export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.sfcanl b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ufs b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ufs new file mode 100644 index 0000000000..cf4ca93683 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.ufs @@ -0,0 +1,378 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src 
scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + if [[ "${machine}" == "HERA" ]] ; then + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + elif [[ "${PARTITION_BATCH}" == "vjet" ]] ; then + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + elif [[ "${PARTITION_BATCH}" == "xjet" ]] ; then + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=14 + fi + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + 
MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "glo_025") + ntasks_ww3=262 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.vrfy b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafs b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsblending b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsblending0p25 b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgcip b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgrib2 b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgrib20p25 b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wave b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wave new file mode 100644 index 0000000000..ba7b7ad259 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wave @@ -0,0 +1,193 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +export waveGRD=${waveGRD:-'mx025'} + +#grid dependent variable defaults +export waveGRDN='1' # grid number for ww3_multi +export waveGRDG='10' # grid group for ww3_multi +export USE_WAV_RMP='NO' # YES/NO rmp grid remapping pre-processed coefficients +export waveMULTIGRID='.false.' # .true./.false. for multi or shel +export MESH_WAV="mesh.${waveGRD}.nc" # Mesh grid for wave model for CMEPS +export waveesmfGRD=' ' # input grid for multigrid + +#Grid dependent variables for various grids +case "${waveGRD}" in + "gnh_10m;aoc_9km;gsh_15m") + #GFSv16 settings: + export waveGRDN='1 2 3' + export waveGRDG='10 20 30' + export USE_WAV_RMP='YES' + export waveMULTIGRID='.true.' + export IOSRV='3' + export MESH_WAV=' ' + export waveesmfGRD='glox_10m' + export waveuoutpGRD='points' + export waveinterpGRD='glo_15mxt at_10m ep_10m wc_10m glo_30m' + export wavepostGRD='gnh_10m aoc_9km gsh_15m' + ;; + "gwes_30m") + #Grid used for P8 + export waveinterpGRD=' ' + export wavepostGRD='gwes_30m' + ;; + "mx025") + #Grid used for HR1 (tripolar 1/4 deg) + export waveinterpGRD='reg025' + export wavepostGRD=' ' + ;; + "glo_025") + #GEFSv13 regular lat/lon 1/4 deg grid + export waveinterpGRD=' ' + export wavepostGRD='glo_025' + ;; + *) + echo "No grid specific wave config values" + ;; +esac + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-${waveGRD}} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-${waveGRD}} # Native computational grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days 
+else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_shel/multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'1'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.waveawipsbulls b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. 
$EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.waveawipsgridded b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavegempak b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.waveinit b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostbndpnt b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostbndpntbll b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostpnt b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostsbs b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostsbs new file mode 100644 index 0000000000..f9f8c81d44 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.wavepostsbs @@ -0,0 +1,28 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +if [[ -z ${waveinterpGRD} ]]; then + export DOGRI_WAV='YES' # Create interpolated grids +else + export DOGRI_WAV='NO' # Do not create interpolated grids +fi +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.waveprep b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/logs/2022110900.log b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/logs/2022110900.log new file mode 100644 index 0000000000..7d4bdbc020 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/logs/2022110900.log @@ -0,0 +1,123 @@ +2023-06-17 09:04:34 +0000 :: fe4 :: Submitting gfsgetic +2023-06-17 09:04:34 +0000 :: fe4 :: Submission status of gfsgetic is pending at druby://fe4:33050 +2023-06-17 09:06:33 +0000 :: fe4 :: Submission status of previously pending gfsgetic is failure! sbatch: error: Batch job submission failed: Invalid qos specification +2023-06-17 09:06:33 +0000 :: fe4 :: Submitting gfsgetic +2023-06-17 09:06:33 +0000 :: fe4 :: Submission status of gfsgetic is pending at druby://fe4:44603 +2023-06-17 09:08:10 +0000 :: fe4 :: Submission status of previously pending gfsgetic is failure! 
sbatch: error: Batch job submission failed: Invalid qos specification +2023-06-17 09:08:11 +0000 :: fe4 :: Submitting gfsgetic +2023-06-17 09:08:11 +0000 :: fe4 :: Submission status of gfsgetic is pending at druby://fe4:41872 +2023-06-17 09:08:20 +0000 :: fe4 :: Submission status of previously pending gfsgetic is success, jobid=29549646 +2023-06-17 09:08:20 +0000 :: fe4 :: Task gfsgetic, jobid=29549646, in state SUCCEEDED (COMPLETED), ran for 2.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 09:08:20 +0000 :: fe4 :: Submitting gfsfcst +2023-06-17 09:08:20 +0000 :: fe4 :: Submission status of gfsfcst is pending at druby://fe4:40026 +2023-06-17 09:09:03 +0000 :: fe4 :: Submission status of previously pending gfsfcst is success, jobid=29549647 +2023-06-17 09:09:04 +0000 :: fe4 :: Task gfsfcst, jobid=29549647, in state QUEUED (PENDING) +2023-06-17 15:15:42 +0000 :: fe6 :: Task gfsfcst, jobid=29549647, in state SUCCEEDED (COMPLETED), ran for 14254.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 15:15:42 +0000 :: fe6 :: Submitting gfspost_f000-f000 +2023-06-17 15:15:42 +0000 :: fe6 :: Submitting gfspost_f006-f006 +2023-06-17 15:15:42 +0000 :: fe6 :: Submitting gfspost_f012-f012 +2023-06-17 15:15:42 +0000 :: fe6 :: Submitting gfspost_f018-f018 +2023-06-17 15:15:42 +0000 :: fe6 :: Submitting gfspost_f024-f024 +2023-06-17 15:15:42 +0000 :: fe6 :: Submitting gfspost_f030-f030 +2023-06-17 15:15:42 +0000 :: fe6 :: Submitting gfspost_f036-f036 +2023-06-17 15:15:42 +0000 :: fe6 :: Submitting gfspost_f042-f042 +2023-06-17 15:15:42 +0000 :: fe6 :: Submitting gfspost_f048-f048 +2023-06-17 15:15:42 +0000 :: fe6 :: Submitting gfspost_f054-f054 +2023-06-17 15:15:42 +0000 :: fe6 :: Submitting gfspost_f060-f060 +2023-06-17 15:15:43 +0000 :: fe6 :: Submitting gfspost_f066-f066 +2023-06-17 15:15:43 +0000 :: fe6 :: Submitting gfspost_f072-f072 +2023-06-17 15:15:43 +0000 :: fe6 :: Submitting gfspost_f078-f078 +2023-06-17 15:15:43 +0000 :: fe6 :: Submitting gfspost_f084-f084 +2023-06-17 15:15:43 +0000 :: fe6 :: Submitting gfspost_f090-f090 +2023-06-17 15:15:43 +0000 :: fe6 :: Submitting gfspost_f096-f096 +2023-06-17 15:15:43 +0000 :: fe6 :: Submitting gfspost_f102-f102 +2023-06-17 15:15:43 +0000 :: fe6 :: Submitting gfspost_f108-f108 +2023-06-17 15:15:43 +0000 :: fe6 :: Submitting gfspost_f114-f114 +2023-06-17 15:15:43 +0000 :: fe6 :: Submitting gfspost_f120-f120 +2023-06-17 15:15:43 +0000 :: fe6 :: Submitting gfspost_f126-f126 +2023-06-17 15:15:43 +0000 :: fe6 :: Submitting gfspost_f132-f132 +2023-06-17 15:15:43 +0000 :: fe6 :: Submitting gfspost_f138-f138 +2023-06-17 15:15:43 +0000 :: fe6 :: Submitting gfspost_f144-f144 +2023-06-17 15:15:43 +0000 :: fe6 :: Cannot submit gfspost_f150-f150, because maximum global task throttle of 25 will be violated. +2023-06-17 15:15:43 +0000 :: fe6 :: Cannot submit gfspost_f156-f156, because maximum global task throttle of 25 will be violated. +2023-06-17 15:15:43 +0000 :: fe6 :: Cannot submit gfspost_f162-f162, because maximum global task throttle of 25 will be violated. +2023-06-17 15:15:43 +0000 :: fe6 :: Cannot submit gfspost_f168-f168, because maximum global task throttle of 25 will be violated. 
+2023-06-17 15:15:43 +0000 :: fe6 :: Submission of gfspost_f000-f000 succeeded, jobid=29564320 +2023-06-17 15:15:43 +0000 :: fe6 :: Submission of gfspost_f006-f006 succeeded, jobid=29564321 +2023-06-17 15:15:43 +0000 :: fe6 :: Submission of gfspost_f012-f012 succeeded, jobid=29564322 +2023-06-17 15:15:43 +0000 :: fe6 :: Submission of gfspost_f018-f018 succeeded, jobid=29564323 +2023-06-17 15:15:43 +0000 :: fe6 :: Submission of gfspost_f024-f024 succeeded, jobid=29564324 +2023-06-17 15:15:44 +0000 :: fe6 :: Submission of gfspost_f030-f030 succeeded, jobid=29564326 +2023-06-17 15:15:45 +0000 :: fe6 :: Submission of gfspost_f036-f036 succeeded, jobid=29564327 +2023-06-17 15:15:46 +0000 :: fe6 :: Submission of gfspost_f042-f042 succeeded, jobid=29564329 +2023-06-17 15:15:46 +0000 :: fe6 :: Submission of gfspost_f048-f048 succeeded, jobid=29564330 +2023-06-17 15:15:46 +0000 :: fe6 :: Submission of gfspost_f054-f054 succeeded, jobid=29564331 +2023-06-17 15:15:46 +0000 :: fe6 :: Submission of gfspost_f060-f060 succeeded, jobid=29564332 +2023-06-17 15:15:46 +0000 :: fe6 :: Submission of gfspost_f066-f066 succeeded, jobid=29564333 +2023-06-17 15:15:46 +0000 :: fe6 :: Submission of gfspost_f072-f072 succeeded, jobid=29564334 +2023-06-17 15:15:46 +0000 :: fe6 :: Submission of gfspost_f078-f078 succeeded, jobid=29564335 +2023-06-17 15:15:46 +0000 :: fe6 :: Submission of gfspost_f084-f084 succeeded, jobid=29564336 +2023-06-17 15:15:46 +0000 :: fe6 :: Submission of gfspost_f090-f090 succeeded, jobid=29564337 +2023-06-17 15:15:46 +0000 :: fe6 :: Submission of gfspost_f096-f096 succeeded, jobid=29564338 +2023-06-17 15:15:46 +0000 :: fe6 :: Submission of gfspost_f102-f102 succeeded, jobid=29564339 +2023-06-17 15:15:46 +0000 :: fe6 :: Submission of gfspost_f108-f108 succeeded, jobid=29564340 +2023-06-17 15:15:46 +0000 :: fe6 :: Submission of gfspost_f114-f114 succeeded, jobid=29564341 +2023-06-17 15:15:46 +0000 :: fe6 :: Submission of gfspost_f120-f120 succeeded, jobid=29564342 +2023-06-17 15:15:46 +0000 :: fe6 :: Submission of gfspost_f126-f126 succeeded, jobid=29564343 +2023-06-17 15:15:47 +0000 :: fe6 :: Submission of gfspost_f132-f132 succeeded, jobid=29564344 +2023-06-17 15:15:47 +0000 :: fe6 :: Submission of gfspost_f138-f138 succeeded, jobid=29564345 +2023-06-17 15:15:47 +0000 :: fe6 :: Submission of gfspost_f144-f144 succeeded, jobid=29564346 +2023-06-17 15:21:39 +0000 :: fe6 :: Task gfspost_f000-f000, jobid=29564320, in state SUCCEEDED (COMPLETED), ran for 141.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 15:21:39 +0000 :: fe6 :: Task gfspost_f006-f006, jobid=29564321, in state SUCCEEDED (COMPLETED), ran for 158.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 15:21:40 +0000 :: fe6 :: Task gfspost_f012-f012, jobid=29564322, in state SUCCEEDED (COMPLETED), ran for 151.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 15:21:40 +0000 :: fe6 :: Task gfspost_f018-f018, jobid=29564323, in state SUCCEEDED (COMPLETED), ran for 155.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 15:21:40 +0000 :: fe6 :: Task gfspost_f024-f024, jobid=29564324, in state SUCCEEDED (COMPLETED), ran for 153.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 15:21:40 +0000 :: fe6 :: Task gfspost_f030-f030, jobid=29564326, in state SUCCEEDED (COMPLETED), ran for 153.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 15:21:40 +0000 :: fe6 :: Task gfspost_f036-f036, jobid=29564327, in state SUCCEEDED (COMPLETED), ran for 153.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 15:21:40 +0000 :: fe6 :: Task 
gfspost_f042-f042, jobid=29564329, in state FAILED (FAILED), ran for 104.0 seconds, exit status=256, try=1 (of 2) +2023-06-17 15:21:40 +0000 :: fe6 :: Task gfspost_f048-f048, jobid=29564330, in state SUCCEEDED (COMPLETED), ran for 156.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 15:21:40 +0000 :: fe6 :: Task gfspost_f054-f054, jobid=29564331, in state FAILED (FAILED), ran for 103.0 seconds, exit status=256, try=1 (of 2) +2023-06-17 15:21:40 +0000 :: fe6 :: Task gfspost_f060-f060, jobid=29564332, in state SUCCEEDED (COMPLETED), ran for 147.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 15:21:40 +0000 :: fe6 :: Task gfspost_f066-f066, jobid=29564333, in state SUCCEEDED (COMPLETED), ran for 161.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 15:21:40 +0000 :: fe6 :: Task gfspost_f072-f072, jobid=29564334, in state SUCCEEDED (COMPLETED), ran for 152.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 15:21:40 +0000 :: fe6 :: Task gfspost_f078-f078, jobid=29564335, in state SUCCEEDED (COMPLETED), ran for 149.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 15:21:40 +0000 :: fe6 :: Task gfspost_f084-f084, jobid=29564336, in state SUCCEEDED (COMPLETED), ran for 154.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 15:21:40 +0000 :: fe6 :: Task gfspost_f090-f090, jobid=29564337, in state SUCCEEDED (COMPLETED), ran for 155.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 15:21:41 +0000 :: fe6 :: Task gfspost_f096-f096, jobid=29564338, in state SUCCEEDED (COMPLETED), ran for 152.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 15:21:41 +0000 :: fe6 :: Task gfspost_f102-f102, jobid=29564339, in state SUCCEEDED (COMPLETED), ran for 151.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 15:21:41 +0000 :: fe6 :: Task gfspost_f108-f108, jobid=29564340, in state SUCCEEDED (COMPLETED), ran for 158.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 15:21:41 +0000 :: fe6 :: Task gfspost_f114-f114, jobid=29564341, in state SUCCEEDED (COMPLETED), ran for 156.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 15:21:42 +0000 :: fe6 :: Task gfspost_f120-f120, jobid=29564342, in state SUCCEEDED (COMPLETED), ran for 154.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 15:21:42 +0000 :: fe6 :: Task gfspost_f126-f126, jobid=29564343, in state SUCCEEDED (COMPLETED), ran for 154.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 15:21:42 +0000 :: fe6 :: Task gfspost_f132-f132, jobid=29564344, in state SUCCEEDED (COMPLETED), ran for 151.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 15:21:42 +0000 :: fe6 :: Task gfspost_f138-f138, jobid=29564345, in state SUCCEEDED (COMPLETED), ran for 158.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 15:21:42 +0000 :: fe6 :: Task gfspost_f144-f144, jobid=29564346, in state SUCCEEDED (COMPLETED), ran for 148.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 15:21:42 +0000 :: fe6 :: Submitting gfspost_f042-f042 +2023-06-17 15:21:42 +0000 :: fe6 :: Submitting gfspost_f054-f054 +2023-06-17 15:21:42 +0000 :: fe6 :: Submitting gfspost_f150-f150 +2023-06-17 15:21:43 +0000 :: fe6 :: Submitting gfspost_f156-f156 +2023-06-17 15:21:44 +0000 :: fe6 :: Submitting gfspost_f162-f162 +2023-06-17 15:21:44 +0000 :: fe6 :: Submitting gfspost_f168-f168 +2023-06-17 15:21:44 +0000 :: fe6 :: Submission of gfspost_f042-f042 succeeded, jobid=29564569 +2023-06-17 15:21:44 +0000 :: fe6 :: Submission of gfspost_f054-f054 succeeded, jobid=29564570 +2023-06-17 15:21:44 +0000 :: fe6 :: Submission of gfspost_f150-f150 succeeded, jobid=29564571 +2023-06-17 15:21:44 +0000 :: fe6 :: 
Submission of gfspost_f156-f156 succeeded, jobid=29564572 +2023-06-17 15:21:44 +0000 :: fe6 :: Submission of gfspost_f162-f162 succeeded, jobid=29564573 +2023-06-17 15:21:45 +0000 :: fe6 :: Submission of gfspost_f168-f168 succeeded, jobid=29564574 +2023-06-17 15:43:14 +0000 :: fe6 :: Submitting gfsgetic +2023-06-17 15:43:14 +0000 :: fe6 :: Submission of gfsgetic succeeded, jobid=29565594 +2023-06-17 15:43:40 +0000 :: fe6 :: Task gfsgetic, jobid=29565594, in state SUCCEEDED (COMPLETED), ran for 3.0 seconds, exit status=0, try=1 (of 2) +2023-06-17 15:43:40 +0000 :: fe6 :: Submitting gfsfcst +2023-06-17 15:43:40 +0000 :: fe6 :: Submission status of gfsfcst is pending at druby://fe6:45825 +2023-06-18 15:14:44 +0000 :: fe5 :: Submission status of previously pending gfsfcst is success, jobid=29565602 +2023-06-18 15:14:46 +0000 :: fe5 :: Task gfsfcst, jobid=29565602, in state UNKNOWN (CANCELLED by 1254) +2023-06-18 15:15:25 +0000 :: fe5 :: Task gfsfcst, jobid=29565602, in state UNKNOWN (CANCELLED by 1254) +2023-06-18 15:15:50 +0000 :: fe5 :: Task gfsfcst, jobid=29565602, in state LOST (CANCELLED by 1254), giving up because job state could not be determined 3 consecutive times, try=1 (of 2) +2023-06-18 15:15:55 +0000 :: fe5 :: Submitting gfsfcst +2023-06-18 15:15:55 +0000 :: fe5 :: Submission status of gfsfcst is pending at druby://fe5:38905 +2023-06-22 17:04:26 +0000 :: fe8 :: Submission status of gfsfcst for cycle 202211090000 could not be retrieved because the server process at druby://fe5:38905 died +2023-06-22 17:04:27 +0000 :: fe8 :: Submission of gfsfcst for cycle 202211090000 probably, but not necessarily, failed. It will be resubmitted +2023-06-22 17:04:28 +0000 :: fe8 :: Submitting gfsfcst +2023-06-22 17:04:28 +0000 :: fe8 :: Submission status of gfsfcst is pending at druby://fe8:36359 +2023-06-22 17:04:56 +0000 :: fe8 :: Submission status of previously pending gfsfcst is success, jobid=29874800 +2023-06-22 17:04:57 +0000 :: fe8 :: Task gfsfcst, jobid=29874800, in state QUEUED (PENDING) diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/noent_c3.xml b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/noent_c3.xml new file mode 100644 index 0000000000..09fb8b7e87 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/noent_c3.xml @@ -0,0 +1,270 @@ + + + + + + + + + + + +]> + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/jobs/rocoto/makeinit_link.sh + + v17_p8_c3_12x12_xjet_1wg_14wt_gfsinit_@H + gsd-fv3-dev + batch + xjet + 00:02:00 + 1:ppn=1:tpp=1 + --export=NONE + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/v17_p8_c3_12x12_xjet_1wg_14wt/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23 + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt + ROTDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/v17_p8_c3_12x12_xjet_1wg_14wt + ICSDIR/lfs1/BMC/gsd-fv3-test/rtfim/FV3ICS_L127 + CASEC768 + COMPONENTatmos + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/RUNDIRS/v17_p8_c3_12x12_xjet_1wg_14wt + + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/v17_p8_c3_12x12_xjet_1wg_14wt/gfs.@Y@m@d/@H/model_data/atmos/input + + + 
/lfs1/BMC/gsd-fv3-test/rtfim/FV3ICS_L127/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile6.nc + /lfs1/BMC/gsd-fv3-test/rtfim/FV3ICS_L127/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile6.nc + + + + + + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/jobs/rocoto/fcst.sh + + v17_p8_c3_12x12_xjet_1wg_14wt_gfsfcst_@H + gsd-fv3-dev + batch + xjet + 04:00:00 + 158:ppn=24:tpp=1 + --export=NONE + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/v17_p8_c3_12x12_xjet_1wg_14wt/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23 + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt + ROTDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/v17_p8_c3_12x12_xjet_1wg_14wt + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/RUNDIRS/v17_p8_c3_12x12_xjet_1wg_14wt + + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/v17_p8_c3_12x12_xjet_1wg_14wt/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/v17_p8_c3_12x12_xjet_1wg_14wt/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 _f126-f126 _f132-f132 _f138-f138 _f144-f144 _f150-f150 _f156-f156 _f162-f162 _f168-f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/jobs/rocoto/post.sh + + v17_p8_c3_12x12_xjet_1wg_14wt_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 00:20:00 + 1:ppn=24:tpp=1 + --export=NONE + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/v17_p8_c3_12x12_xjet_1wg_14wt/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23 + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt + ROTDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/v17_p8_c3_12x12_xjet_1wg_14wt + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/RUNDIRS/v17_p8_c3_12x12_xjet_1wg_14wt + FHRGRP#grp# + FHRLST#lst# + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/v17_p8_c3_12x12_xjet_1wg_14wt/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/runcmds b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/runcmds new file mode 100644 index 0000000000..714bc3036c --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_c3.xml -d v17_p8_c3.db +rocotostat -w v17_p8_c3.xml -d v17_p8_c3.db diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3.crontab b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3.crontab new file mode 100644 index 0000000000..a1932975eb --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3.crontab 
@@ -0,0 +1,5 @@ + +#################### jet #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3.db -w /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3.xml +################################################################# diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3.db b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3.db new file mode 100644 index 0000000000..563ba992e2 Binary files /dev/null and b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3.db differ diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3.xml b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3.xml new file mode 100644 index 0000000000..f58bde2584 --- /dev/null +++ b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3.xml @@ -0,0 +1,273 @@ + + + + + + + + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3-dev + batch + xjet + 00:02:00 + 1:ppn=1:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + batch + xjet + 04:00:00 + 158:ppn=24:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 _f126-f126 _f132-f132 _f138-f138 _f144-f144 _f150-f150 _f156-f156 _f162-f162 _f168-f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 00:20:00 + 1:ppn=24:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + diff --git a/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3_lock.db 
b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3_lock.db new file mode 100644 index 0000000000..ddf60c6c22 Binary files /dev/null and b/FV3GFSwfm/v17_p8_c3_12x12_xjet_1wg_14wt/v17_p8_c3_lock.db differ diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/12x12x1wgx14wt b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/12x12x1wgx14wt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.aero b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.aeroanl b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.aeroanlfinal b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. 
$EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.aeroanlinit b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.aeroanlrun b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.aerosol_init b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.aerosol_init @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.anal b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.anal new file mode 100644 index 0000000000..e3a17f9c6a --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=45,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. 
in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${PDY}${cyc}" -ge "2020052612" && "${PDY}${cyc}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${PDY}${cyc}" -ge "2020082412" && "${PDY}${cyc}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ "${PDY}${cyc}" -ge "2020091612" && "${PDY}${cyc}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${PDY}${cyc}" -ge "2021031712" && "${PDY}${cyc}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. 
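# (All of the window tests in this block compare ${PDY}${cyc} as a fixed-width
#  YYYYMMDDHH number, so -ge/-lt order the retrospective periods chronologically;
#  e.g. 2020060100 falls in [2020052612, 2020082412) and therefore picks up the
#  COSMIC-2-era convinfo/errtable files above.)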
+ # Assimilate COSMIC-2 GPS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assimilation of OMPS during period of bad data + if [[ "${PDY}${cyc}" -ge "2020011600" && "${PDY}${cyc}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${PDY}${cyc}" -ge "2020022012" && "${PDY}${cyc}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${PDY}${cyc}" -ge "2021052118" && "${PDY}${cyc}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${PDY}${cyc}" -ge "2021092206" && "${PDY}${cyc}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time. + # + # Turn off assimilation of all Metop-A MHS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.analcalc b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.analdiag b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. $EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.arch b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.arch new file mode 100644 index 0000000000..6a0f6306a8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.arch @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +echo "END: config.arch" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.atmanl b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.atmanl new file mode 100644 index 0000000000..c045704fa2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.atmanl @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.atmanlfinal b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.atmanlinit b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.atmanlrun b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.atmensanl b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.atmensanl new file mode 100644 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.atmensanl @@ -0,0 +1,22 @@ +#! 
/usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.atmensanlfinal b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.atmensanlfinal new file mode 100644 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.atmensanlinit b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.atmensanlinit new file mode 100644 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.atmensanlrun b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.atmensanlrun new file mode 100644 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.awips b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.base b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.base new file mode 100644 index 0000000000..c4da9078e7 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.base @@ -0,0 +1,384 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="HERA" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="gsd-fv3" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="hera" +export PARTITION_POST_BATCH="hera" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT="fim" + +# Directories relative to installation areas: +export HOMEgfs=/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_19may23 +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/scratch1/NCEPDEV/global/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/scratch1/NCEPDEV/global/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/scratch1/NCEPDEV/global/glopara/dump" + +# USER specific paths +export HOMEDIR="/scratch1/BMC/gsd-fv3-dev/NCEPDEV/global/${USER}" +export STMP="${ROTDIR}/.." +export PTMP="${ROTDIR}/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/scratch1/NCEPDEV/global/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022110900 +export EDATE=2022110900 +export EXP_WARM_START=".false." 
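# (SDATE and EDATE bound the experiment's cycles; with both set to 2022110900,
#  MODE="forecast-only" and EXP_WARM_START=".false.", this experiment runs a
#  single cold-started forecast cycle.)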
+export assim_freq=6 +export PSLOT="v17_p8_mynn_12x12_1wg_14wt" +export EXPDIR="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_19may23/FV3GFSwfm/${PSLOT}" +export ROTDIR="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_19may23/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_mynn" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='mx025' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#JKHexport FHMAX_GFS_00=${FHMAX_GFS_00:-24} +#JKHexport FHMAX_GFS_06=${FHMAX_GFS_06:-24} +#JKHexport FHMAX_GFS_12=${FHMAX_GFS_12:-24} +#JKHexport FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ### JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="NO" +export DO_JEDIATMENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# Turn on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# scripts default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.com b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.com new file mode 100644 index 0000000000..6a824012c6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.com @@ -0,0 +1,93 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `declare -r`) +# -x: Mark variable for export (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match.
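A rough illustration of what the runtime substitution amounts to (generate_com itself lives in ush/preamble.sh and is not part of this file; the ROTDIR value below is a placeholder): expanding the history template for the 2022110900 cycle with an empty MEMDIR yields the double-slash paths visible in the Rocoto XML earlier in this change.

# illustrative expansion only -- nothing below is executed by config.com
ROTDIR=/path/to/FV3GFSrun/v17_p8_mynn_12x12_1wg_14wt
RUN=gfs YMD=20221109 HH=00 MEMDIR=""
tmpl='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}/model_data/atmos/history'
eval "com_dir=\"${tmpl}\""
echo "${com_dir}"   # .../FV3GFSrun/v17_p8_mynn_12x12_1wg_14wt/gfs.20221109/00//model_data/atmos/history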
+# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_LAND_ANALYSIS_TMPL=${COM_BASE}'/analysis/land' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.coupled_ic b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.coupled_ic @@ -0,0 +1,43 @@ +#! 
/usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.defaults.s2sw b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.defaults.s2sw new file mode 100644 index 0000000000..1b0becefec --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.defaults.s2sw @@ -0,0 +1,26 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." + +# config.wave +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.earc b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ecen b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) 
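# (e.g. with IAUFHRS_ENKF="3,6,9" the pipeline above counts 2 commas, and the
#  increment below gives NECENGRP=3, i.e. one recentering group per IAU hour)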
+ ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.echgres b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ediag b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.efcs b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.efcs new file mode 100644 index 0000000000..95c2cb58de --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.efcs @@ -0,0 +1,97 @@ +#! /usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE_ENS}" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." 
-a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.eobs b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. $EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.epos b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.esfc b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. 
+ +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.eupd b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." # Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.fcst b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.fcst new file mode 100644 index 0000000000..e0943c33ce --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.fcst @@ -0,0 +1,431 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." 
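For reference, the component-sourcing loop near the top of this file resolves DO_WAVE, DO_OCN, DO_ICE and DO_AERO through `eval echo \$$control`; the same indirection can be written without eval using bash's ${!var} expansion. This is only an equivalent sketch, not a change to the script:

# eval-free equivalent of the config-sourcing loop above
for component in WAVE OCN ICE AERO; do
  control="DO_${component}"
  if [[ "${!control}" == "YES" ]]; then
    source "${EXPDIR}/config.${component,,}"   # ${component,,} lower-cases, like the awk call
  fi
done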
+ +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export FCSTEXEC="ufs_model_haiqin.x" +else + export FCSTEXEC="ufs_model.x" +fi + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".false." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." 
]]; then tbf="_satmedmf" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +export progsigma=".true." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export imfdeepcnv=5 + export imfshalcnv=5 + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + else + export imfshalcnv=2 + fi +fi + +#Convection schemes ### JKH - affects field table name +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + #JKH ??? export dt_inner=40. ### JKH - 10dec + #JKH export sedi_semi=??? + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + #JKHif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_thompson" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export ltaerosol=".true." + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke${tbp}" + else + export ltaerosol=".false." 
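# (ltaerosol=.false. selects the non-aerosol-aware Thompson scheme, paired with
#  the no-aerosol TKE field table set just below)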
+ export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + #JKHexport FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." 
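# (Worked example for the GFS branch below, with hypothetical values: if
#  restart_interval_gfs=12, IAU_OFFSET=6 and FHMAX_GFS=120, then xfh starts at
#  12+6/2=15 and the loop yields restart_interval="15 27 39 51 63 75 87 99 111".
#  In this experiment restart_interval_gfs=0, so restart_interval is simply
#  set to $FHMAX_GFS.)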
+ +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.fit2obs b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.gempak b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ice b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ice @@ -0,0 +1,5 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.landanl b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.landanlfinal b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.landanlfinal new file mode 100644 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.landanlinit b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.landanlinit new file mode 100644 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.landanlrun b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.landanlrun new file mode 100644 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.metp b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. +export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export 
precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.nsst b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.nsst new file mode 100644 index 0000000000..235c91f08b --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.nsst @@ -0,0 +1,33 @@ +#! /usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "${PDY}${cyc}" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocn b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocn @@ -0,0 +1,23 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocnanal b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocnanalbmat b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocnanalchkpt b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocnanalpost b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocnanalprep b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocnanalrun b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocnanalvrfy b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocnpost b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.post b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.post new file mode 100644 index 0000000000..3fca0a7b65 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.post @@ -0,0 +1,44 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_dwn=16 +elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_dwn=24 +elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_dwn=16 +else + export npe_dwn=24 +fi + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.postsnd b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.prep b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.prep new file mode 100644 index 0000000000..b05b82a43e --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. $EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. 
Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "${PDY}${cyc}" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.preplandobs b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.preplandobs new file mode 100644 index 0000000000..d69b0f7f59 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.preplandobs @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.preplandobs ########## +# Land Obs Prep specific + +echo "BEGIN: config.preplandobs" + +# Get task specific resources +. "${EXPDIR}/config.resources" preplandobs + +export FIMS_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/fims.nml.j2" +export IMS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/prep_ims.yaml" +export CALCFIMSEXE="${HOMEgfs}/exec/calcfIMS.exe" +export IMS2IODACONV="${HOMEgfs}/ush/imsfv3_scf2ioda.py" + +echo "END: config.preplandobs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.resources b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.resources new file mode 100644 index 0000000000..38efea7882 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.resources @@ -0,0 +1,972 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "coupled_ic aerosol_init" + echo "prep preplandobs" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_node_max=24 + elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ "${machine}" = "HERA" ]]; then + export npe_node_max=40 +elif [[ "${machine}" = "S4" ]]; then + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_node_max=32 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ "${machine}" = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ "${step}" = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "preplandobs" ]]; then + export wtime_preplandobs="00:05:00" + npe_preplandobs=1 + export npe_preplandobs + export nth_preplandobs=1 + npe_node_preplandobs=1 + export npe_node_preplandobs + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ "${step}" = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ "${step}" = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ "${step}" = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export 
memory_wavepostsbs_gfs="10GB" + +elif [[ "${step}" = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ "${step}" = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ "${step}" = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ "${step}" = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ 
"${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif [[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / 
${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ "${step}" = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ "${CASE}" = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ "${machine}" = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ "${machine}" = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ "${step}" = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ "${step}" = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export layout_x=${layout_x_gfs} + export 
layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case ${CASE} in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + #JKHdeclare -x "wtime_${step}_gfs"="06:00:00" + declare -x "wtime_${step}_gfs"="04:00:00" ## JKH - make walltime smaller + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + 
;; + esac + + unset _CDUMP _CDUMP_LIST + unset NTASKS_TOT + +elif [[ "${step}" = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ "${machine}" == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requiremtn + npe_ocnpost=2 + fi + +elif [[ "${step}" = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="00:20:00" ## JKH - make walltime smaller + #JKH export wtime_post_gfs="01:00:00" + #JKH export npe_post=126 + export npe_post=${npe_node_max} ## JKH - change to use 1 node for post + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ "${step}" = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ "${step}" = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ "${step}" = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ "${step}" = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ "${step}" = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ "${step}" = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ "${step}" = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ "${machine}" == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ "${machine}" == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ "${step}" = "echgres" ]]; 
then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif [[ "${step}" = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ "${step}" = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ "${step}" = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ "${step}" = "arch" || "${step}" = "earc" || "${step}" = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ "${step}" = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ "${step}" = "eobs" || "${step}" = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eobs=200 + elif [[ "${CASE}" = "C384" ]]; then + export npe_eobs=100 + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ "${machine}" = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ "${step}" = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ "${step}" = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if 
[[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ "${CASE}" = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ "${machine}" = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ "${step}" = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ "${CASE}" = "C384" || "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ "${step}" = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ "${step}" = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ "${step}" = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ "${step}" = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.resources.nco.static b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.resources.nco.static new file mode 100644 index 0000000000..d98e985b95 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.resources.nco.static @@ -0,0 +1,344 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + +elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + 
export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.sfcanl b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ufs b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ufs new file mode 100644 index 0000000000..708f6607a9 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.ufs @@ -0,0 +1,373 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src 
scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=1 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=14 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
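# --- Illustrative sketch (annotation, not part of the original config) ----------
# The runoff and chlorophyll climatology file names in this case arm are built
# from the MOM6 grid dimensions set just above, so for the 1/4-degree ocean
# (NX_GLB=1440, NY_GLB=1080) they expand to:
demo_NX=1440; demo_NY=1080
echo "runoff.daitren.clim.${demo_NX}x${demo_NY}.v20180328.nc"      # FRUNOFF
echo "seawifs-clim-1997-2010.${demo_NX}x${demo_NY}.v20180328.nc"   # CHLCLIM
# ---------------------------------------------------------------------------------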
CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "glo_025") + ntasks_ww3=262 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.vrfy b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wafs b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wafsblending b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wafsblending0p25 b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wafsgcip b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wafsgrib2 b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wafsgrib20p25 b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wave b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wave new file mode 100644 index 0000000000..ba7b7ad259 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wave @@ -0,0 +1,193 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +export waveGRD=${waveGRD:-'mx025'} + +#grid dependent variable defaults +export waveGRDN='1' # grid number for ww3_multi +export waveGRDG='10' # grid group for ww3_multi +export USE_WAV_RMP='NO' # YES/NO rmp grid remapping pre-processed coefficients +export waveMULTIGRID='.false.' # .true./.false. for multi or shel +export MESH_WAV="mesh.${waveGRD}.nc" # Mesh grid for wave model for CMEPS +export waveesmfGRD=' ' # input grid for multigrid + +#Grid dependent variables for various grids +case "${waveGRD}" in + "gnh_10m;aoc_9km;gsh_15m") + #GFSv16 settings: + export waveGRDN='1 2 3' + export waveGRDG='10 20 30' + export USE_WAV_RMP='YES' + export waveMULTIGRID='.true.' + export IOSRV='3' + export MESH_WAV=' ' + export waveesmfGRD='glox_10m' + export waveuoutpGRD='points' + export waveinterpGRD='glo_15mxt at_10m ep_10m wc_10m glo_30m' + export wavepostGRD='gnh_10m aoc_9km gsh_15m' + ;; + "gwes_30m") + #Grid used for P8 + export waveinterpGRD=' ' + export wavepostGRD='gwes_30m' + ;; + "mx025") + #Grid used for HR1 (tripolar 1/4 deg) + export waveinterpGRD='reg025' + export wavepostGRD=' ' + ;; + "glo_025") + #GEFSv13 regular lat/lon 1/4 deg grid + export waveinterpGRD=' ' + export wavepostGRD='glo_025' + ;; + *) + echo "No grid specific wave config values" + ;; +esac + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-${waveGRD}} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-${waveGRD}} # Native computational grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days 
+else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_shel/multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'1'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.waveawipsbulls b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. 
$EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.waveawipsgridded b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wavegempak b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.waveinit b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wavepostbndpnt b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wavepostbndpntbll b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wavepostpnt b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wavepostsbs b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wavepostsbs new file mode 100644 index 0000000000..f9f8c81d44 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.wavepostsbs @@ -0,0 +1,28 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +if [[ -z ${waveinterpGRD} ]]; then + export DOGRI_WAV='YES' # Create interpolated grids +else + export DOGRI_WAV='NO' # Do not create interpolated grids +fi +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.waveprep b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/runcmds b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/runcmds new file mode 100644 index 0000000000..2989ead090 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/runcmds @@ -0,0 +1,7 @@ + +rocotorun -w storms-mynn.xml -d storms-mynn.db +rocotostat -w storms-mynn.xml -d storms-mynn.db + +rocotorun -w test-mynn.xml -d test-mynn.db +rocotostat -w test-mynn.xml -d test-mynn.db + diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/storms-mynn.xml b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/storms-mynn.xml new file mode 100644 index 0000000000..a56430c7ab --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/storms-mynn.xml @@ -0,0 +1,161 @@ + + + + + + + + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + 201908281200 201909241200 27:00:00:00 + 201909141800 201910060000 21:06:00:00 + 201907020600 201912221200 173:06:00:00 + 201907110000 201909141200 65:12:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3 + batch + hera + 00:02:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_19may23/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + 
&ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3 + batch + hera + 02:30:00 + 95:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_19may23/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3 + batch + hera + 00:10:00 + 1:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_19may23/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/test-mynn.crontab b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/test-mynn.crontab new file mode 100644 index 0000000000..a19f2c368e --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/test-mynn.crontab @@ -0,0 +1,5 @@ + +#################### test-mynn #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/test-mynn/test-mynn.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/test-mynn/test-mynn.xml +################################################################# diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/test-mynn.xml b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/test-mynn.xml new file mode 100644 index 0000000000..e2899f491f --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/test-mynn.xml @@ -0,0 +1,157 @@ + + + + + + + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3 + batch + hera + 00:02:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3 + batch + hera + 04:00:00 + 
95:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 + f000 f006 f012 f018 f024 + f000 f006 f012 f018 f024 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3 + batch + hera + 00:20:00 + 1:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/12x12x1wgx14wt b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/12x12x1wgx14wt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/2nodes_post b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/2nodes_post new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.aero b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.aeroanl b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.aeroanlfinal b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.aeroanlinit b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.aeroanlrun b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.aerosol_init b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.anal b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.anal new file mode 100644 index 0000000000..e3a17f9c6a --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=45,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${PDY}${cyc}" -ge "2020052612" && "${PDY}${cyc}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${PDY}${cyc}" -ge "2020082412" && "${PDY}${cyc}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 
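# --- Illustrative sketch (annotation, not part of the original config) ----------
# Each historical window in this block follows the same pattern: ${PDY}${cyc} is
# a ten-digit YYYYMMDDHH string, so the numeric -ge/-lt tests select the info
# files valid for that date range. A minimal standalone check of the pattern
# (demo_datetime is a hypothetical PDY+cyc value used only for demonstration):
demo_datetime="2020090100"
if [[ "${demo_datetime}" -ge "2020082412" && "${demo_datetime}" -lt "2020091612" ]]; then
  echo "would use global_convinfo.txt.2020082412"
fi
# ---------------------------------------------------------------------------------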
+ fi + + # Assimilate Metop-C GNSSRO + if [[ "${PDY}${cyc}" -ge "2020091612" && "${PDY}${cyc}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${PDY}${cyc}" -ge "2021031712" && "${PDY}${cyc}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${PDY}${cyc}" -ge "2020011600" && "${PDY}${cyc}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${PDY}${cyc}" -ge "2020022012" && "${PDY}${cyc}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${PDY}${cyc}" -ge "2021052118" && "${PDY}${cyc}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${PDY}${cyc}" -ge "2021092206" && "${PDY}${cyc}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.analcalc b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.analdiag b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.arch b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.arch new file mode 100644 index 0000000000..6a0f6306a8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.arch @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. "${EXPDIR}/config.resources" arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +echo "END: config.arch" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.atmanl b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.atmanl new file mode 100644 index 0000000000..c045704fa2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.atmanl @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.atmanlfinal b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.atmanlinit b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.atmanlrun b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.atmensanl b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.atmensanl new file mode 100644 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.atmensanlfinal b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.atmensanlfinal new file mode 100644 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.atmensanlinit b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.atmensanlinit new file mode 100644 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.atmensanlrun b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.atmensanlrun new file mode 100644 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.awips b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.base b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.base new file mode 100644 index 0000000000..20cf2326c1 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.base @@ -0,0 +1,383 @@ +#! 
/usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="HERA" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="hera" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT="fim" + +# Directories relative to installation areas: +export HOMEgfs=/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/scratch1/NCEPDEV/global/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/scratch1/NCEPDEV/global/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/scratch1/NCEPDEV/global/glopara/dump" + +# USER specific paths +export HOMEDIR="/scratch1/BMC/gsd-fv3-dev/NCEPDEV/global/${USER}" +export STMP="${ROTDIR}/.." +export PTMP="${ROTDIR}/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/scratch1/NCEPDEV/global/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022110900 +export EDATE=2022110900 +export EXP_WARM_START=".false." 
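# --- Illustrative sketch (annotation, not part of the original config) ----------
# SDATE and EDATE are ten-digit YYYYMMDDHH stamps; with SDATE equal to EDATE and
# EXP_WARM_START=".false." this experiment describes a single cold-started cycle.
# The PDY and cyc values referenced below are the date and hour pieces of such a
# stamp; one minimal way to split one (hypothetical helper, not part of this file):
demo_CDATE=2022110900
demo_PDY=${demo_CDATE:0:8}   # 20221109
demo_cyc=${demo_CDATE:8:2}   # 00
echo "PDY=${demo_PDY} cyc=${demo_cyc}"
# ---------------------------------------------------------------------------------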
+export assim_freq=6 +export PSLOT="v17_p8_mynn_12x12_1wg_14wt_120h" +export EXPDIR="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/${PSLOT}" +export ROTDIR="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_mynn" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='mx025' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +#JKHexport FHMAX_GFS_00=${FHMAX_GFS_00:-24} +#JKHexport FHMAX_GFS_06=${FHMAX_GFS_06:-24} +#JKHexport FHMAX_GFS_12=${FHMAX_GFS_12:-24} +#JKHexport FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ### JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="NO" +export DO_JEDIATMENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# Turn on NSST in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# Switch to apply the SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# the script defaults to binary diagnostic files. Set diagnostic file +# variables here since they are used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.com b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.com new file mode 100644 index 0000000000..6a824012c6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.com @@ -0,0 +1,93 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `declare -r`) +# -x: Mark variable for export (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. 
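+#
+# As a rough illustration (assuming RUN=gfs, PDY=20221109, cyc=00, and an empty
+# MEMDIR), a call such as
+#   YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS
+# would resolve COM_ATMOS_ANALYSIS_TMPL (declared below) to something like
+#   ${ROTDIR}/gfs.20221109/00/analysis/atmos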
+# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_LAND_ANALYSIS_TMPL=${COM_BASE}'/analysis/land' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.coupled_ic b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.coupled_ic @@ -0,0 +1,43 @@ +#! 
/usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.defaults.s2sw b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.defaults.s2sw new file mode 100644 index 0000000000..1b0becefec --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.defaults.s2sw @@ -0,0 +1,26 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." + +# config.wave +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.earc b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ecen b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) 
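+  # e.g. for IAUFHRS_ENKF="3,6,9" the pipeline above counts 2 commas, and the
+  # increment below yields NECENGRP=3 (one ecen group per IAU forecast hour)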
+ ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.echgres b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ediag b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.efcs b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.efcs new file mode 100644 index 0000000000..95c2cb58de --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.efcs @@ -0,0 +1,97 @@ +#! /usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE_ENS}" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." 
-a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.eobs b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. $EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.epos b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.esfc b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. 
+ +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.eupd b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." # Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.fcst b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.fcst new file mode 100644 index 0000000000..e0943c33ce --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.fcst @@ -0,0 +1,431 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." 
+ +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export FCSTEXEC="ufs_model_haiqin.x" +else + export FCSTEXEC="ufs_model.x" +fi + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options for stratospheric O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not used. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".false." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." 
]]; then tbf="_satmedmf" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +export progsigma=".true." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export imfdeepcnv=5 + export imfshalcnv=5 + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + else + export imfshalcnv=2 + fi +fi + +#Convection schemes ### JKH - affects field table name +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + #JKH ??? export dt_inner=40. ### JKH - 10dec + #JKH export sedi_semi=??? + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + #JKHif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_thompson" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export ltaerosol=".true." + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke${tbp}" + else + export ltaerosol=".false." 
+ export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + #JKHexport FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." 
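+  # Note: with DOIAU="YES" the "3 6" setting above requests restart writes at
+  # forecast hours 3 and 6 only (hour 3 being the beginning of the IAU window),
+  # following the restart_interval syntax described at the top of this block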
+ +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.fit2obs b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.gempak b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. 
$EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ice b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.landanl b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.landanlfinal b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.landanlfinal new file mode 100644 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.landanlinit b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.landanlinit new file mode 100644 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.landanlrun b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.landanlrun new file mode 100644 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.metp b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. +export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export 
precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.nsst b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.nsst new file mode 100644 index 0000000000..235c91f08b --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.nsst @@ -0,0 +1,33 @@ +#! /usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "${PDY}${cyc}" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocn b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocn @@ -0,0 +1,23 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocnanal b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocnanalbmat b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocnanalchkpt b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocnanalpost b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocnanalprep b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocnanalrun b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocnanalvrfy b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocnpost b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.post b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.post new file mode 100644 index 0000000000..3fca0a7b65 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.post @@ -0,0 +1,44 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_dwn=16 +elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_dwn=24 +elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_dwn=16 +else + export npe_dwn=24 +fi + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.postsnd b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.prep b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.prep new file mode 100644 index 0000000000..b05b82a43e --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. $EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. 
Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "${PDY}${cyc}" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.preplandobs b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.preplandobs new file mode 100644 index 0000000000..d69b0f7f59 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.preplandobs @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.preplandobs ########## +# Land Obs Prep specific + +echo "BEGIN: config.preplandobs" + +# Get task specific resources +. "${EXPDIR}/config.resources" preplandobs + +export FIMS_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/fims.nml.j2" +export IMS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/prep_ims.yaml" +export CALCFIMSEXE="${HOMEgfs}/exec/calcfIMS.exe" +export IMS2IODACONV="${HOMEgfs}/ush/imsfv3_scf2ioda.py" + +echo "END: config.preplandobs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.resources b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.resources new file mode 100644 index 0000000000..38efea7882 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.resources @@ -0,0 +1,972 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "coupled_ic aerosol_init" + echo "prep preplandobs" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_node_max=24 + elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ "${machine}" = "HERA" ]]; then + export npe_node_max=40 +elif [[ "${machine}" = "S4" ]]; then + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_node_max=32 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ "${machine}" = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ "${step}" = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "preplandobs" ]]; then + export wtime_preplandobs="00:05:00" + npe_preplandobs=1 + export npe_preplandobs + export nth_preplandobs=1 + npe_node_preplandobs=1 + export npe_node_preplandobs + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ "${step}" = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ "${step}" = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ "${step}" = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export 
memory_wavepostsbs_gfs="10GB" + +elif [[ "${step}" = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ "${step}" = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ "${step}" = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ "${step}" = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ 
"${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif [[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / 
${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ "${step}" = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ "${CASE}" = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ "${machine}" = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ "${machine}" = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ "${step}" = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ "${step}" = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export layout_x=${layout_x_gfs} + export 
layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case ${CASE} in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + #JKHdeclare -x "wtime_${step}_gfs"="06:00:00" + declare -x "wtime_${step}_gfs"="04:00:00" ## JKH - make walltime smaller + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + 
;; + esac + + unset _CDUMP _CDUMP_LIST + unset NTASKS_TOT + +elif [[ "${step}" = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ "${machine}" == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ "${step}" = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="00:20:00" ## JKH - make walltime smaller + #JKH export wtime_post_gfs="01:00:00" + #JKH export npe_post=126 + export npe_post=${npe_node_max} ## JKH - change to use 1 node for post + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ "${step}" = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ "${step}" = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ "${step}" = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ "${step}" = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ "${step}" = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ "${step}" = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ "${step}" = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ "${machine}" == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ "${machine}" == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ "${step}" = "echgres" ]];
then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif [[ "${step}" = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ "${step}" = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ "${step}" = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ "${step}" = "arch" || "${step}" = "earc" || "${step}" = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ "${step}" = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ "${step}" = "eobs" || "${step}" = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eobs=200 + elif [[ "${CASE}" = "C384" ]]; then + export npe_eobs=100 + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ "${machine}" = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ "${step}" = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ "${step}" = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if 
[[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ "${CASE}" = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ "${machine}" = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ "${step}" = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ "${CASE}" = "C384" || "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ "${step}" = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ "${step}" = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ "${step}" = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ "${step}" = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.resources.nco.static b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.resources.nco.static new file mode 100644 index 0000000000..d98e985b95 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.resources.nco.static @@ -0,0 +1,344 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + +elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + 
export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.sfcanl b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ufs b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ufs new file mode 100644 index 0000000000..708f6607a9 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.ufs @@ -0,0 +1,373 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src 
scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=1 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=14 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "glo_025") + ntasks_ww3=262 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.vrfy b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wafs b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wafsblending b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wafsblending0p25 b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wafsgcip b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wafsgrib2 b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wafsgrib20p25 b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wave b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wave new file mode 100644 index 0000000000..ba7b7ad259 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wave @@ -0,0 +1,193 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +export waveGRD=${waveGRD:-'mx025'} + +#grid dependent variable defaults +export waveGRDN='1' # grid number for ww3_multi +export waveGRDG='10' # grid group for ww3_multi +export USE_WAV_RMP='NO' # YES/NO rmp grid remapping pre-processed coefficients +export waveMULTIGRID='.false.' # .true./.false. for multi or shel +export MESH_WAV="mesh.${waveGRD}.nc" # Mesh grid for wave model for CMEPS +export waveesmfGRD=' ' # input grid for multigrid + +#Grid dependent variables for various grids +case "${waveGRD}" in + "gnh_10m;aoc_9km;gsh_15m") + #GFSv16 settings: + export waveGRDN='1 2 3' + export waveGRDG='10 20 30' + export USE_WAV_RMP='YES' + export waveMULTIGRID='.true.' + export IOSRV='3' + export MESH_WAV=' ' + export waveesmfGRD='glox_10m' + export waveuoutpGRD='points' + export waveinterpGRD='glo_15mxt at_10m ep_10m wc_10m glo_30m' + export wavepostGRD='gnh_10m aoc_9km gsh_15m' + ;; + "gwes_30m") + #Grid used for P8 + export waveinterpGRD=' ' + export wavepostGRD='gwes_30m' + ;; + "mx025") + #Grid used for HR1 (tripolar 1/4 deg) + export waveinterpGRD='reg025' + export wavepostGRD=' ' + ;; + "glo_025") + #GEFSv13 regular lat/lon 1/4 deg grid + export waveinterpGRD=' ' + export wavepostGRD='glo_025' + ;; + *) + echo "No grid specific wave config values" + ;; +esac + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-${waveGRD}} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-${waveGRD}} # Native computational grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days 
+else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_shel/multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'1'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.waveawipsbulls b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. 
$EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.waveawipsgridded b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wavegempak b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.waveinit b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wavepostbndpnt b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wavepostbndpntbll b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wavepostpnt b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wavepostsbs b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wavepostsbs new file mode 100644 index 0000000000..f9f8c81d44 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.wavepostsbs @@ -0,0 +1,28 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +if [[ -z ${waveinterpGRD} ]]; then + export DOGRI_WAV='YES' # Create interpolated grids +else + export DOGRI_WAV='NO' # Do not create interpolated grids +fi +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.waveprep b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/noent_test-mynn.xml b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/noent_test-mynn.xml new file mode 100644 index 0000000000..4ae6416c29 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/noent_test-mynn.xml @@ -0,0 +1,151 @@ + + + + + + + + + + +]> + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/jobs/rocoto/makeinit_link.sh + + v17_p8_mynn_12x12_1wg_14wt_gfsinit_@H + gsd-fv3 + batch + hera + 00:02:00 + 1:ppn=1:tpp=1 + --export=NONE + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/v17_p8_mynn_12x12_1wg_14wt/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa + EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt + ROTDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/v17_p8_mynn_12x12_1wg_14wt + ICSDIR/scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127/ + CASEC768 + COMPONENTatmos + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun//RUNDIRS/v17_p8_mynn_12x12_1wg_14wt + + + + + 
/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/v17_p8_mynn_12x12_1wg_14wt/gfs.@Y@m@d/@H/model_data/atmos/input + + + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127//@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile6.nc + /scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127//@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile6.nc + + + + + + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/jobs/rocoto/fcst.sh + + v17_p8_mynn_12x12_1wg_14wt_gfsfcst_@H + gsd-fv3 + batch + hera + 03:00:00 + 95:ppn=40:tpp=1 + --export=NONE + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/v17_p8_mynn_12x12_1wg_14wt/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa + EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt + ROTDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/v17_p8_mynn_12x12_1wg_14wt + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun//RUNDIRS/v17_p8_mynn_12x12_1wg_14wt + + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/v17_p8_mynn_12x12_1wg_14wt/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/v17_p8_mynn_12x12_1wg_14wt/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/jobs/rocoto/post.sh + + v17_p8_mynn_12x12_1wg_14wt_gfspost#grp#_@H + gsd-fv3 + batch + hera + 00:20:00 + 2:ppn=40:tpp=1 + --export=NONE + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/v17_p8_mynn_12x12_1wg_14wt/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa + EXPDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt + ROTDIR/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/v17_p8_mynn_12x12_1wg_14wt + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun//RUNDIRS/v17_p8_mynn_12x12_1wg_14wt + FHRGRP#grp# + FHRLST#lst# + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/v17_p8_mynn_12x12_1wg_14wt/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/runcmds b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/runcmds new file mode 100644 index 0000000000..b7e934b853 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/runcmds @@ -0,0 +1,4 @@ + +rocotorun -w test-mynn.xml -d test-mynn.db +rocotostat -w test-mynn.xml -d test-mynn.db + diff --git 
a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/test-mynn.crontab b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/test-mynn.crontab new file mode 100644 index 0000000000..a19f2c368e --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/test-mynn.crontab @@ -0,0 +1,5 @@ + +#################### test-mynn #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/test-mynn/test-mynn.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/test-mynn/test-mynn.xml +################################################################# diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/test-mynn.xml b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/test-mynn.xml new file mode 100644 index 0000000000..7f4ef6cc61 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_1wg_14wt_120h/test-mynn.xml @@ -0,0 +1,154 @@ + + + + + + + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3 + batch + hera + 00:02:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3 + batch + hera + 04:00:00 + 95:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3 + batch + hera + 00:20:00 + 1:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.aero b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null 
+++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.aeroanl b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.aeroanlfinal b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.aeroanlinit b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. 
$EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.aeroanlrun b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.aerosol_init b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.aerosol_init @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.anal b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.anal new file mode 100644 index 0000000000..e3a17f9c6a --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=45,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. 
in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${PDY}${cyc}" -ge "2020052612" && "${PDY}${cyc}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${PDY}${cyc}" -ge "2020082412" && "${PDY}${cyc}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ "${PDY}${cyc}" -ge "2020091612" && "${PDY}${cyc}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${PDY}${cyc}" -ge "2021031712" && "${PDY}${cyc}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. 
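# The retrospective blocks in this section all follow the same pattern: the ten-digit
# string ${PDY}${cyc} is compared numerically against fixed cutover dates to select the
# matching historical info/error table. A minimal stand-alone sketch of that pattern,
# with hypothetical values (not an additional assimilation window):
#
#   PDY="20200415"; cyc="06"
#   if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "2020052612" ]]; then
#       CONVINFO="${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718"
#       OBERROR="${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718"
#   fi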
+ # Assimilate COSMIC-2 GPS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${PDY}${cyc}" -ge "2020011600" && "${PDY}${cyc}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${PDY}${cyc}" -ge "2020022012" && "${PDY}${cyc}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${PDY}${cyc}" -ge "2021052118" && "${PDY}${cyc}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${PDY}${cyc}" -ge "2021092206" && "${PDY}${cyc}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.analcalc b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.analdiag b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. $EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.arch b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.arch new file mode 100644 index 0000000000..6a0f6306a8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.arch @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +echo "END: config.arch" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.atmanl b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.atmanl new file mode 100644 index 0000000000..c045704fa2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.atmanl @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.atmanlfinal b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.atmanlinit b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.atmanlrun b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.atmensanl b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.atmensanl new file mode 100644 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.atmensanl @@ -0,0 +1,22 @@ +#! 
/usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.atmensanlfinal b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.atmensanlfinal new file mode 100644 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.atmensanlinit b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.atmensanlinit new file mode 100644 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.atmensanlrun b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.atmensanlrun new file mode 100644 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.awips b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.base b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.base new file mode 100644 index 0000000000..91a0bdeb0f --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.base @@ -0,0 +1,380 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="vjet" +export PARTITION_POST_BATCH="sjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT="fim" + +# Directories relative to installation areas: +export HOMEgfs=/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23 +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs1/BMC/gsd-fv3-test/NCEPDEV/global/$USER" +export STMP="/home/Judy.K.Henderson/scratch1-test/gw_19may23/FV3GFSrun" +export PTMP="/home/Judy.K.Henderson/scratch1-test/gw_19may23/FV3GFSrun" +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022110900 +export EDATE=2022110900 +export EXP_WARM_START=".false." 
+export assim_freq=6 +export PSLOT="v17_p8_mynn_12x12_vjet_2wg_12wt" +export EXPDIR="/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/${PSLOT}" +export ROTDIR="/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_mynn" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='mx025' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
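# Worked example of how the regex checks in this APP case compose the NEMS config name
# (shown for reference only; this experiment runs APP=ATM and never enters this branch):
# starting from "cpld" in the S2S* branch, APP=S2SA gives cpld_aero, APP=S2SW gives
# cpld_outerwave, and APP=S2SWA gives cpld_aero_outerwave, because the aerosol ("A$")
# check above runs before this wave ("^S2SW") block appends "_outerwave".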
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="NO" +export DO_JEDIATMENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.com b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.com new file mode 100644 index 0000000000..6a824012c6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.com @@ -0,0 +1,93 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. 
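# A rough sketch of how one of the templates below resolves at runtime. generate_com
# (defined in ush/preamble.sh) is the supported way to do the substitution; a bare eval
# is used here only to illustrate, and the paths/values are hypothetical:
#
#   ROTDIR="/path/to/ROTDIR"; RUN="gfs"; YMD="20221109"; HH="00"; MEMDIR=""
#   COM_ATMOS_HISTORY_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}/model_data/atmos/history'
#   eval "COM_ATMOS_HISTORY=${COM_ATMOS_HISTORY_TMPL}"
#   # -> /path/to/ROTDIR/gfs.20221109/00//model_data/atmos/history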
+# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_LAND_ANALYSIS_TMPL=${COM_BASE}'/analysis/land' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.coupled_ic b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.coupled_ic @@ -0,0 +1,43 @@ +#! 
/usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.defaults.s2sw b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.defaults.s2sw new file mode 100644 index 0000000000..1b0becefec --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.defaults.s2sw @@ -0,0 +1,26 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." + +# config.wave +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.earc b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ecen b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) 
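# Worked example of the comma counting above (using the default IAUFHRS_ENKF="3,6,9"):
# grep -o ',' prints each comma on its own line and grep -c . counts those lines, so
# ngrps=2 here; the increment below then yields NECENGRP=3, i.e. one ecen group per
# IAU forecast hour.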
+ ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.echgres b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ediag b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.efcs b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.efcs new file mode 100644 index 0000000000..95c2cb58de --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.efcs @@ -0,0 +1,97 @@ +#! /usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE_ENS}" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." 
-a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.eobs b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. $EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.epos b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.esfc b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. 
+ +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.eupd b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." # Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.fcst b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.fcst new file mode 100644 index 0000000000..e0943c33ce --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.fcst @@ -0,0 +1,431 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." 
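The WAVE_CDUMP case at the top of config.fcst relies on the ${CDUMP/enkf} expansion, which removes the enkf substring from CDUMP so that an ensemble dump such as enkfgdas is compared as gdas. A short sketch of the matching, with assumed values:

CDUMP="enkfgdas"       # assumed ensemble dump
WAVE_CDUMP="gdas"      # waves requested for the gdas dump only
DO_WAVE="YES"
case ${WAVE_CDUMP} in
  both | ${CDUMP/enkf} ) ;;    # ${CDUMP/enkf} expands to "gdas", so the match succeeds
  *) DO_WAVE="NO" ;;
esac
echo "DO_WAVE=${DO_WAVE}"      # prints DO_WAVE=YES; with WAVE_CDUMP="gfs" it would print NO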
+ +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export FCSTEXEC="ufs_model_haiqin.x" +else + export FCSTEXEC="ufs_model.x" +fi + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".false." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." 
]]; then tbf="_satmedmf" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +export progsigma=".true." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export imfdeepcnv=5 + export imfshalcnv=5 + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + else + export imfshalcnv=2 + fi +fi + +#Convection schemes ### JKH - affects field table name +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + #JKH ??? export dt_inner=40. ### JKH - 10dec + #JKH export sedi_semi=??? + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + #JKHif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_thompson" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export ltaerosol=".true." + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke${tbp}" + else + export ltaerosol=".false." 
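The tbf and tbp flags set above exist only to compose the field-table file name for the chosen microphysics. A small sketch of how the suffixes combine, assuming both satmedmf and progsigma are enabled (the HOMEgfs path is hypothetical):

HOMEgfs="/path/to/global-workflow"    # hypothetical install location
satmedmf=".true."; progsigma=".true."
tbf=""; tbp=""
if [[ "${satmedmf}" = ".true." ]]; then tbf="_satmedmf"; fi
if [[ "${progsigma}" = ".true." ]]; then tbp="_progsigma"; fi
# Zhao-Carr (imp_physics=99) would then resolve to:
echo "${HOMEgfs}/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}"
# -> /path/to/global-workflow/parm/parm_fv3diag/field_table_zhaocarr_satmedmf_progsigma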
+ export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + #JKHexport FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." 
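The GFS branch that follows expands restart_interval_gfs into an explicit list of restart hours, offset by half the IAU window. A standalone sketch of that loop, with assumed values:

FHMAX_GFS=48; restart_interval_gfs=12; IAU_OFFSET=6; DOIAU="YES"   # assumed example values
[[ ${DOIAU} = "NO" ]] && IAU_OFFSET=0
rst_list=""
xfh=$((restart_interval_gfs + (IAU_OFFSET / 2)))   # first restart hour: 12 + 3 = 15
while [ ${xfh} -le ${FHMAX_GFS} ]; do
  rst_list="${rst_list} ${xfh}"
  xfh=$((xfh + restart_interval_gfs))
done
echo "restart_interval=${rst_list}"                # -> restart_interval= 15 27 39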
+ +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.fit2obs b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.gempak b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. 
$EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ice b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.landanl b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.landanlfinal b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.landanlfinal new file mode 100644 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.landanlinit b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.landanlinit new file mode 100644 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.landanlrun b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.landanlrun new file mode 100644 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.metp b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. +export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export 
precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.nsst b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.nsst new file mode 100644 index 0000000000..235c91f08b --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.nsst @@ -0,0 +1,33 @@ +#! /usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "${PDY}${cyc}" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocn b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocn @@ -0,0 +1,23 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable is determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is time interval for applying increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocnanal b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size reolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocnanalbmat b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocnanalchkpt b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocnanalpost b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocnanalprep b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocnanalrun b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocnanalvrfy b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocnpost b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.post b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.post new file mode 100644 index 0000000000..3fca0a7b65 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.post @@ -0,0 +1,44 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_dwn=16 +elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_dwn=24 +elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_dwn=16 +else + export npe_dwn=24 +fi + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.postsnd b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.prep b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.prep new file mode 100644 index 0000000000..b05b82a43e --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. $EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. 
Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "${PDY}${cyc}" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.preplandobs b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.preplandobs new file mode 100644 index 0000000000..d69b0f7f59 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.preplandobs @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.preplandobs ########## +# Land Obs Prep specific + +echo "BEGIN: config.preplandobs" + +# Get task specific resources +. "${EXPDIR}/config.resources" preplandobs + +export FIMS_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/fims.nml.j2" +export IMS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/prep_ims.yaml" +export CALCFIMSEXE="${HOMEgfs}/exec/calcfIMS.exe" +export IMS2IODACONV="${HOMEgfs}/ush/imsfv3_scf2ioda.py" + +echo "END: config.preplandobs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.resources b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.resources new file mode 100644 index 0000000000..38efea7882 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.resources @@ -0,0 +1,972 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "coupled_ic aerosol_init" + echo "prep preplandobs" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_node_max=24 + elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ "${machine}" = "HERA" ]]; then + export npe_node_max=40 +elif [[ "${machine}" = "S4" ]]; then + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_node_max=32 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ "${machine}" = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ "${step}" = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "preplandobs" ]]; then + export wtime_preplandobs="00:05:00" + npe_preplandobs=1 + export npe_preplandobs + export nth_preplandobs=1 + npe_node_preplandobs=1 + export npe_node_preplandobs + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ "${step}" = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ "${step}" = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ "${step}" = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export 
memory_wavepostsbs_gfs="10GB" + +elif [[ "${step}" = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ "${step}" = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ "${step}" = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ "${step}" = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ 
"${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif [[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / 
${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ "${step}" = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ "${CASE}" = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ "${machine}" = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ "${machine}" = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ "${step}" = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ "${step}" = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export layout_x=${layout_x_gfs} + export 
layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case ${CASE} in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + #JKHdeclare -x "wtime_${step}_gfs"="06:00:00" + declare -x "wtime_${step}_gfs"="04:00:00" ## JKH - make walltime smaller + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + 
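The PETS bookkeeping in the fcst/efcs block above is plain integer arithmetic over per-component task and thread counts; the mediator overlaps the atmosphere tasks and adds nothing to the total. A standalone sketch with assumed counts:

ntasks_fv3=144; nthreads_fv3=2; ntasks_quilt=8         # assumed atmosphere decomposition
ntasks_mom6=120; nthreads_mom6=1                       # assumed ocean decomposition
ntasks_cice6=48; nthreads_cice6=1                      # assumed ice decomposition
(( FV3PETS = ntasks_fv3 * nthreads_fv3 ))              # 288
(( QUILTPETS = ntasks_quilt * nthreads_fv3 ))          # 16
(( ATMPETS = FV3PETS + QUILTPETS ))                    # 304
(( OCNPETS = ntasks_mom6 * nthreads_mom6 ))            # 120
(( ICEPETS = ntasks_cice6 * nthreads_cice6 ))          # 48
(( NTASKS_TOT = ATMPETS + OCNPETS + ICEPETS ))         # 472 PETs for the coupled model
echo "Total PETS = ${NTASKS_TOT}"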
;; + esac + + unset _CDUMP _CDUMP_LIST + unset NTASKS_TOT + +elif [[ "${step}" = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ "${machine}" == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ "${step}" = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="00:20:00" ## JKH - make walltime smaller + #JKH export wtime_post_gfs="01:00:00" + #JKH export npe_post=126 + export npe_post=${npe_node_max} ## JKH - change to use 1 node for post + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ "${step}" = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ "${step}" = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ "${step}" = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ "${step}" = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ "${step}" = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ "${step}" = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ "${step}" = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ "${machine}" == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ "${machine}" == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ "${step}" = "echgres" ]];
then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif [[ "${step}" = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ "${step}" = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ "${step}" = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ "${step}" = "arch" || "${step}" = "earc" || "${step}" = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ "${step}" = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ "${step}" = "eobs" || "${step}" = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eobs=200 + elif [[ "${CASE}" = "C384" ]]; then + export npe_eobs=100 + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ "${machine}" = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ "${step}" = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ "${step}" = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if 
[[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ "${CASE}" = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ "${machine}" = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ "${step}" = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ "${CASE}" = "C384" || "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ "${step}" = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ "${step}" = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ "${step}" = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ "${step}" = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.resources.nco.static b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.resources.nco.static new file mode 100644 index 0000000000..d98e985b95 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.resources.nco.static @@ -0,0 +1,344 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + +elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + 
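# --- Editor's illustrative sketch (not part of config.resources.nco.static) ----
# The npe_node_* values in this file are derived by dividing the cores available
# per node by the threads per task; piping to bc gives integer (floor) division.
# npe_node_max and nth_eupd below use the 128-core WCOSS2 values set in this file.
npe_node_max=128
nth_eupd=14
npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc)   # -> 9 tasks per node
echo "eupd: ${npe_node_eupd} tasks/node x ${nth_eupd} threads uses $(( npe_node_eupd * nth_eupd )) of ${npe_node_max} cores"
# --------------------------------------------------------------------------------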
export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.sfcanl b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ufs b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ufs new file mode 100644 index 0000000000..c9021961d6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.ufs @@ -0,0 +1,378 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
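# --- Editor's illustrative sketch (not part of config.ufs) ---------------------
# Example of how config.ufs is typically sourced, based on the option parser
# below: an atmosphere-only setup passes just --fv3, while a coupled setup adds
# the ocean/ice/wave resolutions, which turns off the corresponding skip_*
# switches.  EXPDIR is an assumed placeholder; machine and PARTITION_BATCH must
# be set beforehand because the npe_node_max case block below depends on them.
EXPDIR=${EXPDIR:-/path/to/expdir}
export machine="JET" PARTITION_BATCH="vjet"

# Atmosphere-only, forecast resolution C768
source "${EXPDIR}/config.ufs" --fv3 C768

# Coupled S2SW, quarter-degree ocean/ice and the mx025 wave grid
source "${EXPDIR}/config.ufs" --fv3 C384 --mom6 025 --cice6 025 --ww3 mx025
echo "FV3 tasks: ${ntasks_fv3}, MOM6 tasks: ${ntasks_mom6}, WW3 tasks: ${ntasks_ww3}"
# --------------------------------------------------------------------------------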
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src 
scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + if [[ "${machine}" == "HERA" ]] ; then + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + elif [[ "${PARTITION_BATCH}" == "vjet" ]] ; then + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + elif [[ "${PARTITION_BATCH}" == "xjet" ]] ; then + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + fi + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + 
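# --- Editor's illustrative sketch (not part of config.ufs) ---------------------
# Worked example of the write-task arithmetic above, using the C768 gfs values
# set in this experiment (12x12 layout, 2 write groups, 12 write tasks per group
# per thread per tile, 4 FV3 threads); the experiment name "12x12_vjet_2wg_12wt"
# appears to encode exactly these numbers.
layout_x_gfs=12; layout_y_gfs=12
WRITE_GROUP_GFS=2
WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12
nthreads_fv3_gfs=4

(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 ))                                  # 864 compute tasks
(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 ))  # 72 write tasks per group
(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS ))             # 144 write tasks total
(( total_pets = (ntasks_fv3_gfs + ntasks_quilt_gfs) * nthreads_fv3_gfs ))               # 4032 PETs
echo "C768 gfs fcst: ${ntasks_fv3_gfs} compute + ${ntasks_quilt_gfs} quilt tasks = ${total_pets} PETs at ${nthreads_fv3_gfs} threads"
# (4032 PETs appears consistent with the 252 nodes x 16 cores requested for gfsfcst in v17_p8_mynn.xml.)
# --------------------------------------------------------------------------------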
MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "glo_025") + ntasks_ww3=262 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.vrfy b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wafs b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wafsblending b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wafsblending0p25 b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wafsgcip b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wafsgrib2 b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wafsgrib20p25 b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wave b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wave new file mode 100644 index 0000000000..ba7b7ad259 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wave @@ -0,0 +1,193 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +export waveGRD=${waveGRD:-'mx025'} + +#grid dependent variable defaults +export waveGRDN='1' # grid number for ww3_multi +export waveGRDG='10' # grid group for ww3_multi +export USE_WAV_RMP='NO' # YES/NO rmp grid remapping pre-processed coefficients +export waveMULTIGRID='.false.' # .true./.false. for multi or shel +export MESH_WAV="mesh.${waveGRD}.nc" # Mesh grid for wave model for CMEPS +export waveesmfGRD=' ' # input grid for multigrid + +#Grid dependent variables for various grids +case "${waveGRD}" in + "gnh_10m;aoc_9km;gsh_15m") + #GFSv16 settings: + export waveGRDN='1 2 3' + export waveGRDG='10 20 30' + export USE_WAV_RMP='YES' + export waveMULTIGRID='.true.' + export IOSRV='3' + export MESH_WAV=' ' + export waveesmfGRD='glox_10m' + export waveuoutpGRD='points' + export waveinterpGRD='glo_15mxt at_10m ep_10m wc_10m glo_30m' + export wavepostGRD='gnh_10m aoc_9km gsh_15m' + ;; + "gwes_30m") + #Grid used for P8 + export waveinterpGRD=' ' + export wavepostGRD='gwes_30m' + ;; + "mx025") + #Grid used for HR1 (tripolar 1/4 deg) + export waveinterpGRD='reg025' + export wavepostGRD=' ' + ;; + "glo_025") + #GEFSv13 regular lat/lon 1/4 deg grid + export waveinterpGRD=' ' + export wavepostGRD='glo_025' + ;; + *) + echo "No grid specific wave config values" + ;; +esac + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-${waveGRD}} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-${waveGRD}} # Native computational grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days 
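# --- Editor's illustrative sketch (not part of config.wave) --------------------
# The gridded and point output rates above are simple hour/second conversions.
# This standalone example uses the defaults shown above (FHOUT_HF_WAV=1,
# DTPNT_WAV=3600) and bash arithmetic expansion, which is equivalent to the
# expr calls in the config.
FHOUT_HF_WAV=1     # high-frequency gridded output every 1 h
DTPNT_WAV=3600     # point output stride in seconds

DTFLD_WAV=$(( FHOUT_HF_WAV * 3600 ))   # 3600 s between gridded output steps
FHINCP_WAV=$(( DTPNT_WAV / 3600 ))     # 1 h between point output steps
echo "gridded output every ${DTFLD_WAV}s, point output every ${FHINCP_WAV}h"
# --------------------------------------------------------------------------------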
+else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_shel/multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'1'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.waveawipsbulls b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. 
$EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.waveawipsgridded b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wavegempak b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.waveinit b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wavepostbndpnt b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wavepostbndpntbll b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wavepostpnt b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wavepostsbs b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wavepostsbs new file mode 100644 index 0000000000..f9f8c81d44 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.wavepostsbs @@ -0,0 +1,28 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +if [[ -z ${waveinterpGRD} ]]; then + export DOGRI_WAV='YES' # Create interpolated grids +else + export DOGRI_WAV='NO' # Do not create interpolated grids +fi +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.waveprep b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/runcmds b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/runcmds new file mode 100644 index 0000000000..8fd1a5ad66 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/runcmds @@ -0,0 +1,6 @@ + +rocotorun -w v17_p8_mynn.xml -d v17_p8_mynn.db +rocotostat -w v17_p8_mynn.xml -d v17_p8_mynn.db + +rocotorun -w v17_p8_post_sjet.xml -d post.db +rocotostat -w v17_p8_post_sjet.xml -d post.db diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/v17_p8_mynn.crontab b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/v17_p8_mynn.crontab new file mode 100644 index 0000000000..1874040566 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/v17_p8_mynn.crontab @@ -0,0 +1,5 @@ + +#################### jet #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/v17_p8_mynn.db -w /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/v17_p8_mynn.xml +################################################################# diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/v17_p8_mynn.xml b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/v17_p8_mynn.xml new file mode 100644 index 0000000000..4c78718260 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/v17_p8_mynn.xml @@ -0,0 +1,273 @@ + + + + + + + + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + 202211090000 
202211090000 24:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3-dev + batch + vjet + 00:02:00 + 1:ppn=1:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + batch + vjet + 04:00:00 + 252:ppn=16:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 _f126-f126 _f132-f132 _f138-f138 _f144-f144 _f150-f150 _f156-f156 _f162-f162 _f168-f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + vjet + 00:20:00 + 1:ppn=16:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/v17_p8_post_sjet.xml b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/v17_p8_post_sjet.xml new file mode 100644 index 0000000000..a4939e0e22 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_vjet_2wg_12wt/v17_p8_post_sjet.xml @@ -0,0 +1,76 @@ + + + + + + + + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 _f126-f126 _f132-f132 _f138-f138 _f144-f144 _f150-f150 _f156-f156 _f162-f162 _f168-f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + 
batch + sjet + 00:20:00 + 1:ppn=16:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/12x12x2wgx12wt b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/12x12x2wgx12wt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.aero b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.aeroanl b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.aeroanlfinal b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.aeroanlinit b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.aeroanlrun b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.aerosol_init b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.anal b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.anal new file mode 100644 index 0000000000..e3a17f9c6a --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=45,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${PDY}${cyc}" -ge "2020052612" && "${PDY}${cyc}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${PDY}${cyc}" -ge "2020082412" && "${PDY}${cyc}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 
+ fi + + # Assimilate Metop-C GNSSRO + if [[ "${PDY}${cyc}" -ge "2020091612" && "${PDY}${cyc}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${PDY}${cyc}" -ge "2021031712" && "${PDY}${cyc}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${PDY}${cyc}" -ge "2020011600" && "${PDY}${cyc}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${PDY}${cyc}" -ge "2020022012" && "${PDY}${cyc}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${PDY}${cyc}" -ge "2021052118" && "${PDY}${cyc}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${PDY}${cyc}" -ge "2021092206" && "${PDY}${cyc}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.analcalc b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.analdiag b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.arch b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.arch new file mode 100644 index 0000000000..6a0f6306a8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.arch @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. "${EXPDIR}/config.resources" arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +echo "END: config.arch" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.atmanl b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.atmanl new file mode 100644 index 0000000000..c045704fa2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.atmanl @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.atmanlfinal b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.atmanlinit b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.atmanlrun b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.atmensanl b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.atmensanl new file mode 100644 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.atmensanlfinal b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.atmensanlfinal new file mode 100644 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.atmensanlinit b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.atmensanlinit new file mode 100644 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.atmensanlrun b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.atmensanlrun new file mode 100644 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.awips b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.base b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.base new file mode 100644 index 0000000000..472b2dfaac --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.base @@ -0,0 +1,380 @@ +#! 
/usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_POST_BATCH="sjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT="fim" + +# Directories relative to installation areas: +export HOMEgfs=/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23 +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs1/BMC/gsd-fv3-test/NCEPDEV/global/$USER" +export STMP="/home/Judy.K.Henderson/scratch1-test/gw_19may23/FV3GFSrun" +export PTMP="/home/Judy.K.Henderson/scratch1-test/gw_19may23/FV3GFSrun" +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022110900 +export EDATE=2022110900 +export EXP_WARM_START=".false." 
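# Illustrative sketch only -- not part of config.base. Assumption: the list of
# cycles spans SDATE..EDATE in steps of assim_freq hours (set just below) and
# is generated by the workflow setup/rocoto, not by this file. With SDATE equal
# to EDATE above, that is a single 2022110900 cycle. Conceptually:
#   cdate="${SDATE}"
#   while (( cdate <= EDATE )); do
#     echo "cycle: ${cdate}"
#     cdate=$(date -ud "${cdate:0:4}-${cdate:4:2}-${cdate:6:2} ${cdate:8:2}:00:00 +${assim_freq} hours" +%Y%m%d%H)
#   done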
+export assim_freq=6 +export PSLOT="v17_p8_mynn_12x12_xjet_2wg_12wt" +export EXPDIR="/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/${PSLOT}" +export ROTDIR="/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_mynn" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='mx025' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="NO" +export DO_JEDIATMENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.base_f024 b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.base_f024 new file mode 100644 index 0000000000..872441362a --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.base_f024 @@ -0,0 +1,393 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT=fim ## JKH + +# Directories relative to installation areas: +export HOMEgfs=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs4/BMC/gsd-fv3-dev/NCEPDEV/global/$USER" +export STMP="$ROTDIR/.." +export PTMP="$ROTDIR/.." 
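# Illustrative note, not in the original file: STMP and PTMP above reference
# ${ROTDIR}, which this variant of config.base only exports further down, so
# their expansion here relies on ROTDIR already being set in the environment
# when the file is sourced (an assumption). With the exports that follow, the
# run directories would compose roughly as
#   DATAROOT="${STMP}/RUNDIRS/${PSLOT}"
#   ==> ${ROTDIR}/../RUNDIRS/v17_p8_mynn_12x12_xjet_2wg_12wt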
+export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEfv3gfs="${HOMEgfs}/sorc/fv3gfs.fd" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." +export assim_freq=6 +export PSLOT="v17_p8_mynn_12x12_xjet_2wg_12wt" +export EXPDIR="${HOMEgfs}/FV3GFSwfm/${PSLOT}" +export ROTDIR="${HOMEgfs}/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${CDATE}" -ge "2019092100" && "${CDATE}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? 
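# Minimal illustrative sketch, not part of the original file, of the nested
# default expansion used for RUN above; the variable values are assumed:
#   unset RUN CDUMP;  echo "${RUN:-${CDUMP:-gfs}}"   # -> gfs
#   CDUMP="gdas";     echo "${RUN:-${CDUMP:-gfs}}"   # -> gdas
#   RUN="enkfgdas";   echo "${RUN:-${CDUMP:-gfs}}"   # -> enkfgdas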
+ +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_mynn" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENKF="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENKF +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='gnh_10m aoc_9km gsh_15m' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if DO_GLDAS is set or (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
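# Illustrative note, not in the original file, on how gfs_cyc maps to the GFS
# cycle spacing computed below as STEP_GFS=$(( 24 / gfs_cyc )):
#   gfs_cyc=1 -> STEP_GFS=24   (00Z only)
#   gfs_cyc=2 -> STEP_GFS=12   (00Z and 12Z)
#   gfs_cyc=4 -> STEP_GFS=6    (all four cycles)
# gfs_cyc=0 bypasses the division and sets STEP_GFS="0" (no GFS cycles).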
+ +# GFS output and frequency +export FHMIN_GFS=0 + +#jkhexport FHMAX_GFS_00=${FHMAX_GFS_00:-168} +#jkhexport FHMAX_GFS_06=${FHMAX_GFS_06:-168} +#jkhexport FHMAX_GFS_12=${FHMAX_GFS_12:-168} +#jkhexport FHMAX_GFS_18=${FHMAX_GFS_18:-168} +export FHMAX_GFS_00=${FHMAX_GFS_00:-24} +export FHMAX_GFS_06=${FHMAX_GFS_06:-24} +export FHMAX_GFS_12=${FHMAX_GFS_12:-24} +export FHMAX_GFS_18=${FHMAX_GFS_18:-24} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# run GLDAS to spin up land ICs +export DO_GLDAS="NO" +export gldas_cyc=00 + +# Exception handling that when DO_GLDAS is set, the FHOUT must be 1 +if [[ ${DO_GLDAS} = "YES" ]]; then + export FHOUT=1 +fi + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIVAR="NO" +export DO_JEDIENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENKF=@NMEM_ENKF@ +export NMEM_EFCS=30 +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${CDATE}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. 
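# Illustrative note, not in the original file, on the per-cycle forecast
# length selection above; the cycle hour is assumed for demonstration:
#   cyc=00
#   FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}})   # expands ${FHMAX_GFS_00} -> 24 here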
+ +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.com b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.com new file mode 100644 index 0000000000..6a824012c6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.com @@ -0,0 +1,93 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. 
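# Illustrative expansion, with values assumed for demonstration only: given
# YMD=20221109, HH=00, RUN=gfs and an empty MEMDIR, the analysis template
# declared below,
#   COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos'
# would be filled in by generate_com to something like
#   ${ROTDIR}/gfs.20221109/00/analysis/atmos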
+# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_LAND_ANALYSIS_TMPL=${COM_BASE}'/analysis/land' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.coupled_ic b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.coupled_ic @@ -0,0 +1,43 @@ +#! 
/usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.defaults.s2sw b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.defaults.s2sw new file mode 100644 index 0000000000..1b0becefec --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.defaults.s2sw @@ -0,0 +1,26 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." + +# config.wave +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.earc b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ecen b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) 
+ ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.echgres b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ediag b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.efcs b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.efcs new file mode 100644 index 0000000000..95c2cb58de --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.efcs @@ -0,0 +1,97 @@ +#! /usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE_ENS}" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." 
-a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.eobs b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. $EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.epos b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.esfc b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. 
+ +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.eupd b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." # Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.fcst b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.fcst new file mode 100644 index 0000000000..e0943c33ce --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.fcst @@ -0,0 +1,431 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." 
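# Illustrative sketch, not part of the original script, of the mechanics the
# component loop above uses to turn the DO_<component> switches into sourced
# config files; values are assumed for demonstration:
#   component="WAVE"; control="DO_${component}"
#   eval echo \$$control                              # -> contents of $DO_WAVE
#   echo "$component" | awk '{ print tolower($1) }'   # -> wave
# i.e. DO_WAVE="YES" leads to sourcing $EXPDIR/config.wave.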
+ +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export FCSTEXEC="ufs_model_haiqin.x" +else + export FCSTEXEC="ufs_model.x" +fi + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".false." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." 
]]; then tbf="_satmedmf" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +export progsigma=".true." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export imfdeepcnv=5 + export imfshalcnv=5 + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + else + export imfshalcnv=2 + fi +fi + +#Convection schemes ### JKH - affects field table name +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + #JKH ??? export dt_inner=40. ### JKH - 10dec + #JKH export sedi_semi=??? + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + #JKHif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_thompson" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export ltaerosol=".true." + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke${tbp}" + else + export ltaerosol=".false." 
+ export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + #JKHexport FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." 
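+  # Illustrative note (hypothetical values, not part of the delivered config):
+  # with DOIAU=YES the "3 6" list above writes restarts at f003 and f006 only.
+  # The gfs branch below builds its list from restart_interval_gfs and IAU_OFFSET,
+  # e.g. restart_interval_gfs=12, IAU_OFFSET=6, FHMAX_GFS=48 gives
+  # xfh = 12 + 6/2 = 15 and restart_interval="15 27 39".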
+ +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.fit2obs b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.gempak b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. 
$EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ice b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.landanl b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.landanlfinal b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.landanlfinal new file mode 100644 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.landanlinit b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.landanlinit new file mode 100644 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.landanlrun b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.landanlrun new file mode 100644 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.metp b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. +export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export 
precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.nsst b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.nsst new file mode 100644 index 0000000000..235c91f08b --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.nsst @@ -0,0 +1,33 @@ +#! /usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "${PDY}${cyc}" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocn b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocn @@ -0,0 +1,23 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanal b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalbmat b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalchkpt b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +.
"${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalpost b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalprep b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalrun b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalvrfy b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnpost b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.post b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.post new file mode 100644 index 0000000000..3fca0a7b65 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.post @@ -0,0 +1,44 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_dwn=16 +elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_dwn=24 +elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_dwn=16 +else + export npe_dwn=24 +fi + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.postsnd b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.prep b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.prep new file mode 100644 index 0000000000..b05b82a43e --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. $EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. 
Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "${PDY}${cyc}" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.preplandobs b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.preplandobs new file mode 100644 index 0000000000..d69b0f7f59 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.preplandobs @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.preplandobs ########## +# Land Obs Prep specific + +echo "BEGIN: config.preplandobs" + +# Get task specific resources +. "${EXPDIR}/config.resources" preplandobs + +export FIMS_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/fims.nml.j2" +export IMS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/prep_ims.yaml" +export CALCFIMSEXE="${HOMEgfs}/exec/calcfIMS.exe" +export IMS2IODACONV="${HOMEgfs}/ush/imsfv3_scf2ioda.py" + +echo "END: config.preplandobs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.resources b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.resources new file mode 100644 index 0000000000..38efea7882 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.resources @@ -0,0 +1,972 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "coupled_ic aerosol_init" + echo "prep preplandobs" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_node_max=24 + elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ "${machine}" = "HERA" ]]; then + export npe_node_max=40 +elif [[ "${machine}" = "S4" ]]; then + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_node_max=32 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ "${machine}" = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ "${step}" = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "preplandobs" ]]; then + export wtime_preplandobs="00:05:00" + npe_preplandobs=1 + export npe_preplandobs + export nth_preplandobs=1 + npe_node_preplandobs=1 + export npe_node_preplandobs + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ "${step}" = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ "${step}" = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ "${step}" = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export 
memory_wavepostsbs_gfs="10GB" + +elif [[ "${step}" = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ "${step}" = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ "${step}" = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ "${step}" = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ 
"${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif [[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / 
${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ "${step}" = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ "${CASE}" = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ "${machine}" = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ "${machine}" = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ "${step}" = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ "${step}" = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export layout_x=${layout_x_gfs} + export 
layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components; they usually land on the atmosphere tasks. + # However, it is suggested to limit the mediator PETS to 300, as larger counts may cause slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-${ATMPETS}} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case ${CASE} in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + #JKHdeclare -x "wtime_${step}_gfs"="06:00:00" + declare -x "wtime_${step}_gfs"="04:00:00" ## JKH - make walltime smaller + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 +
;; + esac + + unset _CDUMP _CDUMP_LIST + unset NTASKS_TOT + +elif [[ "${step}" = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ "${machine}" == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ "${step}" = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="00:20:00" ## JKH - make walltime smaller + #JKH export wtime_post_gfs="01:00:00" + #JKH export npe_post=126 + export npe_post=${npe_node_max} ## JKH - change to use 1 node for post + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ "${step}" = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ "${step}" = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ "${step}" = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ "${step}" = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ "${step}" = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ "${step}" = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ "${step}" = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ "${machine}" == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ "${machine}" == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ "${step}" = "echgres" ]];
then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif [[ "${step}" = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ "${step}" = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ "${step}" = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ "${step}" = "arch" || "${step}" = "earc" || "${step}" = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ "${step}" = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ "${step}" = "eobs" || "${step}" = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eobs=200 + elif [[ "${CASE}" = "C384" ]]; then + export npe_eobs=100 + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ "${machine}" = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ "${step}" = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ "${step}" = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if 
[[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ "${CASE}" = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ "${machine}" = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ "${step}" = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ "${CASE}" = "C384" || "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ "${step}" = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ "${step}" = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ "${step}" = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ "${step}" = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.resources.nco.static b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.resources.nco.static new file mode 100644 index 0000000000..d98e985b95 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.resources.nco.static @@ -0,0 +1,344 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + +elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + 
export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.sfcanl b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ufs b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ufs new file mode 100644 index 0000000000..aabf25c0fe --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.ufs @@ -0,0 +1,378 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src 
scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + if [[ "${machine}" == "HERA" ]] ; then + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + elif [[ "${PARTITION_BATCH}" == "vjet" ]] ; then + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + elif [[ "${PARTITION_BATCH}" == "xjet" ]] ; then + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + fi + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + 
MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "glo_025") + ntasks_ww3=262 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.vrfy b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.vrfy new file mode 100644 index 0000000000..919ffec544 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.vrfy @@ -0,0 +1,123 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="NO" # Make 0.25-deg pgb files in ARCDIR for precip verification ## JKH +export VRFYRAD="NO" # Radiance data assimilation monitoring ## JKH +export VRFYOZN="NO" # Ozone data assimilation monitoring ## JKH +export VRFYMINMON="NO" # GSI minimization monitoring ## JKH +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="NO" # Cyclone genesis verification ## JKH +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +#JKHexport ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export ens_tracker_ver=v1.1.15.6 ## JKH +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + ## JKH + if [[ "${machine}" = "JET" ]]; then + COMINsyn=${COMINsyn:-/mnt/lfs4/HFIP/hwrf-data/hwrf-input/SYNDAT-PLUS} ## JKH + else + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + fi + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi + + ## JKH + if [[ "${machine}" = "JET" ]]; then + export COMINsyn=${COMINsyn:-/mnt/lfs4/HFIP/hwrf-data/hwrf-input/SYNDAT-PLUS} + else + export COMINsyn=${COMINsyn:-${COMROOT}/gfs/prod/syndat} + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" 
+ export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wafs b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. $EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsblending b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsblending0p25 b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsgcip b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsgrib2 b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsgrib20p25 b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. 
$EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wave b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wave new file mode 100644 index 0000000000..ba7b7ad259 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wave @@ -0,0 +1,193 @@ +#! /usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +export waveGRD=${waveGRD:-'mx025'} + +#grid dependent variable defaults +export waveGRDN='1' # grid number for ww3_multi +export waveGRDG='10' # grid group for ww3_multi +export USE_WAV_RMP='NO' # YES/NO rmp grid remapping pre-processed coefficients +export waveMULTIGRID='.false.' # .true./.false. for multi or shel +export MESH_WAV="mesh.${waveGRD}.nc" # Mesh grid for wave model for CMEPS +export waveesmfGRD=' ' # input grid for multigrid + +#Grid dependent variables for various grids +case "${waveGRD}" in + "gnh_10m;aoc_9km;gsh_15m") + #GFSv16 settings: + export waveGRDN='1 2 3' + export waveGRDG='10 20 30' + export USE_WAV_RMP='YES' + export waveMULTIGRID='.true.' + export IOSRV='3' + export MESH_WAV=' ' + export waveesmfGRD='glox_10m' + export waveuoutpGRD='points' + export waveinterpGRD='glo_15mxt at_10m ep_10m wc_10m glo_30m' + export wavepostGRD='gnh_10m aoc_9km gsh_15m' + ;; + "gwes_30m") + #Grid used for P8 + export waveinterpGRD=' ' + export wavepostGRD='gwes_30m' + ;; + "mx025") + #Grid used for HR1 (tripolar 1/4 deg) + export waveinterpGRD='reg025' + export wavepostGRD=' ' + ;; + "glo_025") + #GEFSv13 regular lat/lon 1/4 deg grid + export waveinterpGRD=' ' + export wavepostGRD='glo_025' + ;; + *) + echo "No grid specific wave config values" + ;; +esac + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-${waveGRD}} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-${waveGRD}} # Native computational grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output 
parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEM would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_shel/multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'1'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. 
+export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.waveawipsbulls b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.waveawipsgridded b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wavegempak b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.waveinit b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wavepostbndpnt b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wavepostbndpntbll b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wavepostpnt b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wavepostsbs b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wavepostsbs new file mode 100644 index 0000000000..f9f8c81d44 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.wavepostsbs @@ -0,0 +1,28 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +if [[ -z ${waveinterpGRD} ]]; then + export DOGRI_WAV='YES' # Create interpolated grids +else + export DOGRI_WAV='NO' # Do not create interpolated grids +fi +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.waveprep b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. 
$EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/noent_mynn.xml b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/noent_mynn.xml new file mode 100644 index 0000000000..49c9af8f02 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/noent_mynn.xml @@ -0,0 +1,268 @@ + + + + + + + + + + + +]> + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/jobs/rocoto/makeinit_link.sh + + v17_p8_mynn_12x12_xjet_2wg_12wt_gfsinit_@H + gsd-fv3-dev + batch + xjet + 00:02:00 + 1:ppn=1:tpp=1 + --export=NONE + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/v17_p8_mynn_12x12_xjet_2wg_12wt/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23 + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt + ROTDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/v17_p8_mynn_12x12_xjet_2wg_12wt + ICSDIR/lfs1/BMC/gsd-fv3-test/rtfim/FV3ICS_L127 + CASEC768 + COMPONENTatmos + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/RUNDIRS/v17_p8_mynn_12x12_xjet_2wg_12wt + + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/v17_p8_mynn_12x12_xjet_2wg_12wt/gfs.@Y@m@d/@H/model_data/atmos/input + + + /lfs1/BMC/gsd-fv3-test/rtfim/FV3ICS_L127/@Y@m@d@H/gfs/C768/INPUT/gfs_data.tile6.nc + /lfs1/BMC/gsd-fv3-test/rtfim/FV3ICS_L127/@Y@m@d@H/gfs/C768/INPUT/sfc_data.tile6.nc + + + + + + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/jobs/rocoto/fcst.sh + + v17_p8_mynn_12x12_xjet_2wg_12wt_gfsfcst_@H + gsd-fv3-dev + batch + xjet + 04:00:00 + 168:ppn=24:tpp=1 + --export=NONE + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/v17_p8_mynn_12x12_xjet_2wg_12wt/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23 + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt + ROTDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/v17_p8_mynn_12x12_xjet_2wg_12wt + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/RUNDIRS/v17_p8_mynn_12x12_xjet_2wg_12wt + + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/v17_p8_mynn_12x12_xjet_2wg_12wt/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/v17_p8_mynn_12x12_xjet_2wg_12wt/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 
_f126-f126 _f132-f132 _f138-f138 _f144-f144 _f150-f150 _f156-f156 _f162-f162 _f168-f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/jobs/rocoto/post.sh + + v17_p8_mynn_12x12_xjet_2wg_12wt_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 00:20:00 + 1:ppn=24:tpp=1 + --export=NONE + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/v17_p8_mynn_12x12_xjet_2wg_12wt/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23 + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt + ROTDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/v17_p8_mynn_12x12_xjet_2wg_12wt + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/RUNDIRS/v17_p8_mynn_12x12_xjet_2wg_12wt + FHRGRP#grp# + FHRLST#lst# + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/v17_p8_mynn_12x12_xjet_2wg_12wt/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/jobs/rocoto/vrfy.sh + + v17_p8_mynn_12x12_xjet_2wg_12wt_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=1:tpp=1 + --export=NONE + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/v17_p8_mynn_12x12_xjet_2wg_12wt/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23 + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/jet + ROTDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/jet + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/home/Judy.K.Henderson/scratch1-test/gw_19may23/FV3GFSrun/RUNDIRS/jet + + + + + + + + + + g2g1 g2o1 pcp1 + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/jobs/rocoto/metp.sh + + v17_p8_mynn_12x12_xjet_2wg_12wt_gfsmetp#metpcase#_@H + gsd-fv3-dev + batch + xjet + 06:00:00 + 1:ppn=4:tpp=1 + --export=NONE + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/v17_p8_mynn_12x12_xjet_2wg_12wt/logs/@Y@m@d@H/gfsmetp#metpcase#.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23 + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/jet + ROTDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/jet + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/home/Judy.K.Henderson/scratch1-test/gw_19may23/FV3GFSrun/RUNDIRS/jet + SDATE_GFSNone + METPCASE#metpcase# + + + + + + + + + + + + + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/jobs/rocoto/arch.sh + + v17_p8_mynn_12x12_xjet_2wg_12wt_gfsarch_@H + gsd-fv3-dev + batch + service + 06:00:00 + 1:ppn=1:tpp=1 + 4096M + --export=NONE + + /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/v17_p8_mynn_12x12_xjet_2wg_12wt/logs/@Y@m@d@H/gfsarch.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23 + EXPDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/jet + ROTDIR/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/jet + NETgfs + CDUMPgfs + 
RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/home/Judy.K.Henderson/scratch1-test/gw_19may23/FV3GFSrun/RUNDIRS/jet + + + + + + + + + + + diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/runcmds b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/runcmds new file mode 100644 index 0000000000..46c90d8220 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/runcmds @@ -0,0 +1,7 @@ + +rocotorun -w v17_p8_mynn.xml -d v17_p8_mynn.db +rocotostat -w v17_p8_mynn.xml -d v17_p8_mynn.db + + +rocotorun -w v17_p8_post_sjet.xml -d post.db +rocotostat -w v17_p8_post_sjet.xml -d post.db diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/v17_p8_mynn.crontab b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/v17_p8_mynn.crontab new file mode 100644 index 0000000000..f8dcab432e --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/v17_p8_mynn.crontab @@ -0,0 +1,5 @@ + +#################### jet #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/v17_p8_mynn.db -w /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/v17_p8_mynn.xml +################################################################# diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/v17_p8_mynn.xml b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/v17_p8_mynn.xml new file mode 100644 index 0000000000..ead2acf0ed --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/v17_p8_mynn.xml @@ -0,0 +1,275 @@ + + + + + + + + + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3-dev + batch + xjet + 00:02:00 + 1:ppn=1:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + batch + xjet + 04:00:00 + 168:ppn=24:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 _f126-f126 _f132-f132 _f138-f138 _f144-f144 _f150-f150 _f156-f156 _f162-f162 _f168-f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 
f150 f156 f162 f168 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 00:20:00 + 1:ppn=24:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 00:30:00 + 1:ppn=1:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23 + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ATCFNAME&ATCFNAME; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/RUNDIRS/&PSLOT; + + + + + + + + + diff --git a/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/v17_p8_post_sjet.xml b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/v17_p8_post_sjet.xml new file mode 100644 index 0000000000..15db5a4017 --- /dev/null +++ b/FV3GFSwfm/v17_p8_mynn_12x12_xjet_2wg_12wt/v17_p8_post_sjet.xml @@ -0,0 +1,82 @@ + + + + + + + + + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + + _f000-f000 + f000 + f000 + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + sjet + 00:20:00 + 1:ppn=16:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/12x12x1wgx14wt b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/12x12x1wgx14wt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/1node_post b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/1node_post new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.aero b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. 
Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: '<tracer_name>:<factor>'. Use <tracer_name> = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.aeroanl b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.aeroanlfinal b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.aeroanlinit b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.aeroanlrun b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.aerosol_init b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.anal b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.anal new file mode 100644 index 0000000000..e3a17f9c6a --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=45,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${PDY}${cyc}" -ge "2020052612" && "${PDY}${cyc}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${PDY}${cyc}" -ge "2020082412" && "${PDY}${cyc}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + 
fi + + # Assimilate Metop-C GNSSRO + if [[ "${PDY}${cyc}" -ge "2020091612" && "${PDY}${cyc}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${PDY}${cyc}" -ge "2021031712" && "${PDY}${cyc}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assimilation of OMPS during period of bad data + if [[ "${PDY}${cyc}" -ge "2020011600" && "${PDY}${cyc}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${PDY}${cyc}" -ge "2020022012" && "${PDY}${cyc}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${PDY}${cyc}" -ge "2021052118" && "${PDY}${cyc}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${PDY}${cyc}" -ge "2021092206" && "${PDY}${cyc}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time. + # + # Turn off assimilation of all Metop-A MHS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.analcalc b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.analdiag b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +.
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.arch b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.arch new file mode 100644 index 0000000000..6a0f6306a8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.arch @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. "${EXPDIR}/config.resources" arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +echo "END: config.arch" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.atmanl b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.atmanl new file mode 100644 index 0000000000..c045704fa2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.atmanl @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.atmanlfinal b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.atmanlinit b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.atmanlrun b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.atmensanl b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.atmensanl new file mode 100644 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.atmensanlfinal b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.atmensanlfinal new file mode 100644 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.atmensanlinit b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.atmensanlinit new file mode 100644 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.atmensanlrun b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.atmensanlrun new file mode 100644 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.awips b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.base b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.base new file mode 100644 index 0000000000..7779cd128d --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.base @@ -0,0 +1,380 @@ +#! 
/usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="HERA" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="hera" +export PARTITION_POST_BATCH="hera" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT="fim" + +# Directories relative to installation areas: +export HOMEgfs=/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_19may23 +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/scratch1/NCEPDEV/global/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/scratch1/NCEPDEV/global/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/scratch1/NCEPDEV/global/glopara/dump" + +# USER specific paths +export HOMEDIR="/scratch1/BMC/gsd-fv3-dev/NCEPDEV/global/${USER}" +export STMP="${ROTDIR}/.." +export PTMP="${ROTDIR}/.." +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/scratch1/NCEPDEV/global/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." 
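+# (Note: SDATE/EDATE are 10-digit YYYYMMDDHH stamps, e.g. 2022111000 above is 00 UTC 10 Nov 2022.)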
+export assim_freq=6 +export PSLOT="v17_p8_thompson_12x12_1wg_14wt" +export EXPDIR="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_19may23/FV3GFSwfm/${PSLOT}" +export ROTDIR="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_19may23/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_thompson" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='mx025' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="NO" +export DO_JEDIATMENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turn on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# script defaults to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.com b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.com new file mode 100644 index 0000000000..6a824012c6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.com @@ -0,0 +1,93 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `declare -r`) +# -x: Mark variable for export (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match.
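+# +# Worked illustration (hypothetical values): when RUN_ENVIR is not "nco", running +# YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS +# with ROTDIR=/path/to/ROTDIR, RUN=gfs, PDY=20221110 and cyc=00 expands the template +# COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' (declared below) into +# COM_OBS=/path/to/ROTDIR/gfs.20221110/00/obs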
+# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_LAND_ANALYSIS_TMPL=${COM_BASE}'/analysis/land' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.coupled_ic b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.coupled_ic @@ -0,0 +1,43 @@ +#! 
/usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.defaults.s2sw b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.defaults.s2sw new file mode 100644 index 0000000000..1b0becefec --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.defaults.s2sw @@ -0,0 +1,26 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." + +# config.wave +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.earc b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ecen b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) 
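+   # e.g. IAUFHRS_ENKF="3,6,9": the grep above counts its 2 commas, the increment below makes ngrps=3, so NECENGRP=3 (one group per IAU forecast hour)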
+ ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.echgres b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ediag b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.efcs b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.efcs new file mode 100644 index 0000000000..95c2cb58de --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.efcs @@ -0,0 +1,97 @@ +#! /usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE_ENS}" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." 
-a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.eobs b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. $EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.epos b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.esfc b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. 
+ +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.eupd b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." # Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.fcst b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.fcst new file mode 100644 index 0000000000..e0943c33ce --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.fcst @@ -0,0 +1,431 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." 
+ +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export FCSTEXEC="ufs_model_haiqin.x" +else + export FCSTEXEC="ufs_model.x" +fi + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not used. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".false." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true."
]]; then tbf="_satmedmf" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +export progsigma=".true." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export imfdeepcnv=5 + export imfshalcnv=5 + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + else + export imfshalcnv=2 + fi +fi + +#Convection schemes ### JKH - affects field table name +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + #JKH ??? export dt_inner=40. ### JKH - 10dec + #JKH export sedi_semi=??? + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + #JKHif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_thompson" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export ltaerosol=".true." + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke${tbp}" + else + export ltaerosol=".false." 
+ export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + #JKHexport FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." 
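+  # Illustration of the restart_interval forms documented above: restart_interval=6 writes restarts every 6 hours and at the end of the forecast; "12 -1" writes a single restart at f012 only; with DOIAU=YES the "3 6" setting above writes restarts at f003 and f006. +  # In the gfs branch below, for example, restart_interval_gfs=12 with IAU_OFFSET=6 and FHMAX_GFS=120 builds restart_interval="15 27 39 ... 111".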
+ +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.fit2obs b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.gempak b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. 
$EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ice b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.landanl b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.landanlfinal b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.landanlfinal new file mode 100644 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.landanlinit b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.landanlinit new file mode 100644 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.landanlrun b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.landanlrun new file mode 100644 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.metp b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. +export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export 
precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.nsst b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.nsst new file mode 100644 index 0000000000..235c91f08b --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.nsst @@ -0,0 +1,33 @@ +#! /usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "${PDY}${cyc}" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocn b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocn @@ -0,0 +1,23 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocnanal b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocnanalbmat b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocnanalchkpt b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocnanalpost b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocnanalprep b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocnanalrun b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocnanalvrfy b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocnpost b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.post b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.post new file mode 100644 index 0000000000..3fca0a7b65 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.post @@ -0,0 +1,44 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_dwn=16 +elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_dwn=24 +elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_dwn=16 +else + export npe_dwn=24 +fi + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.postsnd b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.prep b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.prep new file mode 100644 index 0000000000..b05b82a43e --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. $EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. 
Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "${PDY}${cyc}" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.preplandobs b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.preplandobs new file mode 100644 index 0000000000..d69b0f7f59 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.preplandobs @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.preplandobs ########## +# Land Obs Prep specific + +echo "BEGIN: config.preplandobs" + +# Get task specific resources +. "${EXPDIR}/config.resources" preplandobs + +export FIMS_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/fims.nml.j2" +export IMS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/prep_ims.yaml" +export CALCFIMSEXE="${HOMEgfs}/exec/calcfIMS.exe" +export IMS2IODACONV="${HOMEgfs}/ush/imsfv3_scf2ioda.py" + +echo "END: config.preplandobs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.resources b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.resources new file mode 100644 index 0000000000..38efea7882 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.resources @@ -0,0 +1,972 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
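+  # For reference (illustrative only): each task config in this set sources this file
+  # with a single task name, e.g.: . "${EXPDIR}/config.resources" fcst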
+ echo "argument can be any one of the following:" + echo "coupled_ic aerosol_init" + echo "prep preplandobs" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_node_max=24 + elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ "${machine}" = "HERA" ]]; then + export npe_node_max=40 +elif [[ "${machine}" = "S4" ]]; then + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_node_max=32 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ "${machine}" = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ "${step}" = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "preplandobs" ]]; then + export wtime_preplandobs="00:05:00" + npe_preplandobs=1 + export npe_preplandobs + export nth_preplandobs=1 + npe_node_preplandobs=1 + export npe_node_preplandobs + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ "${step}" = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ "${step}" = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ "${step}" = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export 
memory_wavepostsbs_gfs="10GB" + +elif [[ "${step}" = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ "${step}" = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ "${step}" = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ "${step}" = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ 
"${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif [[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / 
${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ "${step}" = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ "${CASE}" = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ "${machine}" = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ "${machine}" = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ "${step}" = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ "${step}" = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export layout_x=${layout_x_gfs} + export 
layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case ${CASE} in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + #JKHdeclare -x "wtime_${step}_gfs"="06:00:00" + declare -x "wtime_${step}_gfs"="04:00:00" ## JKH - make walltime smaller + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + 
;; + esac + + unset _CDUMP _CDUMP_LIST + unset NTASKS_TOT + +elif [[ "${step}" = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ "${machine}" == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ "${step}" = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="00:20:00" ## JKH - make walltime smaller + #JKH export wtime_post_gfs="01:00:00" + #JKH export npe_post=126 + export npe_post=${npe_node_max} ## JKH - change to use 1 node for post + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ "${step}" = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ "${step}" = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ "${step}" = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ "${step}" = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ "${step}" = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ "${step}" = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ "${step}" = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ "${machine}" == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ "${machine}" == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ "${step}" = "echgres" ]]; 
then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif [[ "${step}" = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ "${step}" = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ "${step}" = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ "${step}" = "arch" || "${step}" = "earc" || "${step}" = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ "${step}" = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ "${step}" = "eobs" || "${step}" = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eobs=200 + elif [[ "${CASE}" = "C384" ]]; then + export npe_eobs=100 + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ "${machine}" = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ "${step}" = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ "${step}" = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if 
[[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ "${CASE}" = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ "${machine}" = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ "${step}" = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ "${CASE}" = "C384" || "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ "${step}" = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ "${step}" = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ "${step}" = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ "${step}" = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.resources.nco.static b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.resources.nco.static new file mode 100644 index 0000000000..d98e985b95 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.resources.nco.static @@ -0,0 +1,344 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + +elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + 
export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.sfcanl b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ufs b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ufs new file mode 100644 index 0000000000..708f6607a9 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.ufs @@ -0,0 +1,373 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
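+  # Example invocation (illustrative only; any resolution listed below works the same way):
+  #   . config.ufs --fv3 C768 --mom6 025 --cice6 025 --ww3 mx025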
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src 
scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=1 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=14 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "glo_025") + ntasks_ww3=262 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.vrfy b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wafs b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wafsblending b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wafsblending0p25 b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wafsgcip b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wafsgrib2 b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wafsgrib20p25 b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wave b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wave new file mode 100644 index 0000000000..ba7b7ad259 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wave @@ -0,0 +1,193 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +export waveGRD=${waveGRD:-'mx025'} + +#grid dependent variable defaults +export waveGRDN='1' # grid number for ww3_multi +export waveGRDG='10' # grid group for ww3_multi +export USE_WAV_RMP='NO' # YES/NO rmp grid remapping pre-processed coefficients +export waveMULTIGRID='.false.' # .true./.false. for multi or shel +export MESH_WAV="mesh.${waveGRD}.nc" # Mesh grid for wave model for CMEPS +export waveesmfGRD=' ' # input grid for multigrid + +#Grid dependent variables for various grids +case "${waveGRD}" in + "gnh_10m;aoc_9km;gsh_15m") + #GFSv16 settings: + export waveGRDN='1 2 3' + export waveGRDG='10 20 30' + export USE_WAV_RMP='YES' + export waveMULTIGRID='.true.' + export IOSRV='3' + export MESH_WAV=' ' + export waveesmfGRD='glox_10m' + export waveuoutpGRD='points' + export waveinterpGRD='glo_15mxt at_10m ep_10m wc_10m glo_30m' + export wavepostGRD='gnh_10m aoc_9km gsh_15m' + ;; + "gwes_30m") + #Grid used for P8 + export waveinterpGRD=' ' + export wavepostGRD='gwes_30m' + ;; + "mx025") + #Grid used for HR1 (tripolar 1/4 deg) + export waveinterpGRD='reg025' + export wavepostGRD=' ' + ;; + "glo_025") + #GEFSv13 regular lat/lon 1/4 deg grid + export waveinterpGRD=' ' + export wavepostGRD='glo_025' + ;; + *) + echo "No grid specific wave config values" + ;; +esac + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-${waveGRD}} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-${waveGRD}} # Native computational grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days 
+else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_shel/multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'1'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.waveawipsbulls b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. 
$EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.waveawipsgridded b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wavegempak b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.waveinit b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wavepostbndpnt b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wavepostbndpntbll b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wavepostpnt b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wavepostsbs b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wavepostsbs new file mode 100644 index 0000000000..f9f8c81d44 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.wavepostsbs @@ -0,0 +1,28 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +if [[ -z ${waveinterpGRD} ]]; then + export DOGRI_WAV='YES' # Create interpolated grids +else + export DOGRI_WAV='NO' # Do not create interpolated grids +fi +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.waveprep b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/runcmds b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/runcmds new file mode 100644 index 0000000000..c4ce99041f --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/runcmds @@ -0,0 +1,7 @@ + +rocotorun -w storms-thmp.xml -d storms-thmp.db +rocotostat -w storms-thmp.xml -d storms-thmp.db + +rocotorun -w test-thompson.xml -d test-thompson.db +rocotostat -w test-thompson.xml -d test-thompson.db + diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/storms-thmp.xml b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/storms-thmp.xml new file mode 100644 index 0000000000..ee8ff41b5f --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/storms-thmp.xml @@ -0,0 +1,161 @@ + + + + + + + + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + 201908281200 201909241200 27:00:00:00 + 201909141800 201910060000 21:06:00:00 + 201907020600 201912221200 173:06:00:00 + 201907110000 201909141200 65:12:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3 + batch + hera + 00:02:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_19may23/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + 
&ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3 + batch + hera + 02:30:00 + 95:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_19may23/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3 + batch + hera + 00:10:00 + 1:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/gw_19may23/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/test-thompson.crontab b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/test-thompson.crontab new file mode 100644 index 0000000000..5bf630615a --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/test-thompson.crontab @@ -0,0 +1,5 @@ + +#################### test-thompson #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/test-thompson/test-thompson.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/test-thompson/test-thompson.xml +################################################################# diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/test-thompson.xml b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/test-thompson.xml new file mode 100644 index 0000000000..caea04bf51 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_1wg_14wt/test-thompson.xml @@ -0,0 +1,157 @@ + + + + + + + + + + + +]> + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/test-thompson/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3 + batch + hera + 00:02:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + 
&ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3 + batch + hera + 04:00:00 + 101:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3 + batch + hera + 00:20:00 + 1:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/12x12x2wg12wt b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/12x12x2wg12wt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/2nodes_post b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/2nodes_post new file mode 100644 index 0000000000..e69de29bb2 diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.aero b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.aeroanl b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.aeroanlfinal b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.aeroanlinit b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.aeroanlrun b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.aerosol_init b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.aerosol_init @@ -0,0 +1,10 @@ +#! 
/usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.anal b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.anal new file mode 100644 index 0000000000..e3a17f9c6a --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=45,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${PDY}${cyc}" -ge "2020052612" && "${PDY}${cyc}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${PDY}${cyc}" -ge "2020082412" && "${PDY}${cyc}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + 
fi + + # Assimilate Metop-C GNSSRO + if [[ "${PDY}${cyc}" -ge "2020091612" && "${PDY}${cyc}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${PDY}${cyc}" -ge "2021031712" && "${PDY}${cyc}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${PDY}${cyc}" -ge "2020011600" && "${PDY}${cyc}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${PDY}${cyc}" -ge "2020022012" && "${PDY}${cyc}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${PDY}${cyc}" -ge "2021052118" && "${PDY}${cyc}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${PDY}${cyc}" -ge "2021092206" && "${PDY}${cyc}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.analcalc b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.analdiag b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. 
$EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.arch b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.arch new file mode 100644 index 0000000000..6a0f6306a8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.arch @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. "${EXPDIR}/config.resources" arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +echo "END: config.arch" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.atmanl b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.atmanl new file mode 100644 index 0000000000..c045704fa2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.atmanl @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.atmanlfinal b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.atmanlinit b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.atmanlrun b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.atmensanl b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.atmensanl new file mode 100644 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.atmensanl @@ -0,0 +1,22 @@ +#! /usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.atmensanlfinal b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.atmensanlfinal new file mode 100644 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.atmensanlinit b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.atmensanlinit new file mode 100644 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.atmensanlrun b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.atmensanlrun new file mode 100644 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.awips b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.base b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.base new file mode 100644 index 0000000000..9be8431c80 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.base @@ -0,0 +1,379 @@ +#! 
/usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="HERA" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="gsd-fv3" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="hera" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT="fim" + +# Directories relative to installation areas: +export HOMEgfs=/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/scratch1/NCEPDEV/global/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/scratch1/NCEPDEV/global/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/scratch1/NCEPDEV/global/glopara/dump" + +# USER specific paths +export HOMEDIR="/scratch1/BMC/gsd-fv3-dev/NCEPDEV/global/${USER}" +export STMP="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/" +export PTMP="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/" +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/scratch1/NCEPDEV/global/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022111000 +export EDATE=2022111000 +export EXP_WARM_START=".false." 
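# NOTE: SDATE/EDATE above are cycle datetimes in YYYYMMDDHH form (2022111000 = 00z 10 Nov 2022).
# Illustrative sketch only (not part of the original file) of how such a datetime maps onto the
# PDY/cyc pair referenced later in this config; in practice both are exported by the rocoto job
# environment (see the PDY/cyc envars in the experiment XML):
#   CDATE=${SDATE}; PDY=${CDATE:0:8}; cyc=${CDATE:8:2}   # -> PDY=20221110, cyc=00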
+export assim_freq=6 +export PSLOT="test-thompson" +export EXPDIR="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/${PSLOT}" +export ROTDIR="/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_thompson" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='mx025' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="NO" +export DO_JEDIATMENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# Turn on NSST in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# scripts default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.com b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.com new file mode 100644 index 0000000000..6a824012c6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.com @@ -0,0 +1,93 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotes so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()` +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...] +# +# options: +# -r: Make variable read-only (same as `declare -r`) +# -x: Mark variable for export (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match.
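To make the template mechanism described above concrete, here is a rough stand-in for what `generate_com()` produces from one of the templates declared below. This is illustrative only: the real helper in ush/preamble.sh also handles the -r/-x flags and the :TMPL overrides, and the ROTDIR and member values are placeholders.

    # Same template string as COM_ATMOS_HISTORY_TMPL below (single-quoted, so unexpanded here)
    COM_ATMOS_HISTORY_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}/model_data/atmos/history'
    ROTDIR="/scratch/FV3GFSrun/test-thompson" RUN="gfs" MEMDIR="mem001" YMD="20221110" HH="00"
    COM_ATMOS_HISTORY=$(eval echo "${COM_ATMOS_HISTORY_TMPL}")   # expand the template at runtime
    echo "${COM_ATMOS_HISTORY}"
    # -> /scratch/FV3GFSrun/test-thompson/gfs.20221110/00/mem001/model_data/atmos/history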
+# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_LAND_ANALYSIS_TMPL=${COM_BASE}'/analysis/land' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.coupled_ic b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.coupled_ic @@ -0,0 +1,43 @@ +#! 
/usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.defaults.s2sw b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.defaults.s2sw new file mode 100644 index 0000000000..1b0becefec --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.defaults.s2sw @@ -0,0 +1,26 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." + +# config.wave +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.earc b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ecen b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) 
+ ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.echgres b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ediag b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.efcs b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.efcs new file mode 100644 index 0000000000..95c2cb58de --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.efcs @@ -0,0 +1,97 @@ +#! /usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE_ENS}" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." 
-a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.eobs b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. $EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.epos b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.esfc b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. 
+ +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.eupd b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." # Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.fcst b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.fcst new file mode 100644 index 0000000000..e0943c33ce --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.fcst @@ -0,0 +1,431 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." 
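The component-config loop near the top of config.fcst above resolves each DO_<COMPONENT> switch through an eval round-trip before sourcing config.wave, config.ocn, and so on. For readers unfamiliar with that idiom, an equivalent sketch using bash indirect expansion is shown here; it is only an illustration of what the loop does, not the form the file uses, and the ${component,,} lowercasing assumes bash 4+.

    for component in WAVE OCN ICE AERO; do
      control="DO_${component}"
      if [[ "${!control}" == "YES" ]]; then           # ${!control} reads the value of DO_WAVE, DO_OCN, ...
        source "${EXPDIR}/config.${component,,}"      # e.g. DO_WAVE=YES -> source config.wave
      fi
    done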
+ +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export FCSTEXEC="ufs_model_haiqin.x" +else + export FCSTEXEC="ufs_model.x" +fi + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".false." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." 
]]; then tbf="_satmedmf" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +export progsigma=".true." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export imfdeepcnv=5 + export imfshalcnv=5 + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + else + export imfshalcnv=2 + fi +fi + +#Convection schemes ### JKH - affects field table name +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + #JKH ??? export dt_inner=40. ### JKH - 10dec + #JKH export sedi_semi=??? + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + #JKHif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_thompson" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export ltaerosol=".true." + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke${tbp}" + else + export ltaerosol=".false." 
+ export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + #JKHexport FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." 
+ +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.fit2obs b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.gempak b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. 
$EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ice b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.landanl b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.landanlfinal b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.landanlfinal new file mode 100644 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.landanlinit b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.landanlinit new file mode 100644 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.landanlrun b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.landanlrun new file mode 100644 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.metp b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. +export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export 
precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.nsst b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.nsst new file mode 100644 index 0000000000..235c91f08b --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.nsst @@ -0,0 +1,33 @@ +#! /usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "${PDY}${cyc}" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocn b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocn @@ -0,0 +1,23 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocnanal b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocnanalbmat b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocnanalchkpt b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocnanalpost b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocnanalprep b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocnanalrun b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocnanalvrfy b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocnpost b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.post b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.post new file mode 100644 index 0000000000..3fca0a7b65 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.post @@ -0,0 +1,44 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_dwn=16 +elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_dwn=24 +elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_dwn=16 +else + export npe_dwn=24 +fi + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.postsnd b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.prep b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.prep new file mode 100644 index 0000000000..b05b82a43e --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. $EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. 
Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "${PDY}${cyc}" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.preplandobs b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.preplandobs new file mode 100644 index 0000000000..d69b0f7f59 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.preplandobs @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.preplandobs ########## +# Land Obs Prep specific + +echo "BEGIN: config.preplandobs" + +# Get task specific resources +. "${EXPDIR}/config.resources" preplandobs + +export FIMS_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/fims.nml.j2" +export IMS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/prep_ims.yaml" +export CALCFIMSEXE="${HOMEgfs}/exec/calcfIMS.exe" +export IMS2IODACONV="${HOMEgfs}/ush/imsfv3_scf2ioda.py" + +echo "END: config.preplandobs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.resources b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.resources new file mode 100644 index 0000000000..38efea7882 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.resources @@ -0,0 +1,972 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "coupled_ic aerosol_init" + echo "prep preplandobs" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_node_max=24 + elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ "${machine}" = "HERA" ]]; then + export npe_node_max=40 +elif [[ "${machine}" = "S4" ]]; then + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_node_max=32 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ "${machine}" = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ "${step}" = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "preplandobs" ]]; then + export wtime_preplandobs="00:05:00" + npe_preplandobs=1 + export npe_preplandobs + export nth_preplandobs=1 + npe_node_preplandobs=1 + export npe_node_preplandobs + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ "${step}" = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ "${step}" = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ "${step}" = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export 
memory_wavepostsbs_gfs="10GB" + +elif [[ "${step}" = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ "${step}" = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ "${step}" = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ "${step}" = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ 
"${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif [[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / 
${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ "${step}" = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ "${CASE}" = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ "${machine}" = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ "${machine}" = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ "${step}" = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ "${step}" = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export layout_x=${layout_x_gfs} + export 
layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components; they usually land on the atmosphere tasks. + # However, it is suggested to limit mediator PETS to 300, as larger values may degrade performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case ${CASE} in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + #JKHdeclare -x "wtime_${step}_gfs"="06:00:00" + declare -x "wtime_${step}_gfs"="04:00:00" ## JKH - make walltime smaller + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + 
;; + esac + + unset _CDUMP _CDUMP_LIST + unset NTASKS_TOT + +elif [[ "${step}" = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ "${machine}" == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ "${step}" = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="00:20:00" ## JKH - make walltime smaller + #JKH export wtime_post_gfs="01:00:00" + #JKH export npe_post=126 + export npe_post=${npe_node_max} ## JKH - change to use 1 node for post + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ "${step}" = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ "${step}" = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ "${step}" = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ "${step}" = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ "${step}" = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ "${step}" = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ "${step}" = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ "${machine}" == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ "${machine}" == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ "${step}" = "echgres" ]]; 
then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif [[ "${step}" = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ "${step}" = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ "${step}" = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ "${step}" = "arch" || "${step}" = "earc" || "${step}" = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ "${step}" = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ "${step}" = "eobs" || "${step}" = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eobs=200 + elif [[ "${CASE}" = "C384" ]]; then + export npe_eobs=100 + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ "${machine}" = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ "${step}" = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ "${step}" = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if 
[[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ "${CASE}" = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ "${machine}" = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ "${step}" = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ "${CASE}" = "C384" || "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ "${step}" = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ "${step}" = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ "${step}" = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ "${step}" = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.resources.nco.static b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.resources.nco.static new file mode 100644 index 0000000000..d98e985b95 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.resources.nco.static @@ -0,0 +1,344 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + +elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + 
export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.sfcanl b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ufs b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ufs new file mode 100644 index 0000000000..5d3a16b329 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.ufs @@ -0,0 +1,373 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src 
scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + #JKHexport WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + 
CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "glo_025") + ntasks_ww3=262 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.vrfy b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wafs b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wafsblending b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wafsblending0p25 b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wafsgcip b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wafsgrib2 b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wafsgrib20p25 b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wave b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wave new file mode 100644 index 0000000000..ba7b7ad259 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wave @@ -0,0 +1,193 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +export waveGRD=${waveGRD:-'mx025'} + +#grid dependent variable defaults +export waveGRDN='1' # grid number for ww3_multi +export waveGRDG='10' # grid group for ww3_multi +export USE_WAV_RMP='NO' # YES/NO rmp grid remapping pre-processed coefficients +export waveMULTIGRID='.false.' # .true./.false. for multi or shel +export MESH_WAV="mesh.${waveGRD}.nc" # Mesh grid for wave model for CMEPS +export waveesmfGRD=' ' # input grid for multigrid + +#Grid dependent variables for various grids +case "${waveGRD}" in + "gnh_10m;aoc_9km;gsh_15m") + #GFSv16 settings: + export waveGRDN='1 2 3' + export waveGRDG='10 20 30' + export USE_WAV_RMP='YES' + export waveMULTIGRID='.true.' + export IOSRV='3' + export MESH_WAV=' ' + export waveesmfGRD='glox_10m' + export waveuoutpGRD='points' + export waveinterpGRD='glo_15mxt at_10m ep_10m wc_10m glo_30m' + export wavepostGRD='gnh_10m aoc_9km gsh_15m' + ;; + "gwes_30m") + #Grid used for P8 + export waveinterpGRD=' ' + export wavepostGRD='gwes_30m' + ;; + "mx025") + #Grid used for HR1 (tripolar 1/4 deg) + export waveinterpGRD='reg025' + export wavepostGRD=' ' + ;; + "glo_025") + #GEFSv13 regular lat/lon 1/4 deg grid + export waveinterpGRD=' ' + export wavepostGRD='glo_025' + ;; + *) + echo "No grid specific wave config values" + ;; +esac + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-${waveGRD}} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-${waveGRD}} # Native computational grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days 
+else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_shel/multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'1'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.waveawipsbulls b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. 
$EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.waveawipsgridded b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wavegempak b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.waveinit b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wavepostbndpnt b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wavepostbndpntbll b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wavepostpnt b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wavepostsbs b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wavepostsbs new file mode 100644 index 0000000000..f9f8c81d44 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.wavepostsbs @@ -0,0 +1,28 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +if [[ -z ${waveinterpGRD} ]]; then + export DOGRI_WAV='YES' # Create interpolated grids +else + export DOGRI_WAV='NO' # Do not create interpolated grids +fi +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.waveprep b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/runcmds b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/runcmds new file mode 100644 index 0000000000..15414663b6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/runcmds @@ -0,0 +1,4 @@ + +rocotorun -w test-thompson.xml -d test-thompson.db +rocotostat -w test-thompson.xml -d test-thompson.db + diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/test-thompson.crontab b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/test-thompson.crontab new file mode 100644 index 0000000000..5bf630615a --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/test-thompson.crontab @@ -0,0 +1,5 @@ + +#################### test-thompson #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/test-thompson/test-thompson.db -w /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/test-thompson/test-thompson.xml +################################################################# diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/test-thompson.xml b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/test-thompson.xml new file mode 100644 index 0000000000..caea04bf51 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_2wg_12wt/test-thompson.xml @@ -0,0 +1,157 @@ + + + + + + + + + + + +]> + + + + /scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSwfm/test-thompson/logs/@Y@m@d@H.log + + + 
202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3 + batch + hera + 00:02:00 + 1:ppn=1:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3 + batch + hera + 04:00:00 + 101:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun//RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3 + batch + hera + 00:20:00 + 1:ppn=40:tpp=1 + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun//RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.aero b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. 
Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: '<tracer_name>:<factor>'. Use <tracer_name> = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.aeroanl b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.aeroanlfinal b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.aeroanlinit b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.aeroanlrun b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. 
$EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.aerosol_init b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.aerosol_init @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.anal b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.anal new file mode 100644 index 0000000000..e3a17f9c6a --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=45,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. 
in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${PDY}${cyc}" -ge "2020052612" && "${PDY}${cyc}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${PDY}${cyc}" -ge "2020082412" && "${PDY}${cyc}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ "${PDY}${cyc}" -ge "2020091612" && "${PDY}${cyc}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${PDY}${cyc}" -ge "2021031712" && "${PDY}${cyc}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. 
+ # Assimilate COSMIC-2 GPS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assimilation of OMPS during period of bad data + if [[ "${PDY}${cyc}" -ge "2020011600" && "${PDY}${cyc}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${PDY}${cyc}" -ge "2020022012" && "${PDY}${cyc}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${PDY}${cyc}" -ge "2021052118" && "${PDY}${cyc}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${PDY}${cyc}" -ge "2021092206" && "${PDY}${cyc}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time. + # + # Turn off assimilation of all Metop-A MHS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.analcalc b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.analdiag b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. $EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.arch b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.arch new file mode 100644 index 0000000000..6a0f6306a8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.arch @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +echo "END: config.arch" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.atmanl b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.atmanl new file mode 100644 index 0000000000..c045704fa2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.atmanl @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.atmanlfinal b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.atmanlinit b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.atmanlrun b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.atmensanl b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.atmensanl new file mode 100644 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.atmensanl @@ -0,0 +1,22 @@ +#! 
/usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.atmensanlfinal b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.atmensanlfinal new file mode 100644 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.atmensanlinit b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.atmensanlinit new file mode 100644 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.atmensanlrun b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.atmensanlrun new file mode 100644 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.awips b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.base b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.base new file mode 100644 index 0000000000..b44118db5e --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.base @@ -0,0 +1,380 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="vjet" +export PARTITION_POST_BATCH="sjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT="fim" + +# Directories relative to installation areas: +export HOMEgfs=/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23 +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs1/BMC/gsd-fv3-test/NCEPDEV/global/$USER" +export STMP="/home/Judy.K.Henderson/scratch1-test/gw_19may23/FV3GFSrun" +export PTMP="/home/Judy.K.Henderson/scratch1-test/gw_19may23/FV3GFSrun" +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022110900 +export EDATE=2022110900 +export EXP_WARM_START=".false." 
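+# (note: as configured here this is a single-cycle test -- SDATE equals EDATE -- run as a
+# cold start in the forecast-only mode selected above)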
+export assim_freq=6 +export PSLOT="v17_p8_thompson_12x12_vjet_2wg_12wt" +export EXPDIR="/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/${PSLOT}" +export ROTDIR="/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_thompson" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='mx025' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="NO" +export DO_JEDIATMENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# Turn on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# scripts default to binary diagnostic files. Set diagnostic file +# variables here since they are used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.com b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.com new file mode 100644 index 0000000000..6a824012c6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.com @@ -0,0 +1,93 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `declare -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. 
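+#
+# Illustrative expansion (a sketch only; actual values depend on the experiment):
+# with ROTDIR=/path/to/comrot, RUN=gfs, PDY=20221109, cyc=00 and MEMDIR empty, the
+# first example above resolves COM_ATMOS_ANALYSIS to
+# /path/to/comrot/gfs.20221109/00/analysis/atmos (an empty MEMDIR just leaves a
+# harmless double slash); see COM_ATMOS_ANALYSIS_TMPL below.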
+# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_LAND_ANALYSIS_TMPL=${COM_BASE}'/analysis/land' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.coupled_ic b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.coupled_ic @@ -0,0 +1,43 @@ +#! 
/usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.defaults.s2sw b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.defaults.s2sw new file mode 100644 index 0000000000..1b0becefec --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.defaults.s2sw @@ -0,0 +1,26 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." + +# config.wave +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.earc b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ecen b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) 
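+  # e.g. IAUFHRS_ENKF="3,6,9": the grep pipeline above counts 2 commas and the
+  # increment below then gives NECENGRP=3 (one recentering group per IAU forecast hour)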
+ ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.echgres b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ediag b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.efcs b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.efcs new file mode 100644 index 0000000000..95c2cb58de --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.efcs @@ -0,0 +1,97 @@ +#! /usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE_ENS}" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." 
-a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.eobs b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. $EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.epos b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.esfc b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. 
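+# (with the 6-h IAU window used in config.base, the window start is 3 h before the
+# central analysis time)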
+ +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.eupd b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." # Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.fcst b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.fcst new file mode 100644 index 0000000000..e0943c33ce --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.fcst @@ -0,0 +1,431 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." 
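+
+# Illustrative summary (editor note, not part of the original file): for a fully
+# coupled atm-ocean-ice-wave-aerosol experiment, the DO_* switches set in the
+# experiment configuration flip the defaults above as follows:
+#   DO_COUPLED=YES -> cpl=.true.
+#   DO_AERO=YES    -> cplchm=.true.
+#   DO_ICE=YES     -> cplice=.true. and cplflx=.true.
+#   DO_OCN=YES     -> cplflx=.true.
+#   DO_WAVE=YES    -> cplwav=.true.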
+ +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export FCSTEXEC="ufs_model_haiqin.x" +else + export FCSTEXEC="ufs_model.x" +fi + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".false." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." 
]]; then tbf="_satmedmf" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +export progsigma=".true." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export imfdeepcnv=5 + export imfshalcnv=5 + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + else + export imfshalcnv=2 + fi +fi + +#Convection schemes ### JKH - affects field table name +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + #JKH ??? export dt_inner=40. ### JKH - 10dec + #JKH export sedi_semi=??? + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + #JKHif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_thompson" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export ltaerosol=".true." + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke${tbp}" + else + export ltaerosol=".false." 
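+    # Illustrative note (editor addition): the ${tbp} suffix set earlier from
+    # progsigma composes the Thompson field table name, e.g. with
+    # progsigma=".true." (tbp="_progsigma") the line below resolves to
+    #   field_table_thompson_noaero_tke_progsigma
+    # The ${tbf} suffix from satmedmf is only used by the Zhao-Carr, WSM6, and
+    # GFDL table names, not the Thompson ones.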
+ export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + #JKHexport FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." 
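+
+  # Worked illustration (editor note, values hypothetical) of the
+  # restart_interval forms documented above for the GDAS cycle:
+  #   restart_interval=6        -> restarts every 6 h and at the end of the forecast
+  #   restart_interval="3 -1"   -> a single restart set written at forecast hour 3
+  #   restart_interval="3 6"    -> restarts at hours 3 and 6 (the IAU setting above)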
+ +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.fit2obs b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.gempak b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. 
$EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ice b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.landanl b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.landanlfinal b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.landanlfinal new file mode 100644 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.landanlinit b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.landanlinit new file mode 100644 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.landanlrun b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.landanlrun new file mode 100644 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.metp b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. +export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export 
precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.nsst b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.nsst new file mode 100644 index 0000000000..235c91f08b --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.nsst @@ -0,0 +1,33 @@ +#! /usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "${PDY}${cyc}" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocn b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocn @@ -0,0 +1,23 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable is determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is time interval for applying increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocnanal b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size reolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocnanalbmat b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocnanalchkpt b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocnanalpost b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocnanalprep b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocnanalrun b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocnanalvrfy b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocnpost b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.post b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.post new file mode 100644 index 0000000000..3fca0a7b65 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.post @@ -0,0 +1,44 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_dwn=16 +elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_dwn=24 +elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_dwn=16 +else + export npe_dwn=24 +fi + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.postsnd b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.prep b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.prep new file mode 100644 index 0000000000..b05b82a43e --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "${PDY}${cyc}" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.preplandobs b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.preplandobs new file mode 100644 index 0000000000..d69b0f7f59 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.preplandobs @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.preplandobs ########## +# Land Obs Prep specific + +echo "BEGIN: config.preplandobs" + +# Get task specific resources +. "${EXPDIR}/config.resources" preplandobs + +export FIMS_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/fims.nml.j2" +export IMS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/prep_ims.yaml" +export CALCFIMSEXE="${HOMEgfs}/exec/calcfIMS.exe" +export IMS2IODACONV="${HOMEgfs}/ush/imsfv3_scf2ioda.py" + +echo "END: config.preplandobs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.resources b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.resources new file mode 100644 index 0000000000..38efea7882 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.resources @@ -0,0 +1,972 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "coupled_ic aerosol_init" + echo "prep preplandobs" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_node_max=24 + elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ "${machine}" = "HERA" ]]; then + export npe_node_max=40 +elif [[ "${machine}" = "S4" ]]; then + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_node_max=32 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ "${machine}" = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ "${step}" = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "preplandobs" ]]; then + export wtime_preplandobs="00:05:00" + npe_preplandobs=1 + export npe_preplandobs + export nth_preplandobs=1 + npe_node_preplandobs=1 + export npe_node_preplandobs + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ "${step}" = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ "${step}" = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ "${step}" = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export 
memory_wavepostsbs_gfs="10GB" + +elif [[ "${step}" = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ "${step}" = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ "${step}" = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ "${step}" = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ 
"${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif [[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / 
${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ "${step}" = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ "${CASE}" = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ "${machine}" = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ "${machine}" = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ "${step}" = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ "${step}" = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export layout_x=${layout_x_gfs} + export 
layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case ${CASE} in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + #JKHdeclare -x "wtime_${step}_gfs"="06:00:00" + declare -x "wtime_${step}_gfs"="04:00:00" ## JKH - make walltime smaller + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + 
;; + esac + + unset _CDUMP _CDUMP_LIST + unset NTASKS_TOT + +elif [[ "${step}" = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ "${machine}" == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requiremtn + npe_ocnpost=2 + fi + +elif [[ "${step}" = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="00:20:00" ## JKH - make walltime smaller + #JKH export wtime_post_gfs="01:00:00" + #JKH export npe_post=126 + export npe_post=${npe_node_max} ## JKH - change to use 1 node for post + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ "${step}" = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ "${step}" = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ "${step}" = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ "${step}" = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ "${step}" = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ "${step}" = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ "${step}" = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ "${machine}" == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ "${machine}" == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ "${step}" = "echgres" ]]; 
then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif [[ "${step}" = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ "${step}" = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ "${step}" = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ "${step}" = "arch" || "${step}" = "earc" || "${step}" = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ "${step}" = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ "${step}" = "eobs" || "${step}" = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eobs=200 + elif [[ "${CASE}" = "C384" ]]; then + export npe_eobs=100 + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ "${machine}" = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ "${step}" = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ "${step}" = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if 
[[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ "${CASE}" = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ "${machine}" = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ "${step}" = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ "${CASE}" = "C384" || "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ "${step}" = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ "${step}" = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ "${step}" = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ "${step}" = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.resources.nco.static b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.resources.nco.static new file mode 100644 index 0000000000..d98e985b95 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.resources.nco.static @@ -0,0 +1,344 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + +elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + 
export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.sfcanl b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ufs b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ufs new file mode 100644 index 0000000000..c9021961d6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.ufs @@ -0,0 +1,378 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
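# [Editor's sketch, not part of the committed file.] Arguments are passed as
# flag/value pairs, one pair per component; a coupled setup might source this
# file as:
#   . $EXPDIR/config.ufs --fv3 C768 --mom6 025 --cice6 025 --ww3 mx025
# while an ATM-only run would pass only "--fv3 C768". Components that are not
# requested keep their skip_* flags set to "true" below.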
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src 
scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + if [[ "${machine}" == "HERA" ]] ; then + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + elif [[ "${PARTITION_BATCH}" == "vjet" ]] ; then + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + elif [[ "${PARTITION_BATCH}" == "xjet" ]] ; then + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + fi + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + 
MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "glo_025") + ntasks_ww3=262 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.vrfy b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wafs b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wafsblending b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wafsblending0p25 b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wafsgcip b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wafsgrib2 b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wafsgrib20p25 b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wave b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wave new file mode 100644 index 0000000000..ba7b7ad259 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wave @@ -0,0 +1,193 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +export waveGRD=${waveGRD:-'mx025'} + +#grid dependent variable defaults +export waveGRDN='1' # grid number for ww3_multi +export waveGRDG='10' # grid group for ww3_multi +export USE_WAV_RMP='NO' # YES/NO rmp grid remapping pre-processed coefficients +export waveMULTIGRID='.false.' # .true./.false. for multi or shel +export MESH_WAV="mesh.${waveGRD}.nc" # Mesh grid for wave model for CMEPS +export waveesmfGRD=' ' # input grid for multigrid + +#Grid dependent variables for various grids +case "${waveGRD}" in + "gnh_10m;aoc_9km;gsh_15m") + #GFSv16 settings: + export waveGRDN='1 2 3' + export waveGRDG='10 20 30' + export USE_WAV_RMP='YES' + export waveMULTIGRID='.true.' + export IOSRV='3' + export MESH_WAV=' ' + export waveesmfGRD='glox_10m' + export waveuoutpGRD='points' + export waveinterpGRD='glo_15mxt at_10m ep_10m wc_10m glo_30m' + export wavepostGRD='gnh_10m aoc_9km gsh_15m' + ;; + "gwes_30m") + #Grid used for P8 + export waveinterpGRD=' ' + export wavepostGRD='gwes_30m' + ;; + "mx025") + #Grid used for HR1 (tripolar 1/4 deg) + export waveinterpGRD='reg025' + export wavepostGRD=' ' + ;; + "glo_025") + #GEFSv13 regular lat/lon 1/4 deg grid + export waveinterpGRD=' ' + export wavepostGRD='glo_025' + ;; + *) + echo "No grid specific wave config values" + ;; +esac + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-${waveGRD}} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-${waveGRD}} # Native computational grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days 
+else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_shel/multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'1'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.waveawipsbulls b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. 
$EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.waveawipsgridded b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wavegempak b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.waveinit b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wavepostbndpnt b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wavepostbndpntbll b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wavepostpnt b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wavepostsbs b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wavepostsbs new file mode 100644 index 0000000000..f9f8c81d44 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.wavepostsbs @@ -0,0 +1,28 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +if [[ -z ${waveinterpGRD} ]]; then + export DOGRI_WAV='YES' # Create interpolated grids +else + export DOGRI_WAV='NO' # Do not create interpolated grids +fi +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.waveprep b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/runcmds b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/runcmds new file mode 100644 index 0000000000..f1a8d2d8cd --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_thompson.xml -d v17_p8_thompson.db +rocotostat -w v17_p8_thompson.xml -d v17_p8_thompson.db diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/v17_p8_thompson.crontab b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/v17_p8_thompson.crontab new file mode 100644 index 0000000000..0c84f6f52a --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/v17_p8_thompson.crontab @@ -0,0 +1,5 @@ + +#################### jet #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/v17_p8_thompson.db -w /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/v17_p8_thompson.xml +################################################################# diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/v17_p8_thompson.xml b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/v17_p8_thompson.xml new file mode 100644 index 0000000000..230edb2b5a --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_vjet_2wg_12wt/v17_p8_thompson.xml @@ -0,0 +1,273 @@ + + + + + + + + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + 
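<!-- Editor's note (hedged reading, not part of the committed XML): the cycledef
     values below, "202211090000 202211090000 24:00:00", give the first cycle,
     last cycle, and stride, i.e. a single 2022-11-09 00Z cycle stepped at 24 h;
     the rocotorun command listed in the runcmds file advances this workflow. -->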
+ + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3-dev + batch + vjet + 00:02:00 + 1:ppn=1:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + batch + vjet + 04:00:00 + 252:ppn=16:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 _f126-f126 _f132-f132 _f138-f138 _f144-f144 _f150-f150 _f156-f156 _f162-f162 _f168-f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + vjet + 00:20:00 + 1:ppn=16:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aero b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. 
Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: '<tracer_name>:<factor>'. Use <tracer_name> = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanl b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanl new file mode 100644 index 0000000000..27ef3aca7d --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlfinal b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlinit b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlrun b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. 
$EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aerosol_init b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.aerosol_init @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.anal b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.anal new file mode 100644 index 0000000000..e3a17f9c6a --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.anal @@ -0,0 +1,146 @@ +#! /usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=45,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. 
in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${PDY}${cyc}" -ge "2020052612" && "${PDY}${cyc}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${PDY}${cyc}" -ge "2020082412" && "${PDY}${cyc}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ "${PDY}${cyc}" -ge "2020091612" && "${PDY}${cyc}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${PDY}${cyc}" -ge "2021031712" && "${PDY}${cyc}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to ../global_convinfo.txt. Thus, the logic below is not + # needed at this time. 
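# [Editor's worked example, not part of the committed file.] Each date window
# above compares "${PDY}${cyc}" as a ten-digit YYYYMMDDHH value, e.g.
# PDY=20200526 with cyc=12 yields 2020052612, so that cycle selects the
# .2020052612 convinfo in the COSMIC-2 block:
#   [[ "2020052612" -ge "2020052612" && "2020052612" -lt "2020082412" ]]  # -> true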
+ # Assimilate COSMIC-2 GPS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assimilation of OMPS during period of bad data + if [[ "${PDY}${cyc}" -ge "2020011600" && "${PDY}${cyc}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${PDY}${cyc}" -ge "2020022012" && "${PDY}${cyc}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${PDY}${cyc}" -ge "2021052118" && "${PDY}${cyc}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${PDY}${cyc}" -ge "2021092206" && "${PDY}${cyc}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time. + # + # Turn off assimilation of all Metop-A MHS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.analcalc b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.analdiag b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. $EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.arch b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.arch new file mode 100644 index 0000000000..6a0f6306a8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.arch @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +echo "END: config.arch" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanl b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanl new file mode 100644 index 0000000000..c045704fa2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanl @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlfinal b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlinit b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlrun b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanl b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanl new file mode 100644 index 0000000000..4d945ea717 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanl @@ -0,0 +1,22 @@ +#! 
/usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlfinal b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlfinal new file mode 100644 index 0000000000..5d8ec458c3 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlinit b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlinit new file mode 100644 index 0000000000..34429023bb --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlrun b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlrun new file mode 100644 index 0000000000..01f211a17a --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.awips b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.base b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.base new file mode 100644 index 0000000000..f9d8d0b594 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.base @@ -0,0 +1,380 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="JET" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="gsd-fv3-dev" +export QUEUE="batch" +export QUEUE_SERVICE="batch" +export PARTITION_BATCH="xjet" +export PARTITION_POST_BATCH="sjet" +export PARTITION_SERVICE="service" + +# Project to use in mass store: +HPSS_PROJECT="fim" + +# Directories relative to installation areas: +export HOMEgfs=/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23 +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="/lfs4/HFIP/hfv3gfs/glopara/nwpara" # TODO: set via prod_envir in Ops +export COMROOT="/lfs4/HFIP/hfv3gfs/glopara/com" # TODO: set via prod_envir in Ops +export COMINsyn="${COMROOT}/gfs/prod/syndat" +export DMPDIR="/lfs4/HFIP/hfv3gfs/glopara/dump" + +# USER specific paths +export HOMEDIR="/lfs1/BMC/gsd-fv3-test/NCEPDEV/global/$USER" +export STMP="/home/Judy.K.Henderson/scratch1-test/gw_19may23/FV3GFSrun" +export PTMP="/home/Judy.K.Henderson/scratch1-test/gw_19may23/FV3GFSrun" +export NOSCRUB="$HOMEDIR" + +# Base directories for various builds +export BASE_GIT="/lfs4/HFIP/hfv3gfs/glopara/git" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="forecast-only" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="YES" +export CHGRP_CMD="chgrp rstprod" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=2022110900 +export EDATE=2022110900 +export EXP_WARM_START=".false." 
+export assim_freq=6 +export PSLOT="v17_p8_thompson_12x12_xjet_2wg_12wt" +export EXPDIR="/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/${PSLOT}" +export ROTDIR="/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=ATM + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="FV3_GFS_v17_p8_thompson" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="C768" +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='mx025' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." 
+ export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=1 # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=8 + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="NO" +export DO_JEDIATMENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." 
]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# Turn on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="NO" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="NO" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Switch to generate netcdf or binary diagnostic files. If not specified, +# scripts default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="YES" # save data to HPSS archive +export LOCALARCH="NO" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.com b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.com new file mode 100644 index 0000000000..6a824012c6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.com @@ -0,0 +1,93 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...] +# +# options: +# -r: Make variable read-only (same as `declare -r`) +# -x: Mark variable for export (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match.
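The single-quoting rule described above is what keeps the ${ROTDIR}/${RUN}/... placeholders intact until a job actually calls generate_com() at run time. The lines below are only a rough illustration of that deferred expansion: toy_generate_com and the sample paths are made up for this sketch, and the real helper is the generate_com() defined in ush/preamble.sh, whose implementation may differ.

# Illustration only -- toy_generate_com is a hypothetical stand-in, not the
# generate_com() shipped in ush/preamble.sh.
COM_ATMOS_ANALYSIS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}/analysis/atmos'
toy_generate_com() {
  local var tmpl value
  for var in "$@"; do
    tmpl="${var}_TMPL"               # default template name, e.g. COM_ATMOS_ANALYSIS_TMPL
    value=$(eval echo "${!tmpl}")    # placeholders are filled in now, at call time
    declare -gx "${var}=${value}"
  done
}
ROTDIR=/path/to/rotdir RUN=gfs YMD=20221109 HH=00 MEMDIR=mem001
toy_generate_com COM_ATMOS_ANALYSIS
echo "${COM_ATMOS_ANALYSIS}"         # -> /path/to/rotdir/gfs.20221109/00/mem001/analysis/atmos

Because the template is assigned inside single quotes, sourcing config.com expands nothing; only the eval inside the helper does the substitution.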
+# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_LAND_ANALYSIS_TMPL=${COM_BASE}'/analysis/land' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.coupled_ic b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.coupled_ic @@ -0,0 +1,43 @@ +#! 
/usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.defaults.s2sw b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.defaults.s2sw new file mode 100644 index 0000000000..1b0becefec --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.defaults.s2sw @@ -0,0 +1,26 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." + +# config.wave +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.earc b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ecen b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) 
+ ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.echgres b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. $EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ediag b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.efcs b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.efcs new file mode 100644 index 0000000000..95c2cb58de --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.efcs @@ -0,0 +1,97 @@ +#! /usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE_ENS}" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." 
-a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.eobs b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. $EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.epos b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.esfc b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. 
+ +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.eupd b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." # Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.fcst b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.fcst new file mode 100644 index 0000000000..e0943c33ce --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.fcst @@ -0,0 +1,431 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." 
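A side note on the component-config sourcing loop earlier in this file: the $(eval echo \$$control) construct is an indirect lookup of the DO_WAVE/DO_OCN/DO_ICE/DO_AERO switches. Purely as an illustrative sketch (not a change to the workflow), the same lookup can be written with bash's built-in ${!var} indirection and no eval:

# Sketch only: equivalent DO_<component> lookup using ${!var} indirection,
# assuming the DO_* switches from config.base are already set as above.
for component in WAVE OCN ICE AERO; do
  control="DO_${component}"
  if [[ "${!control}" == "YES" ]]; then
    echo "config.$(echo "${component}" | awk '{ print tolower($1) }') would be sourced"
  fi
done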
+ +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export FCSTEXEC="ufs_model_haiqin.x" +else + export FCSTEXEC="ufs_model.x" +fi + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." + +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + + + +# Sponge layer settings +export tau=10.0 +export rf_cutoff=7.5e2 +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".false." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=.true. + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." 
]]; then tbf="_satmedmf" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +export progsigma=".true." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export imfdeepcnv=5 + export imfshalcnv=5 + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + else + export imfshalcnv=2 + fi +fi + +#Convection schemes ### JKH - affects field table name +tbp="" +if [ "$progsigma" = ".true." ]; then tbp="_progsigma" ; fi + + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export dddmp=0.1 + export d4_bg=0.12 + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" + export ltaerosol=.true. + export lradar=.true. + + ## GSL namelist changes + export vtdm4_nh_nonmono=0.03 ### JKH - 10dec + export nord=3 ### JKH - 10dec + #JKH ??? export dt_inner=40. ### JKH - 10dec + #JKH export sedi_semi=??? + if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then + export k_split=6 + export n_split=2 + fi + + export kord_tm=-11 ### JKH - 10dec + export kord_mt=11 ### JKH - 10dec + export kord_wz=11 ### JKH - 10dec + export kord_tr=11 ### JKH - 10dec + export d_con_nonmono=0.5 ### JKH - 10dec + export hord_mt_nh_nonmono=6 ### JKH - 10dec + export hord_xx_nh_nonmono=6 ### JKH - 10dec + else + export ncld=2 + #JKHif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_thompson" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then + export ltaerosol=".true." + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke${tbp}" + else + export ltaerosol=".false." 
+ export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + #JKHexport FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + fi + +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" + +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. + # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." 
+ +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.fit2obs b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." ]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.gempak b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. 
$EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ice b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.landanl b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.landanl new file mode 100644 index 0000000000..51174dedca --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlfinal b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlfinal new file mode 100644 index 0000000000..242089325a --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlinit b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlinit new file mode 100644 index 0000000000..62054525c8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlrun b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlrun new file mode 100644 index 0000000000..0f44011c1d --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.metp b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. +export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export 
precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.nsst b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.nsst new file mode 100644 index 0000000000..235c91f08b --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.nsst @@ -0,0 +1,33 @@ +#! /usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "${PDY}${cyc}" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocn b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocn @@ -0,0 +1,23 @@ +#! 
/usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanal b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanal new file mode 100644 index 0000000000..f5925809fc --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST= +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25 +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML= +export SOCA_NINNER=50 +export CASE_ANL=C48 +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=s2s_v1 +export R2D2_OBS_SRC=gdas_marine +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=/scratch2/NCEPDEV/marineda/r2d2 + +# NICAS +export NICAS_RESOL=1 +export NICAS_GRID_SIZE=15000 + +echo "END: config.ocnanal" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalbmat b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalchkpt b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalpost b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalprep b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalrun b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalvrfy b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnpost b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.post b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.post new file mode 100644 index 0000000000..3fca0a7b65 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.post @@ -0,0 +1,44 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=YES # grib2 flux file written by post + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +export downset=2 +if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_dwn=16 +elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_dwn=24 +elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_dwn=16 +else + export npe_dwn=24 +fi + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.postsnd b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.prep b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.prep new file mode 100644 index 0000000000..b05b82a43e --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. 
$EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "${PDY}${cyc}" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.preplandobs b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.preplandobs new file mode 100644 index 0000000000..d69b0f7f59 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.preplandobs @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.preplandobs ########## +# Land Obs Prep specific + +echo "BEGIN: config.preplandobs" + +# Get task specific resources +. "${EXPDIR}/config.resources" preplandobs + +export FIMS_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/fims.nml.j2" +export IMS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/prep_ims.yaml" +export CALCFIMSEXE="${HOMEgfs}/exec/calcfIMS.exe" +export IMS2IODACONV="${HOMEgfs}/ush/imsfv3_scf2ioda.py" + +echo "END: config.preplandobs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.resources b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.resources new file mode 100644 index 0000000000..38efea7882 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.resources @@ -0,0 +1,972 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
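+  # Illustrative usage note (added comment, not exhaustive): each config.<task> file
+  # sources this script with its task name, e.g.
+  #   . "${EXPDIR}/config.resources" post
+  # which exports the walltime, task count, threads, and tasks-per-node for that step
+  # (wtime_post, npe_post, nth_post, npe_node_post) for the current machine/partition.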
+ echo "argument can be any one of the following:" + echo "coupled_ic aerosol_init" + echo "prep preplandobs" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_node_max=24 + elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ "${machine}" = "HERA" ]]; then + export npe_node_max=40 +elif [[ "${machine}" = "S4" ]]; then + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_node_max=32 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ "${machine}" = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ "${step}" = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "preplandobs" ]]; then + export wtime_preplandobs="00:05:00" + npe_preplandobs=1 + export npe_preplandobs + export nth_preplandobs=1 + npe_node_preplandobs=1 + export npe_node_preplandobs + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ "${step}" = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ "${step}" = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ "${step}" = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export 
memory_wavepostsbs_gfs="10GB" + +elif [[ "${step}" = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ "${step}" = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ "${step}" = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ "${step}" = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ 
"${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif [[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / 
${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ "${step}" = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ "${CASE}" = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ "${machine}" = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ "${machine}" = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ "${step}" = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ "${step}" = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export layout_x=${layout_x_gfs} + export 
layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case ${CASE} in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + #JKHdeclare -x "wtime_${step}_gfs"="06:00:00" + declare -x "wtime_${step}_gfs"="04:00:00" ## JKH - make walltime smaller + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + 
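+      # Illustrative arithmetic (added note; assumes the C768/xjet values set by
+      # config.ufs for this 12x12/2wg/12wt experiment and QUILTING=.true., with no
+      # coupled components): ntasks_fv3_gfs = 12*12*6 = 864 and
+      # ntasks_quilt_gfs = 2*(12*6) = 144, so with nthreads_fv3_gfs=4 the loop above
+      # gives FV3PETS=3456, QUILTPETS=576 and npe_fcst_gfs = ATMPETS = 4032.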
;; + esac + + unset _CDUMP _CDUMP_LIST + unset NTASKS_TOT + +elif [[ "${step}" = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ "${machine}" == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requiremtn + npe_ocnpost=2 + fi + +elif [[ "${step}" = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="00:20:00" ## JKH - make walltime smaller + #JKH export wtime_post_gfs="01:00:00" + #JKH export npe_post=126 + export npe_post=${npe_node_max} ## JKH - change to use 1 node for post + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ "${step}" = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ "${step}" = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ "${step}" = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ "${step}" = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ "${step}" = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ "${step}" = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ "${step}" = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ "${machine}" == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ "${machine}" == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ "${step}" = "echgres" ]]; 
then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif [[ "${step}" = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ "${step}" = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ "${step}" = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ "${step}" = "arch" || "${step}" = "earc" || "${step}" = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ "${step}" = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ "${step}" = "eobs" || "${step}" = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eobs=200 + elif [[ "${CASE}" = "C384" ]]; then + export npe_eobs=100 + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ "${machine}" = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ "${step}" = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ "${step}" = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if 
[[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ "${CASE}" = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ "${machine}" = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ "${step}" = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ "${CASE}" = "C384" || "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ "${step}" = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ "${step}" = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ "${step}" = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ "${step}" = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.resources.nco.static b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.resources.nco.static new file mode 100644 index 0000000000..d98e985b95 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.resources.nco.static @@ -0,0 +1,344 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + +elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + 
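+  # Sizing note (illustrative): with npe_node_max=128 and nth_eupd=14 below, the bc
+  # expression gives npe_node_eupd = 128/14 = 9 tasks per node (integer division),
+  # i.e. the 315 eupd tasks are spread over 35 nodes.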
export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.sfcanl b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ufs b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ufs new file mode 100644 index 0000000000..aabf25c0fe --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.ufs @@ -0,0 +1,378 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" 
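+  # Illustrative invocation (added comment): options are parsed in pairs below, and
+  # components other than FV3 are only enabled when their flag is passed, e.g.
+  #   . config.ufs --fv3 C96                                    # ATM-only setup
+  #   . config.ufs --fv3 C768 --mom6 025 --cice6 025 --ww3 mx025  # coupled setup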
+ echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src 
scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + export WRITE_GROUP_GFS=2 + if [[ "${machine}" == "HERA" ]] ; then + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + elif [[ "${PARTITION_BATCH}" == "vjet" ]] ; then + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + elif [[ "${PARTITION_BATCH}" == "xjet" ]] ; then + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + fi + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + 
MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "glo_025") + ntasks_ww3=262 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.vrfy b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.vrfy new file mode 100644 index 0000000000..c277e8e963 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.vrfy @@ -0,0 +1,110 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="YES" # Make 0.25-deg pgb files in ARCDIR for precip verification +export VRFYRAD="YES" # Radiance data assimilation monitoring +export VRFYOZN="YES" # Ozone data assimilation monitoring +export VRFYMINMON="YES" # GSI minimization monitoring +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="YES" # Cyclone genesis verification +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +export ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafs b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsblending b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsblending0p25 b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgcip b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgrib2 b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgrib20p25 b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wave b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wave new file mode 100644 index 0000000000..ba7b7ad259 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wave @@ -0,0 +1,193 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +export waveGRD=${waveGRD:-'mx025'} + +#grid dependent variable defaults +export waveGRDN='1' # grid number for ww3_multi +export waveGRDG='10' # grid group for ww3_multi +export USE_WAV_RMP='NO' # YES/NO rmp grid remapping pre-processed coefficients +export waveMULTIGRID='.false.' # .true./.false. for multi or shel +export MESH_WAV="mesh.${waveGRD}.nc" # Mesh grid for wave model for CMEPS +export waveesmfGRD=' ' # input grid for multigrid + +#Grid dependent variables for various grids +case "${waveGRD}" in + "gnh_10m;aoc_9km;gsh_15m") + #GFSv16 settings: + export waveGRDN='1 2 3' + export waveGRDG='10 20 30' + export USE_WAV_RMP='YES' + export waveMULTIGRID='.true.' + export IOSRV='3' + export MESH_WAV=' ' + export waveesmfGRD='glox_10m' + export waveuoutpGRD='points' + export waveinterpGRD='glo_15mxt at_10m ep_10m wc_10m glo_30m' + export wavepostGRD='gnh_10m aoc_9km gsh_15m' + ;; + "gwes_30m") + #Grid used for P8 + export waveinterpGRD=' ' + export wavepostGRD='gwes_30m' + ;; + "mx025") + #Grid used for HR1 (tripolar 1/4 deg) + export waveinterpGRD='reg025' + export wavepostGRD=' ' + ;; + "glo_025") + #GEFSv13 regular lat/lon 1/4 deg grid + export waveinterpGRD=' ' + export wavepostGRD='glo_025' + ;; + *) + echo "No grid specific wave config values" + ;; +esac + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-${waveGRD}} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-${waveGRD}} # Native computational grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days 
+else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_shel/multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'1'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.waveawipsbulls b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. 
$EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.waveawipsgridded b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavegempak b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.waveinit b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostbndpnt b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostbndpntbll b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostpnt b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostsbs b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostsbs new file mode 100644 index 0000000000..f9f8c81d44 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.wavepostsbs @@ -0,0 +1,28 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. $EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +if [[ -z ${waveinterpGRD} ]]; then + export DOGRI_WAV='YES' # Create interpolated grids +else + export DOGRI_WAV='NO' # Do not create interpolated grids +fi +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.waveprep b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/runcmds b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/runcmds new file mode 100644 index 0000000000..f1a8d2d8cd --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/runcmds @@ -0,0 +1,3 @@ + +rocotorun -w v17_p8_thompson.xml -d v17_p8_thompson.db +rocotostat -w v17_p8_thompson.xml -d v17_p8_thompson.db diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.crontab b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.crontab new file mode 100644 index 0000000000..40fc7f2e4d --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.crontab @@ -0,0 +1,5 @@ + +#################### jet #################### +MAILTO="" +*/5 * * * * /apps/rocoto/1.3.3/bin/rocotorun -d /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.db -w /lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.xml +################################################################# diff --git a/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.xml b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.xml new file mode 100644 index 0000000000..c4bb12ba67 --- /dev/null +++ b/FV3GFSwfm/v17_p8_thompson_12x12_xjet_2wg_12wt/v17_p8_thompson.xml @@ -0,0 +1,273 @@ + + + + + + + + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + 
+ + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3-dev + batch + xjet + 00:02:00 + 1:ppn=1:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + batch + xjet + 04:00:00 + 168:ppn=24:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 _f126-f126 _f132-f132 _f138-f138 _f144-f144 _f150-f150 _f156-f156 _f162-f162 _f168-f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 00:20:00 + 1:ppn=24:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23/FV3GFSrun/RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + + diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 0000000000..0927556b54 --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,157 @@ +### GNU LESSER GENERAL PUBLIC LICENSE + +Version 3, 29 June 2007 + +Copyright (C) 2007 Free Software Foundation, Inc. + + +Everyone is permitted to copy and distribute verbatim copies of this +license document, but changing it is not allowed. + +This version of the GNU Lesser General Public License incorporates the +terms and conditions of version 3 of the GNU General Public License, +supplemented by the additional permissions listed below. + +#### 0. Additional Definitions. + +As used herein, "this License" refers to version 3 of the GNU Lesser +General Public License, and the "GNU GPL" refers to version 3 of the +GNU General Public License. + +"The Library" refers to a covered work governed by this License, other +than an Application or a Combined Work as defined below. + +An "Application" is any work that makes use of an interface provided +by the Library, but which is not otherwise based on the Library. 
+Defining a subclass of a class defined by the Library is deemed a mode +of using an interface provided by the Library. + +A "Combined Work" is a work produced by combining or linking an +Application with the Library. The particular version of the Library +with which the Combined Work was made is also called the "Linked +Version". + +The "Minimal Corresponding Source" for a Combined Work means the +Corresponding Source for the Combined Work, excluding any source code +for portions of the Combined Work that, considered in isolation, are +based on the Application, and not on the Linked Version. + +The "Corresponding Application Code" for a Combined Work means the +object code and/or source code for the Application, including any data +and utility programs needed for reproducing the Combined Work from the +Application, but excluding the System Libraries of the Combined Work. + +#### 1. Exception to Section 3 of the GNU GPL. + +You may convey a covered work under sections 3 and 4 of this License +without being bound by section 3 of the GNU GPL. + +#### 2. Conveying Modified Versions. + +If you modify a copy of the Library, and, in your modifications, a +facility refers to a function or data to be supplied by an Application +that uses the facility (other than as an argument passed when the +facility is invoked), then you may convey a copy of the modified +version: + +- a) under this License, provided that you make a good faith effort + to ensure that, in the event an Application does not supply the + function or data, the facility still operates, and performs + whatever part of its purpose remains meaningful, or +- b) under the GNU GPL, with none of the additional permissions of + this License applicable to that copy. + +#### 3. Object Code Incorporating Material from Library Header Files. + +The object code form of an Application may incorporate material from a +header file that is part of the Library. You may convey such object +code under terms of your choice, provided that, if the incorporated +material is not limited to numerical parameters, data structure +layouts and accessors, or small macros, inline functions and templates +(ten or fewer lines in length), you do both of the following: + +- a) Give prominent notice with each copy of the object code that + the Library is used in it and that the Library and its use are + covered by this License. +- b) Accompany the object code with a copy of the GNU GPL and this + license document. + +#### 4. Combined Works. + +You may convey a Combined Work under terms of your choice that, taken +together, effectively do not restrict modification of the portions of +the Library contained in the Combined Work and reverse engineering for +debugging such modifications, if you also do each of the following: + +- a) Give prominent notice with each copy of the Combined Work that + the Library is used in it and that the Library and its use are + covered by this License. +- b) Accompany the Combined Work with a copy of the GNU GPL and this + license document. +- c) For a Combined Work that displays copyright notices during + execution, include the copyright notice for the Library among + these notices, as well as a reference directing the user to the + copies of the GNU GPL and this license document. 
+- d) Do one of the following: + - 0) Convey the Minimal Corresponding Source under the terms of + this License, and the Corresponding Application Code in a form + suitable for, and under terms that permit, the user to + recombine or relink the Application with a modified version of + the Linked Version to produce a modified Combined Work, in the + manner specified by section 6 of the GNU GPL for conveying + Corresponding Source. + - 1) Use a suitable shared library mechanism for linking with + the Library. A suitable mechanism is one that (a) uses at run + time a copy of the Library already present on the user's + computer system, and (b) will operate properly with a modified + version of the Library that is interface-compatible with the + Linked Version. +- e) Provide Installation Information, but only if you would + otherwise be required to provide such information under section 6 + of the GNU GPL, and only to the extent that such information is + necessary to install and execute a modified version of the + Combined Work produced by recombining or relinking the Application + with a modified version of the Linked Version. (If you use option + 4d0, the Installation Information must accompany the Minimal + Corresponding Source and Corresponding Application Code. If you + use option 4d1, you must provide the Installation Information in + the manner specified by section 6 of the GNU GPL for conveying + Corresponding Source.) + +#### 5. Combined Libraries. + +You may place library facilities that are a work based on the Library +side by side in a single library together with other library +facilities that are not Applications and are not covered by this +License, and convey such a combined library under terms of your +choice, if you do both of the following: + +- a) Accompany the combined library with a copy of the same work + based on the Library, uncombined with any other library + facilities, conveyed under the terms of this License. +- b) Give prominent notice with the combined library that part of it + is a work based on the Library, and explaining where to find the + accompanying uncombined form of the same work. + +#### 6. Revised Versions of the GNU Lesser General Public License. + +The Free Software Foundation may publish revised and/or new versions +of the GNU Lesser General Public License from time to time. Such new +versions will be similar in spirit to the present version, but may +differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Library +as you received it specifies that a certain numbered version of the +GNU Lesser General Public License "or any later version" applies to +it, you have the option of following the terms and conditions either +of that published version or of any later version published by the +Free Software Foundation. If the Library as you received it does not +specify a version number of the GNU Lesser General Public License, you +may choose any version of the GNU Lesser General Public License ever +published by the Free Software Foundation. + +If the Library as you received it specifies that a proxy can decide +whether future versions of the GNU Lesser General Public License shall +apply, that proxy's public statement of acceptance of any version is +permanent authorization for you to choose that version for the +Library. 
diff --git a/README.md b/README.md index 472d24c054..465b0529fa 100644 --- a/README.md +++ b/README.md @@ -1,53 +1,40 @@ -# global-workflow -Global Superstructure/Workflow currently supporting the Finite-Volume on a Cubed-Sphere Global Forecast System (FV3GFS) - -The global-workflow depends on the following prerequisities to be available on the system: - -* workload management platform / scheduler - LSF or SLURM -* workflow manager - ROCOTO (https://github.com/christopherwharrop/rocoto) -* modules - NCEPLIBS (various), esmf v8.0.0bs48, hdf5, intel/ips v18, impi v18, wgrib2, netcdf v4.7.0, hpss, gempak (see module files under /modulefiles for additional details) +[![Read The Docs Status](https://readthedocs.org/projects/global-workflow/badge/?badge=latest)](http://global-workflow.readthedocs.io/) +[![shellnorms](https://github.com/NOAA-EMC/global-workflow/actions/workflows/linters.yaml/badge.svg)](https://github.com/NOAA-EMC/global-workflow/actions/workflows/linters.yaml) +[![pynorms](https://github.com/NOAA-EMC/global-workflow/actions/workflows/pynorms.yaml/badge.svg)](https://github.com/NOAA-EMC/global-workflow/actions/workflows/pynorms.yaml) +[![pytests](https://github.com/NOAA-EMC/global-workflow/actions/workflows/pytests.yaml/badge.svg)](https://github.com/NOAA-EMC/global-workflow/actions/workflows/pytests.yaml) -The global-workflow current supports the following machines: - -* WCOSS-Dell -* WCOSS-Cray -* Hera -* Orion - -Quick-start instructions are below. Full instructions are available in the [wiki](https://github.com/NOAA-EMC/global-workflow/wiki/Run-Global-Workflow) +# global-workflow +Global Workflow currently supporting the Global Forecast System (GFS) with the [UFS-weather-model](https://github.com/ufs-community/ufs-weather-model) and [GSI](https://github.com/NOAA-EMC/GSI)-based Data Assimilation System. -## Build global-workflow: +The `global-workflow` depends on the following prerequisites to be available on the system: -### 1. Check out components +* Workflow Engine - [Rocoto](https://github.com/christopherwharrop/rocoto) and [ecFlow](https://github.com/ecmwf/ecflow) (for NWS Operations) +* Compiler - Intel Compiler Suite +* Software - NCEPLIBS (various), ESMF, HDF5, NetCDF, and a host of other software (see module files under /modulefiles for additional details) -While in /sorc folder: -``` -$ sh checkout.sh -``` +The `global-workflow` currently supports the following tier-1 machines: -### 2. Build components +* NOAA RDHPCS - Hera +* MSU HPC - Orion +* NOAA's operational HPC - WCOSS2 -While in /sorc folder: +Additionally, the following tier-2 machine is supported: +* SSEC at Univ. of Wisconsin - S4 (Note that S2S+ experiments are not fully supported) -Uncoupled -``` -$ sh build_all.sh -``` -Coupled -``` -$ sh build_all.sh -c -``` +Documentation (in progress) is available [here](https://global-workflow.readthedocs.io/en/latest/). -### 3. Link components # Disclaimer -While in /sorc folder: +The United States Department of Commerce (DOC) GitHub project code is provided +on an "as is" basis and the user assumes responsibility for its use. DOC has +relinquished control of the information and no longer has responsibility to +protect the integrity, confidentiality, or availability of the information. Any +claims against the Department of Commerce stemming from the use of its GitHub +project will be governed by all applicable Federal law.
Any reference to +specific commercial products, processes, or services by service mark, +trademark, manufacturer, or otherwise, does not constitute or imply their +endorsement, recommendation or favoring by the Department of Commerce. The +Department of Commerce seal and logo, or the seal and logo of a DOC bureau, +shall not be used in any manner to imply endorsement of any commercial product +or activity by DOC or the United States Government. -Uncoupled -``` -$ sh link_workflow.sh emc $MACHINE -``` -Coupled -``` -$ sh link_workflow.sh emc $MACHINE coupled -``` -...where $MACHINE is "dell", "cray", "hera", or "orion". diff --git a/ci/cases/C96C48_hybatmDA.yaml b/ci/cases/C96C48_hybatmDA.yaml new file mode 100644 index 0000000000..ebfda04fff --- /dev/null +++ b/ci/cases/C96C48_hybatmDA.yaml @@ -0,0 +1,16 @@ +experiment: + type: gfs + mode: cycled + +arguments: + app: ATM + resdet: 96 + resens: 48 + comrot: ${RUNTESTS}/${pslot}/COMROT + expdir: ${RUNTESTS}/${pslot}/EXPDIR + icsdir: ${ICSDIR_ROOT}/C96C48 + idate: 2021122018 + edate: 2021122200 + nens: 2 + gfs_cyc: 1 + start: cold diff --git a/ci/cases/C96_atm3DVar.yaml b/ci/cases/C96_atm3DVar.yaml new file mode 100644 index 0000000000..ca0e3fda6e --- /dev/null +++ b/ci/cases/C96_atm3DVar.yaml @@ -0,0 +1,15 @@ +experiment: + type: gfs + mode: cycled + +arguments: + app: ATM + resdet: 96 + comrot: ${RUNTESTS}/${pslot}/COMROT + expdir: ${RUNTESTS}/${pslot}/EXPDIR + icsdir: ${ICSDIR_ROOT}/C96C48 + idate: 2021122018 + edate: 2021122100 + nens: 0 + gfs_cyc: 1 + start: cold diff --git a/ci/platforms/hera.sh b/ci/platforms/hera.sh new file mode 100644 index 0000000000..a999748b1f --- /dev/null +++ b/ci/platforms/hera.sh @@ -0,0 +1,6 @@ +#!/usr/bin/bash +export GFS_CI_ROOT=/scratch1/NCEPDEV/global/Terry.McGuinness/GFS_CI_ROOT +export SLURM_ACCOUNT=nems +export ICSDIR_ROOT="/scratch1/NCEPDEV/global/glopara/data/ICSDIR" +export max_concurrent_cases=2 +export max_concurrent_pr=2 diff --git a/ci/platforms/orion.sh b/ci/platforms/orion.sh new file mode 100644 index 0000000000..329fc5aab9 --- /dev/null +++ b/ci/platforms/orion.sh @@ -0,0 +1,7 @@ +#!/usr/bin/bash + +export GFS_CI_ROOT=/work2/noaa/global/mterry/GFS_CI_ROOT +export ICSDIR_ROOT=/work/noaa/global/glopara/data/ICSDIR +export SLURM_ACCOUNT=nems +export max_concurrent_cases=2 +export max_concurrent_pr=2 diff --git a/ci/scripts/check_ci.sh b/ci/scripts/check_ci.sh new file mode 100755 index 0000000000..20df09d851 --- /dev/null +++ b/ci/scripts/check_ci.sh @@ -0,0 +1,138 @@ +#!/bin/bash +set -eux +##################################################################################### +# +# Script description: BASH script for checking for cases in a given PR and +# running rocotostat on each to determine if the experiment has +# succeeded or faild. This script is intended +# to run from within a cron job in the CI Managers account +# Abstract TODO +##################################################################################### + +HOMEgfs="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." 
>/dev/null 2>&1 && pwd )" +scriptname=$(basename "${BASH_SOURCE[0]}") +echo "Begin ${scriptname} at $(date -u)" || true +export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]' + +GH=${HOME}/bin/gh +REPO_URL=${REPO_URL:-"https://github.com/NOAA-EMC/global-workflow.git"} + +######################################################################### +# Set up runtime environment variables for accounts on supported machines +######################################################################### + +source "${HOMEgfs}/ush/detect_machine.sh" +case ${MACHINE_ID} in + hera | orion) + echo "Running Automated Testing on ${MACHINE_ID}" + source "${HOMEgfs}/ci/platforms/${MACHINE_ID}.sh" + ;; + *) + echo "Unsupported platform. Exiting with error." + exit 1 + ;; +esac +set +x +source "${HOMEgfs}/ush/module-setup.sh" +module use "${HOMEgfs}/modulefiles" +module load "module_gwsetup.${MACHINE_ID}" +module list +set -x +rocotostat=$(command -v rocotostat) +if [[ -z ${rocotostat+x} ]]; then + echo "rocotostat not found on system" + exit 1 +else + echo "rocotostat being used from ${rocotostat}" +fi +rocotocheck=$(command -v rocotocheck) +if [[ -z ${rocotocheck+x} ]]; then + echo "rocotocheck not found on system" + exit 1 +else + echo "rocotocheck being used from ${rocotocheck}" +fi + +pr_list_dbfile="${GFS_CI_ROOT}/open_pr_list.db" + +pr_list="" +if [[ -f "${pr_list_dbfile}" ]]; then + pr_list=$("${HOMEgfs}/ci/scripts/pr_list_database.py" --display "${pr_list_dbfile}" | grep -v Failed | grep Running | awk '{print $1}') || true +fi +if [[ -z "${pr_list}" ]]; then + echo "no PRs open and ready to run cases on .. exiting" + exit 0 +fi + +############################################################# +# Loop through all PRs in the PR list and look for experiments in +# the RUNTESTS dir and for each one run rocotostat on them +############################################################# + +for pr in ${pr_list}; do + id=$("${GH}" pr view "${pr}" --repo "${REPO_URL}" --json id --jq '.id') + echo "Processing Pull Request #${pr} and looking for cases" + pr_dir="${GFS_CI_ROOT}/PR/${pr}" + + # If there is no RUNTESTS dir for this PR then cases have not been made yet + if [[ ! -d "${pr_dir}/RUNTESTS" ]]; then + continue + fi + num_cases=$(find "${pr_dir}/RUNTESTS" -mindepth 1 -maxdepth 1 -type d | wc -l) || true + + # Check for PR success when ${pr_dir}/RUNTESTS is void of subfolders + # since all successful ones were previously removed + if [[ "${num_cases}" -eq 0 ]] && [[ -d "${pr_dir}/RUNTESTS" ]]; then + "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Running" --add-label "CI-${MACHINE_ID^}-Passed" + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}" + "${HOMEgfs}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" "${pr_list_dbfile}" + # Completely remove the PR and its cloned repo on success of all cases + rm -Rf "${pr_dir}" + continue + fi + + for cases in "${pr_dir}/RUNTESTS/"*; do + pslot=$(basename "${cases}") || true + if [[ -z "${pslot}" ]]; then + echo "No cases found in ${pr_dir}/RUNTESTS ..
exiting" + exit 0 + fi + xml="${pr_dir}/RUNTESTS/${pslot}/EXPDIR/${pslot}/${pslot}.xml" + db="${pr_dir}/RUNTESTS/${pslot}/EXPDIR/${pslot}/${pslot}.db" + rocoto_stat_output=$("${rocotostat}" -w "${xml}" -d "${db}" -s | grep -v CYCLE) || true + num_cycles=$(echo "${rocoto_stat_output}" | wc -l) || true + num_done=$(echo "${rocoto_stat_output}" | grep -c Done) || true + num_succeeded=$("${rocotostat}" -w "${xml}" -d "${db}" -a | grep -c SUCCEEDED) || true + echo "${pslot} Total Cycles: ${num_cycles} number done: ${num_done}" || true + num_failed=$("${rocotostat}" -w "${xml}" -d "${db}" -a | grep -c -E 'FAIL|DEAD') || true + if [[ ${num_failed} -ne 0 ]]; then + { + echo "Experiment ${pslot} Terminated: *FAILED*" + echo "Experiment ${pslot} Terminated with ${num_failed} tasks failed at $(date)" || true + } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" + error_logs=$("${rocotostat}" -d "${db}" -w "${xml}" | grep -E 'FAIL|DEAD' | awk '{print "-c", $1, "-t", $2}' | xargs "${rocotocheck}" -d "${db}" -w "${xml}" | grep join | awk '{print $2}') || true + "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Running" --add-label "CI-${MACHINE_ID^}-Failed" + { + echo "Error logs:" + echo "${error_logs}" + } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}" + "${HOMEgfs}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" "${pr_list_dbfile}" + for kill_cases in "${pr_dir}/RUNTESTS/"*; do + pslot=$(basename "${kill_cases}") + sacct --format=jobid,jobname%35,WorkDir%100,stat | grep "${pslot}" | grep "PR\/${pr}\/RUNTESTS" | awk '{print $1}' | xargs scancel || true + done + break + fi + if [[ "${num_done}" -eq "${num_cycles}" ]]; then + { + echo "Experiment ${pslot} completed: *SUCCESS*" + echo "Experiment ${pslot} Completed at $(date)" || true + echo "with ${num_succeeded} successfully completed jobs" || true + } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}" + "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}" + #Remove Experment cases that completed successfully + rm -Rf "${pr_dir}/RUNTESTS/${pslot}" + fi + done +done diff --git a/ci/scripts/clone-build_ci.sh b/ci/scripts/clone-build_ci.sh new file mode 100755 index 0000000000..022cc44378 --- /dev/null +++ b/ci/scripts/clone-build_ci.sh @@ -0,0 +1,122 @@ +#!/bin/bash +set -eux + +##################################################################### +# Usage and arguments for specfifying cloned directgory +##################################################################### +usage() { + set +x + echo + echo "Usage: $0 -p -d -o -h" + echo + echo " -p PR nunber to clone and build" + echo " -d Full path of of were to clone and build PR" + echo " -o Full path to output message file detailing results of CI tests" + echo " -h display this message and quit" + echo + exit 1 +} + +################################################################ +while getopts "p:d:o:h" opt; do + case ${opt} in + p) + PR=${OPTARG} + ;; + d) + repodir=${OPTARG} + ;; + o) + outfile=${OPTARG} + ;; + h|\?|:) + usage + ;; + *) + echo "Unrecognized option" + usage + exit + ;; + esac +done + +cd "${repodir}" || exit 1 +# clone copy of repo +if [[ -d global-workflow ]]; then + rm -Rf global-workflow +fi + +git clone "${REPO_URL}" +cd global-workflow || exit 1 + +pr_state=$(gh pr view "${PR}" --json state --jq '.state') +if [[ "${pr_state}" != "OPEN" ]]; then + title=$(gh pr view "${PR}" --json title --jq '.title') + echo "PR ${title} 
is no longer open, state is ${pr_state} ... quitting" + exit 1 +fi + +# checkout pull request +"${GH}" pr checkout "${PR}" --repo "${REPO_URL}" +HOMEgfs="${PWD}" +source "${HOMEgfs}/ush/detect_machine.sh" + +#################################################################### +# start output file +{ + echo "Automated global-workflow Testing Results:" + echo '```' + echo "Machine: ${MACHINE_ID^}" + echo "Start: $(date) on $(hostname)" || true + echo "---------------------------------------------------" +} >> "${outfile}" +###################################################################### + +# get commit hash +commit=$(git log --pretty=format:'%h' -n 1) +echo "${commit}" > "../commit" + +# run checkout script +cd sorc || exit 1 +set +e +./checkout.sh -c -g -u &>> log.checkout +checkout_status=$? +if [[ ${checkout_status} != 0 ]]; then + { + echo "Checkout: *FAILED*" + echo "Checkout: Failed at $(date)" || true + echo "Checkout: see output at ${PWD}/log.checkout" + } >> "${outfile}" + exit "${checkout_status}" +else + { + echo "Checkout: *SUCCESS*" + echo "Checkout: Completed at $(date)" || true + } >> "${outfile}" +fi + +# build full cycle +source "${HOMEgfs}/ush/module-setup.sh" +export BUILD_JOBS=8 +rm -rf log.build +./build_all.sh &>> log.build +build_status=$? + +if [[ ${build_status} != 0 ]]; then + { + echo "Build: *FAILED*" + echo "Build: Failed at $(date)" || true + echo "Build: see output at ${PWD}/log.build" + } >> "${outfile}" + exit "${build_status}" +else + { + echo "Build: *SUCCESS*" + echo "Build: Completed at $(date)" || true + } >> "${outfile}" +fi + +./link_workflow.sh + +echo "check/build/link test completed" +exit "${build_status}" diff --git a/ci/scripts/create_experiment.py b/ci/scripts/create_experiment.py new file mode 100755 index 0000000000..4500e91feb --- /dev/null +++ b/ci/scripts/create_experiment.py @@ -0,0 +1,110 @@ +#!/usr/bin/env python3 + +""" +Basic python script to create an experiment directory on the fly from a given + +yaml file for the arguments to the two scripts below in ${HOMEgfs}/workflow + +where ${HOMEgfs} is specified within the input yaml file. + + ${HOMEgfs}/workflow/setup_expt.py + ${HOMEgfs}/workflow/setup_xml.py + +The yaml file simply provides the arguments for these two scripts. +After this script runs these two, the user will have an experiment ready for launching + +Output +------ + +Functionally an experiment is set up as a result of running the two scripts described above +with an error code of 0 upon success.
+""" + +import sys +import socket +from pathlib import Path + +from pygw.yaml_file import YAMLFile +from pygw.logger import Logger +from pygw.executable import Executable + +from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter + +logger = Logger(level='DEBUG', colored_log=True) + + +def input_args(): + """ + Method to collect user arguments for `create_experiment.py` + + Input + ----- + + A single key valued argument: --yaml + + Description + ----------- + + A full path to a YAML file with the following format with required sections: experiment, arguments + + experiment: + mode: + used to hold the only required positional argument to setup_expt.py + + arguments: + holds all the remaining key values pairs for all requisite arguments documented for setup_expt.py + Note: the argument pslot is derived from the basename of the yamlfile itself + + Returns + ------- + + args: Namespace + + Namespace with the value of the file path to a yaml file from the key yaml + """ + + description = """Single argument as a yaml file containing the + key value pairs as arguments to setup_expt.py + """ + + parser = ArgumentParser(description=description, + formatter_class=ArgumentDefaultsHelpFormatter) + + parser.add_argument('--yaml', help='yaml configuration file per experiment', type=str, required=True) + parser.add_argument('--dir', help='full path to top level of repo of global-workflow', type=str, required=True) + + args = parser.parse_args() + return args + + +if __name__ == '__main__': + + user_inputs = input_args() + setup_expt_args = YAMLFile(path=user_inputs.yaml) + + HOMEgfs = user_inputs.dir + pslot = Path(user_inputs.yaml).stem + type = setup_expt_args.experiment.type + mode = setup_expt_args.experiment.mode + + setup_expt_cmd = Executable(Path.absolute(Path.joinpath(Path(HOMEgfs), 'workflow', 'setup_expt.py'))) + + setup_expt_cmd.add_default_arg(type) + setup_expt_cmd.add_default_arg(mode) + + for conf, value in setup_expt_args.arguments.items(): + setup_expt_cmd.add_default_arg(f'--{conf}') + setup_expt_cmd.add_default_arg(str(value)) + + setup_expt_cmd.add_default_arg('--pslot') + setup_expt_cmd.add_default_arg(pslot) + + logger.info(f'Run command: {setup_expt_cmd.command}') + setup_expt_cmd(output='setup_expt.stdout', error='setup_expt.stderr') + + setup_xml_cmd = Executable(Path.absolute(Path.joinpath(Path(HOMEgfs), 'workflow', 'setup_xml.py'))) + expdir = Path.absolute(Path.joinpath(Path(setup_expt_args.arguments.expdir), Path(pslot))) + setup_xml_cmd.add_default_arg(str(expdir)) + + logger.info(f'Run command: {setup_xml_cmd.command}') + setup_xml_cmd(output='setupxml.stdout', error='setupxml.stderr') diff --git a/ci/scripts/driver.sh b/ci/scripts/driver.sh new file mode 100755 index 0000000000..6bd76ca2bc --- /dev/null +++ b/ci/scripts/driver.sh @@ -0,0 +1,151 @@ +#!/bin/bash +set -eux + +##################################################################################### +# +# Script description: Top level driver script for checking PR +# ready for CI regression testing +# +# Abstract: +# +# This script uses GitHub CLI to check for Pull Requests with CI-Ready-${machine} tags on the +# development branch for the global-workflow repo. It then stages tests directories per +# PR number and calls clone-build_ci.sh to perform a clone and full build from $(HOMEgfs)/sorc +# of the PR. It then is ready to run a suite of regression tests with various +# configurations with run_tests.py. 
+####################################################################################### + +################################################################# +# TODO using static build for GitHub CLI until fixed in HPC-Stack +################################################################# +export GH=${HOME}/bin/gh +export REPO_URL=${REPO_URL:-"https://github.com/NOAA-EMC/global-workflow.git"} + +################################################################ +# Set up the relative paths to scripts and PS4 for better logging +################################################################ +HOMEgfs="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." >/dev/null 2>&1 && pwd )" +scriptname=$(basename "${BASH_SOURCE[0]}") +echo "Begin ${scriptname} at $(date -u)" || true +export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]' + +######################################################################### +# Set up runtime environment variables for accounts on supported machines +######################################################################### + +source "${HOMEgfs}/ush/detect_machine.sh" +case ${MACHINE_ID} in + hera | orion) + echo "Running Automated Testing on ${MACHINE_ID}" + source "${HOMEgfs}/ci/platforms/${MACHINE_ID}.sh" + ;; + *) + echo "Unsupported platform. Exiting with error." + exit 1 + ;; +esac + +###################################################### + # setup runtime env for correct python install and git +###################################################### +set +x +source "${HOMEgfs}/ush/module-setup.sh" +module use "${HOMEgfs}/modulefiles" +module load "module_gwsetup.${MACHINE_ID}" +set -x + +############################################################ +# query repo and get list of open PRs with tags {machine}-CI +############################################################ +pr_list_dbfile="${GFS_CI_ROOT}/open_pr_list.db" +if [[ ! -f "${pr_list_dbfile}" ]]; then + "${HOMEgfs}/ci/scripts/pr_list_database.py" --create "${pr_list_dbfile}" +fi + +pr_list=$(${GH} pr list --repo "${REPO_URL}" --label "CI-${MACHINE_ID^}-Ready" --state "open" | awk '{print $1}') || true + +for pr in ${pr_list}; do + "${HOMEgfs}/ci/scripts/pr_list_database.py" --add_pr "${pr}" "${pr_list_dbfile}" +done + +pr_list="" +if [[ -f "${pr_list_dbfile}" ]]; then + pr_list=$("${HOMEgfs}/ci/scripts/pr_list_database.py" --display "${pr_list_dbfile}" | grep -v Failed | grep Open | grep Ready | awk '{print $1}') || true +fi +if [[ -z "${pr_list}" ]]; then + echo "no PRs open and ready for checkout/build .. exiting" + exit 0 +fi + + +############################################################# +# Loop through all open PRs +# Clone, checkout, build, and create a set of cases for each +############################################################# + +for pr in ${pr_list}; do + + "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Ready" --add-label "CI-${MACHINE_ID^}-Building" + echo "Processing Pull Request #${pr}" + pr_dir="${GFS_CI_ROOT}/PR/${pr}" + mkdir -p "${pr_dir}" + # call clone-build_ci to clone and build PR + id=$("${GH}" pr view "${pr}" --repo "${REPO_URL}" --json id --jq '.id') + set +e + "${HOMEgfs}/ci/scripts/clone-build_ci.sh" -p "${pr}" -d "${pr_dir}" -o "${pr_dir}/output_${id}" + ci_status=$?
+  set -e
+  if [[ ${ci_status} -eq 0 ]]; then
+    "${HOMEgfs}/ci/scripts/pr_list_database.py" --update_pr "${pr}" Open Built "${pr_list_dbfile}"
+    # set up space to put an experiment
+    # export RUNTESTS for yaml case files to pick up
+    export RUNTESTS="${pr_dir}/RUNTESTS"
+    #rm -Rf "${pr_dir:?}/RUNTESTS/"*
+
+    #############################################################
+    # loop over every yaml file in ${HOMEgfs}/ci/cases
+    # and create a run directory for each one for this PR
+    #############################################################
+    for yaml_config in "${HOMEgfs}/ci/cases/"*.yaml; do
+      pslot=$(basename "${yaml_config}" .yaml) || true
+      export pslot
+      sed -i "/^base:/a\ ACCOUNT: \${SLURM_ACCOUNT}" "${pr_dir}/global-workflow/parm/config/gfs/yaml/defaults.yaml"
+      sed -i "/^base:/a\ ACCOUNT: \${SLURM_ACCOUNT}" "${pr_dir}/global-workflow/parm/config/gefs/yaml/defaults.yaml"
+      set +e
+      "${HOMEgfs}/ci/scripts/create_experiment.py" --yaml "${HOMEgfs}/ci/cases/${pslot}.yaml" --dir "${pr_dir}/global-workflow"
+      ci_status=$?
+      set -e
+      if [[ ${ci_status} -eq 0 ]]; then
+        {
+          echo "Created experiment: *SUCCESS*"
+          echo "Case setup: Completed at $(date) for experiment ${pslot}" || true
+        } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}"
+        "${GH}" pr edit --repo "${REPO_URL}" "${pr}" --remove-label "CI-${MACHINE_ID^}-Building" --add-label "CI-${MACHINE_ID^}-Running"
+        "${HOMEgfs}/ci/scripts/pr_list_database.py" --update_pr "${pr}" Open Running "${pr_list_dbfile}"
+      else
+        {
+          echo "Failed to create experiment: *FAIL* ${pslot}"
+          echo "Experiment setup: failed at $(date) for experiment ${pslot}" || true
+        } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}"
+        "${GH}" pr edit "${pr}" --repo "${REPO_URL}" --remove-label "CI-${MACHINE_ID^}-Building" --add-label "CI-${MACHINE_ID^}-Failed"
+        "${HOMEgfs}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" "${pr_list_dbfile}"
+      fi
+    done
+
+  else
+    {
+      echo "Failed on cloning and building global-workflow PR: ${pr}"
+      echo "CI on ${MACHINE_ID^} failed to build on $(date) for repo ${REPO_URL}" || true
+    } >> "${GFS_CI_ROOT}/PR/${pr}/output_${id}"
+    "${GH}" pr edit "${pr}" --repo "${REPO_URL}" --remove-label "CI-${MACHINE_ID^}-Building" --add-label "CI-${MACHINE_ID^}-Failed"
+    "${HOMEgfs}/ci/scripts/pr_list_database.py" --remove_pr "${pr}" "${pr_list_dbfile}"
+  fi
+  "${GH}" pr comment "${pr}" --repo "${REPO_URL}" --body-file "${GFS_CI_ROOT}/PR/${pr}/output_${id}"
+
+done # looping over each open and labeled PR
+
+##########################################
+# scrub working directory for older files
+##########################################
+#
+#find "${GFS_CI_ROOT}/PR/*" -maxdepth 1 -mtime +3 -exec rm -rf {} \;
diff --git a/ci/scripts/pr_list_database.py b/ci/scripts/pr_list_database.py
new file mode 100755
index 0000000000..b2bc1bc23d
--- /dev/null
+++ b/ci/scripts/pr_list_database.py
@@ -0,0 +1,163 @@
+#!/usr/bin/env python3
+
+import sys
+from pathlib import Path
+from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
+import sqlite3
+
+
+def sql_connection(filename: Path) -> sqlite3.Connection:
+    """
+    Returns a sqlite3 Connection object for a given path to a sqlite3 database file
+
+    Parameters
+    ----------
+    filename : Path
+        Full path to a sqlite3 database file
+
+    Returns
+    -------
+    sqlite3.Connection
+        Sqlite3 Connection object for updating table
+
+    """
+    try:
+        return sqlite3.connect(Path(filename))
+    except sqlite3.Error as err:
+        print(err)
+        sys.exit(-1)
+
+
+def sql_table(obj: sqlite3.Cursor) -> None:
+    """
+    Creates the initial sqlite3 table for PR states and status
+
+    Parameters
+    ----------
+    obj : sqlite3.Cursor
+        Cursor object for Sqlite3
+
+    """
+
+    obj.execute("CREATE TABLE processing(pr integer PRIMARY KEY, state text, status text)")
+
+
+def sql_insert(obj: sqlite3.Cursor, entities: list) -> None:
+    """
+    Inserts a new row in the sqlite3 table with PR, state, and status
+
+    Parameters
+    ----------
+    obj : sqlite3.Cursor
+        Cursor object for Sqlite3
+    entities : list
+        List of the three values that go into the sqlite table (pr, state, status)
+
+    """
+
+    obj.execute('INSERT INTO processing(pr, state, status) VALUES(?, ?, ?)', entities)
+
+
+def sql_update(obj: sqlite3.Cursor, pr: str, state: str, status: str) -> None:
+    """Updates the table for a given pr with new values for state and status
+
+    Parameters
+    ----------
+    obj : sqlite3.Cursor
+        sqlite3 Cursor object
+    pr : str
+        The given pr number to update in the table
+    state : str
+        The new value for the state (Open, Closed)
+    status: str
+        The new value for the status (Ready, Running, Failed)
+
+    """
+
+    obj.execute(f'UPDATE processing SET state = "{state}", status = "{status}" WHERE pr = {pr}')
+
+
+def sql_fetch(obj: sqlite3.Cursor) -> list:
+    """ Gets a list of all rows in the table
+
+    Parameters
+    ----------
+    obj : sqlite3.Cursor
+        sqlite3 Cursor object
+
+    Returns
+    -------
+    list
+        All rows of the processing table
+
+    """
+
+    obj.execute('SELECT * FROM processing')
+    return obj.fetchall()
+
+
+def sql_remove(obj: sqlite3.Cursor, pr: str) -> None:
+    """ Removes the row from the table with the given pr number
+
+    Parameters
+    ----------
+    obj : sqlite3.Cursor
+        sqlite3 Cursor object
+    pr : str
+        PR number acting as the key for the row to remove
+
+    """
+
+    obj.execute(f'DELETE FROM processing WHERE pr = {pr}')
+
+
+def input_args():
+
+    description = """Arguments for creating and updating db file for pr states
+    """
+
+    parser = ArgumentParser(description=description,
+                            formatter_class=ArgumentDefaultsHelpFormatter)
+
+    parser.add_argument('sbfile', help='SQLite3 database file with PR list', type=str)
+    parser.add_argument('--create', help='create sqlite file for pr list status', action='store_true', required=False)
+    parser.add_argument('--add_pr', nargs=1, metavar='PR', help='add new pr to list (defaults to: Open,Ready)', required=False)
+    parser.add_argument('--remove_pr', nargs=1, metavar='PR', help='removes pr from list', required=False)
+    parser.add_argument('--update_pr', nargs=3, metavar=('pr', 'state', 'status'), help='updates state and status of a given pr', required=False)
+    parser.add_argument('--display', help='output pr table', action='store_true', required=False)
+
+    args = parser.parse_args()
+    return args
+
+
+if __name__ == '__main__':
+
+    args = input_args()
+
+    con = sql_connection(args.sbfile)
+    obj = con.cursor()
+
+    if args.create:
+        sql_table(obj)
+
+    if args.add_pr:
+        rows = sql_fetch(obj)
+        for row in rows:
+            if str(row[0]) == str(args.add_pr[0]):
+                print(f"pr {row[0]} already is in list: nothing added")
+                sys.exit(0)
+
+        entities = (args.add_pr[0], 'Open', 'Ready')
+        sql_insert(obj, entities)
+
+    if args.update_pr:
+        pr = args.update_pr[0]
+        state = args.update_pr[1]
+        status = args.update_pr[2]
+        sql_update(obj, pr, state, status)
+
+    if args.remove_pr:
+        sql_remove(obj, args.remove_pr[0])
+
+    if args.display:
+        rows = sql_fetch(obj)
+        for row in rows:
+            print(' '.join(map(str, row)))
+
+    con.commit()
+    con.close()
diff --git a/ci/scripts/pygw b/ci/scripts/pygw
new file mode 120000
index 0000000000..77d784f6ca
--- /dev/null
+++ b/ci/scripts/pygw
@@ -0,0 +1 @@
+../../ush/python/pygw/src/pygw
\ No newline at end of file
diff --git a/ci/scripts/run_ci.sh b/ci/scripts/run_ci.sh
new file mode 100755
index 0000000000..8a1a363d32
--- /dev/null
+++ b/ci/scripts/run_ci.sh
@@ -0,0 +1,87 @@
+#!/bin/bash
+set -eux
+
+#####################################################################################
+#
+#  Script description: BASH script for checking for cases in a given PR and
+#                      simply running rocotorun on each.  This script is intended
+#                      to run from within a cron job in the CI Managers account
+#  Abstract TODO
+#####################################################################################
+
+HOMEgfs="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." >/dev/null 2>&1 && pwd )"
+scriptname=$(basename "${BASH_SOURCE[0]}")
+echo "Begin ${scriptname} at $(date -u)" || true
+export PS4='+ $(basename ${BASH_SOURCE})[${LINENO}]'
+
+##########################################################################
+# Set up runtime environment variables for accounts on supported machines
+##########################################################################
+
+source "${HOMEgfs}/ush/detect_machine.sh"
+case ${MACHINE_ID} in
+  hera | orion)
+    echo "Running Automated Testing on ${MACHINE_ID}"
+    source "${HOMEgfs}/ci/platforms/${MACHINE_ID}.sh"
+    ;;
+  *)
+    echo "Unsupported platform. Exiting with error."
+    exit 1
+    ;;
+esac
+set +x
+source "${HOMEgfs}/ush/module-setup.sh"
+module use "${HOMEgfs}/modulefiles"
+module load "module_gwsetup.${MACHINE_ID}"
+module list
+set -eux
+rocotorun=$(which rocotorun || true)
+if [[ -n "${rocotorun}" ]]; then
+  echo "rocotorun being used from ${rocotorun}"
+else
+  echo "rocotorun not found on system"
+  exit 1
+fi
+
+pr_list_dbfile="${GFS_CI_ROOT}/open_pr_list.db"
+
+pr_list=""
+if [[ -f "${pr_list_dbfile}" ]]; then
+  pr_list=$("${HOMEgfs}/ci/scripts/pr_list_database.py" --display "${pr_list_dbfile}" | grep -v Failed | grep Open | grep Running | awk '{print $1}' | head -"${max_concurrent_pr}") || true
+fi
+if [[ -z "${pr_list}" ]]; then
+  echo "no PRs open and ready for checkout/build .. exiting"
+  exit 0
+fi
+
+#############################################################
+# Loop through all PRs in the PR list and look for experiments
+# in the RUNTESTS dir, then run rocotorun on each one;
+# only up to $max_concurrent_cases will advance at a time
+#############################################################
+
+for pr in ${pr_list}; do
+  echo "Processing Pull Request #${pr} and looking for cases"
+  pr_dir="${GFS_CI_ROOT}/PR/${pr}"
+  # If the directory RUNTESTS is not present then
+  # setup_expt.py has not been run yet for this PR
+  if [[ ! -d "${pr_dir}/RUNTESTS" ]]; then
+     continue
+  fi
+  num_cases=0
+  for cases in "${pr_dir}/RUNTESTS/"*; do
+    if [[ ! -d "${cases}" ]]; then
+      continue
+    fi
+    ((num_cases=num_cases+1))
+    # No more than ${max_concurrent_cases} cases advance at a time for each PR
+    if [[ "${num_cases}" -gt "${max_concurrent_cases}" ]]; then
+      continue
+    fi
+    pslot=$(basename "${cases}")
+    xml="${pr_dir}/RUNTESTS/${pslot}/EXPDIR/${pslot}/${pslot}.xml"
+    db="${pr_dir}/RUNTESTS/${pslot}/EXPDIR/${pslot}/${pslot}.db"
+    echo "Running: ${rocotorun} -v 10 -w ${xml} -d ${db}"
+    "${rocotorun}" -v 10 -w "${xml}" -d "${db}"
+  done
+done
diff --git a/docs/Makefile b/docs/Makefile
new file mode 100644
index 0000000000..72173f32a7
--- /dev/null
+++ b/docs/Makefile
@@ -0,0 +1,25 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line, and also
+# from the environment for the first two.
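+# Typical local usage (assuming sphinx-build and the packages in
+# docs/requirements.txt are installed): "make clean html", which writes HTML
+# under build/html and Sphinx warnings to build/warnings.log.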
+SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = source +BUILDDIR = build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +# Sphinx doesn't know to clean out the debris from sphinx-gallery +clean: + rm -rf $(BUILDDIR)/* + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + [ -d $(BUILDDIR) ] || mkdir -p $(BUILDDIR) + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -w "$(BUILDDIR)/warnings.log" diff --git a/docs/Release_Notes.gfs.v15.2.0.txt b/docs/Release_Notes.gfs.v15.2.0.txt deleted file mode 100644 index 4f3cbcddd9..0000000000 --- a/docs/Release_Notes.gfs.v15.2.0.txt +++ /dev/null @@ -1,261 +0,0 @@ -GFS v15.2.0 RELEASE NOTES - - -PRELUDE (taken from GFS v15.2.0 SCN) - - GFS version 15.1 was implemented into operation at the 12Z cycle on June 12, 2019. It was the first - GFS implementation with the finite­ volume cubed-sphere (FV3) dynamical core as the Weather Service’s - Next Generation Global Prediction System (NGGPS). - - GFS version 15.2 is a minor upgrade. The major change to the system is to ingest new and replacement - satellite observations for data assimilation. It also contains a few other minor upgrades and bug fixes. - - 1) Assimilate new satellite observations - * GOES-17 AMVs - * GOES-17 has already replaced GOES-15 as the operational GOES-West satellite. Distribution of - GOES-15 products is expected to cease around December 2019 and active assimilation of GOES-17 - AMVs is required to prevent a gap in data coverage. - * Metop-C AMSU and MHS - * Metop-C is now the primary satellite in the 9:30 AM orbit. Assimilation of these data provide - additional impact as well as adding robustness to the system. - * KOMPSAT-5 (GPS-RO) - * Provides additional robustness to the system. - * Addition changes are made to VIIRS AMV ingest code to allow continued use after an expected change - to the BUFR format. - - 2) Assimilate buoyb sea-surface temperature (SST) data - TAC2BUFR changes in the buoy network resulted in a reduction of available SST measurements from buoys - to 10% or less of expected levels. Obsproc and GSI changes were required to restore data counts to - previous levels. - - 3) New product: Graphical Turbulence Guidance (GTG) - Global Turbulence product generated using NCAR GTG algorithm will start being disseminated on NOMADS. - The product will be available 3 hourly from F06 to F36 and horizontal resolution will be a quarter - degree. gfs.t{CC}Z.gtg.0p25.grb2f{HHH} - - 4) Update the Unified Post Processor(UPP) to address a mask issue of several land surface fields over - water bodies in grib2 products. - - This update will make GFS.v15.2 p-grid products to be more consistent with GFS.v14 products, and - remove spurious soil moisture along coastlines. These land surface fields include Soil Moisture, - Soil Temperature, Liquid Volumetric Soil Moisture, WEASD, Snow Depth, Water Runoff, GFLUX Ground - Heat Flux, WILT Wilting Point, and FLDCP Field Capacity. - - Affected product files are: - gfs.t{CC}Z.pgrb2.0p25.F{HHH} - gfs.t{CC}Z.pgrb2b.0p25.g{HHH} - gfs.t{CC}Z.pgrb2.0p50.F{HHH} - gfs.t{CC}Z.pgrb2b.0p50.g{HHH} - gfs.t{CC}Z.pgrb2.1p00.F{HHH} - gfs.t{CC}Z.pgrb2b.1p00.g{HHH} - gfs.t{CC}Z.sfluxgrbf{HHH}.grib2 - Where CC is cycle for 00, 06, 12, 18 UTC, and HHH is forecast hour. 
- - 5) The vessel icing program uses OISST as input. OISST will not be ported from WCOSS Phase 1 to Phase 3 - after Phase 1 is decommissioned in 2020. A decision was made to move the vessel icing program - within the Unified Post Processor(UPP) and use GFS forecast skin temperature as input. Current vessel - icing product in operation (sice.tCCz.siceg) has a 1-deg resolution and is disseminated to the public at - - ftp://ftp.ncep.noaa.gov/pub/data/nccf/com/omb/prod/sice.yyyymmdd/ - - New vessel icing product will be included as a variable (ICEG) in GFS p-grid products gfs.tCCz.pgrb2.xpxx.fxxx - and gfs.tCCz.pgrb2b.xpxx.fxxx at 0.25, 0.5, and 1.0-deg resolutions, and be disseminated to the public at - - ftp://ftp.ncep.noaa.gov/pub/data/nccf/com/gfs/prod/gfs.yyyymmdd/ - - 6) Added three stations to station time series bufr soundings: - - 006011 62.02N 6.76W TOR 00 Thorshvan, Denmark 54 Usr Rqst 4-19 - 999913 15.51S 128.15E WYN 00 Wyndham aerodrome Australia 4 Usr Rqst 1-19 - 999914 57.48N 7.36W EGPL 00 Benbecula, Scotland, UK 6 Usr Rqst 1-19 - - The affected output files are: - gfs_yyyymmddhh.sfc - gfs_yyyymmddhh.snd - gfs.tCCz.bufrsnd.tar.gz - - Three additional files for the stations: - bufr.006011.yyyymmddhh - bufr.999913.yyyymmddhh - bufr.999914.yyyymmddhh - - 7) Reduction of water temperature biases in small lakes. - For small lakes adequate observations do not always exit to support the analysis of lake surface - temperature, often leading to significant departures from both the climatology and real-time observation. - Two changes were introduced to ensure that lake temperatures do not deviate from the climatology when - observations are not available. The first change is to replace a surface mask file at 0.5-degree - resolution with the one on the T1534 Gaussian grid (~13km) to prevent unrealistic SST climatology - from being used for updating the background of the near sea-surface temperature analysis over small - water bodies, such as those in the Great Salt Lake. The second change is to reduce the relaxation - time scale of the SST to climatology in GDAS forecast step from 90 days to 10 days. - - 8) Changes to NOAAPORT/SBN - Product Removals - * GADS FAX product which contains tropical surface analysis in TIF format with G4 compression. - - - -IMPLEMENTATION INSTRUCTIONS - - * NOAA Vlab GIT is used to manage GFS.v15.2 code. The SPA(s) handling the GFS.v15.2 implementation need to have - permission to clone Vlab gerrit repositories. So far Wojciech Cencek has been given access to all GFS.v15.2 - related git repositories. Please contact Kate.Friedman@noaa.gov or Hang.Lei@noaa.gov if there is any VLAB - access issue and/or the individual code managers listed under item #6) below. Please follow the following - steps to install the package on WCOSS DELL - - 1) cd $NWROOTp3 - 2) mkdir gfs.v15.2.0 - 3) cd gfs.v15.2.0 - 4) git clone --recursive gerrit:global-workflow . - 5) git checkout feature/dev-v15.2 - 6) cd sorc - 7) ./checkout.sh - This script extracts the following GFS components from gerrit - MODEL -- tag nemsfv3_gfsv15.2.1 Jun.Wang@noaa.gov - GSI -- tag fv3da_gfs.v15.2.0 Russ.Treadon@noaa.gov - UPP -- tag ncep_post_gtg.v1.1.4 Wen.Meng@noaa.gov - WAFS -- tag gfs_wafs.v5.0.9 Yali.Mao@noaa.gov - - - 8) ./build_all.sh - *This script compiles all GFS components. Runtime output from the build for each package is written - to log files in directory logs. To build an individual program, for instance, gsi, use build_gsi.sh. 
- - 9) ./link_fv3gfs.sh nco dell - - * Note: 1) ecflow suite definition and scripts are saved in gfs.v15.2.0/ecflow/ecf - 2) ncep_post_gtg.v1.1.4 contains restricted GTG (Graphic Turbulence Guidance) code provided by - NCAR. Please do not post the GTG code in any public domain. - - - - -JOB CHANGES - - * See docs/Release_Notes.gfs_downstream.v15.2.0.txt - - -SORC CHANGES - - * sorc/ - * checkout.sh - update to check out the following tags - * NEMSfv3gfs nemsfv3_gfsv15.2.1 - * ProdGSI fv3da_gfsv15.2.0 - * EMC_post_gtg ncep_post_gtg.v1.1.4 - * EMC_gfs_wafs gfs_wafs.v5.0.9 - * sorc/global_chgres.fd/ - * sfcsub.F - bug fix. set logical variable (lmask) to be false for a land surface variable - * sorc/global_cycle.fd/ - * sfcsub.F - bug fix. set logical variable (lmask) to be false for a land surface variable - - -SCRIPT CHANGES - - * scripts/ - * run_gfsmos_master.sh.cray - remove reference to COMROOTp1 - * run_gfsmos_master.sh.dell - remove reference to COMROOTp1 - * additional script changes documented in docs/Release_Notes.gfs_downstream.v15.2.0.txt - - -PARM/CONFIG CHANGES - - * parm/ - * Two files are modified to set a - * parm/config/config.base.emc.dyn - set 10 day relaxaion time scale to SST climatology in GDAS forecast - * parm/config/config.base.nco.static - set 10 day relaxaion time scale to SST climatology in GDAS forecast - - * Two files were modified for adding three bufr stations: - * parm/product/bufr_stalist.meteo.gfs - * parm/product/bufr_stalist.meteo.gfs3 - - -FIX CHANGES - - * Files in fix/fix_gsi altered by GFS DA v15.2. See GFS DA v15.2 release notes - (sorc/gsi.fd/doc/Release_Notes.gfs_da.v15.2.0.txt) for details - - -PRODUCT CHANGES - - * see SCN - - -RESOURCE INFORMATION - - * Frequency of run - * No change from GFS v15.1 - - * Commonly used libraries, compiler, and modules are defined in gfs.v15.2.0/modulefiles. For nemsfv3gfs, gsi, upp, wafs - they maintain their own module files under gfs.v15.2.0/sorc/(fv3gfs gsi gfs_post global_wafs).fd/modulefiles - * GSI updated to use bufr/11.3.0 - - * Data retention time under $COMROOTp3 for GFS.v15.2 should be the same as GFS.v15.1. - - * Disk space: - * About 4 Gb more per gdas cycle due to additional observation data in gdas and enkf diagnostic files - - * Computational resources and run times: - * Due to processing additional observation data the runtime for the following jobs increases with - respect to GFS v15.1 as noted below - * gfs_analysis : about 30 seconds longer (27.4 minutes for GFS v15.1 -vs- 27.9 minutes for GFS v15.2) - * gdas_enkf_select_obs : about 1 minute longer (3.7 for GFS v15.1 -vs- 4.7 for GFS v15.2) - * gdas_enkf_innovate_obs_grp*: about 30 seconds longer (14.8 for GFS v15.1 -vs - 15.3 for GFS v15.2) - * gdas_enkf_update : about 20 seconds longer (6.4 for GFS v15.1 -vs- 6.7 for GFS v15.2) - - - -PRE-IMPLEMENTATION TESTING REQUIREMENTS - - * Which production jobs should be tested as part of this implementation? - * The entire GFS v15.2 package needs to be installed and tested. EMC can run the same date - and compare NCO and EMC output to confirm the EMC and NCO tests reproduce each other - - * Does this change require a 30-day evaluation? - * No. - - - * Suggested evaluators - * Please contact the following EMC staff for the indicated components - Fanglin.Yang@noaa.gov - MODEL - Russ.Treadon@noaa.gov - DA - Wen.Meng@noaa.gov - UPP - Yali.Mao@noaa.gov - WAFS - Boi.Vuong@noaa.gov - downstream products - - -DISSEMINATION INFORMATION - - * Where should this output be sent? 
- * No change from GFS v15.1 - - * Who are the users? - * No change from GFS v15.1 - - * Which output files should be transferred from PROD WCOSS to DEV WCOSS? - * No change from GFS v15.1 - - * Directory changes - * No change from GFS v15.1 - - * File changes. - * See SCN - - -HPSS ARCHIVE - - No change from GFS v15.1 - - - -JOB DEPENDENCIES & FLOW DIAGRAM - * No change from GFS v15.1 - - -=========== -Prepared by -Fanglin.Yang@noaa -Russ.Treadon@noaa.gov -Boi.Vuong@noaa.gov -Wen.Meng@noaa.gov - - diff --git a/docs/Release_Notes.gfs.v15.2.2.txt b/docs/Release_Notes.gfs.v15.2.2.txt deleted file mode 100644 index c1978fcf23..0000000000 --- a/docs/Release_Notes.gfs.v15.2.2.txt +++ /dev/null @@ -1,269 +0,0 @@ -GFS v15.2.2 - updated by SPA on 11/13/2019 - -Fixed missing gempak pathes in GFS_GEMPAK_NCDC_UPAPGIF job that caused the black/white background switch in the Fax chart. - -Change: -jobs/JGFS_GEMPAK_NCDC_UPAPGIF - - -GFS v15.2.0 RELEASE NOTES - - -PRELUDE (taken from GFS v15.2.0 SCN) - - GFS version 15.1 was implemented into operation at the 12Z cycle on June 12, 2019. It was the first - GFS implementation with the finite­ volume cubed-sphere (FV3) dynamical core as the Weather Service’s - Next Generation Global Prediction System (NGGPS). - - GFS version 15.2 is a minor upgrade. The major change to the system is to ingest new and replacement - satellite observations for data assimilation. It also contains a few other minor upgrades and bug fixes. - - 1) Assimilate new satellite observations - * GOES-17 AMVs - * GOES-17 has already replaced GOES-15 as the operational GOES-West satellite. Distribution of - GOES-15 products is expected to cease around December 2019 and active assimilation of GOES-17 - AMVs is required to prevent a gap in data coverage. - * Metop-C AMSU and MHS - * Metop-C is now the primary satellite in the 9:30 AM orbit. Assimilation of these data provide - additional impact as well as adding robustness to the system. - * KOMPSAT-5 (GPS-RO) - * Provides additional robustness to the system. - * Addition changes are made to VIIRS AMV ingest code to allow continued use after an expected change - to the BUFR format. - - 2) Assimilate buoyb sea-surface temperature (SST) data - TAC2BUFR changes in the buoy network resulted in a reduction of available SST measurements from buoys - to 10% or less of expected levels. Obsproc and GSI changes were required to restore data counts to - previous levels. - - 3) New product: Graphical Turbulence Guidance (GTG) - Global Turbulence product generated using NCAR GTG algorithm will start being disseminated on NOMADS. - The product will be available 3 hourly from F06 to F36 and horizontal resolution will be a quarter - degree. gfs.t{CC}Z.gtg.0p25.grb2f{HHH} - - 4) Update the Unified Post Processor(UPP) to address a mask issue of several land surface fields over - water bodies in grib2 products. - - This update will make GFS.v15.2 p-grid products to be more consistent with GFS.v14 products, and - remove spurious soil moisture along coastlines. These land surface fields include Soil Moisture, - Soil Temperature, Liquid Volumetric Soil Moisture, WEASD, Snow Depth, Water Runoff, GFLUX Ground - Heat Flux, WILT Wilting Point, and FLDCP Field Capacity. 
- - Affected product files are: - gfs.t{CC}Z.pgrb2.0p25.F{HHH} - gfs.t{CC}Z.pgrb2b.0p25.g{HHH} - gfs.t{CC}Z.pgrb2.0p50.F{HHH} - gfs.t{CC}Z.pgrb2b.0p50.g{HHH} - gfs.t{CC}Z.pgrb2.1p00.F{HHH} - gfs.t{CC}Z.pgrb2b.1p00.g{HHH} - gfs.t{CC}Z.sfluxgrbf{HHH}.grib2 - Where CC is cycle for 00, 06, 12, 18 UTC, and HHH is forecast hour. - - 5) The vessel icing program uses OISST as input. OISST will not be ported from WCOSS Phase 1 to Phase 3 - after Phase 1 is decommissioned in 2020. A decision was made to move the vessel icing program - within the Unified Post Processor(UPP) and use GFS forecast skin temperature as input. Current vessel - icing product in operation (sice.tCCz.siceg) has a 1-deg resolution and is disseminated to the public at - - ftp://ftp.ncep.noaa.gov/pub/data/nccf/com/omb/prod/sice.yyyymmdd/ - - New vessel icing product will be included as a variable (ICEG) in GFS p-grid products gfs.tCCz.pgrb2.xpxx.fxxx - and gfs.tCCz.pgrb2b.xpxx.fxxx at 0.25, 0.5, and 1.0-deg resolutions, and be disseminated to the public at - - ftp://ftp.ncep.noaa.gov/pub/data/nccf/com/gfs/prod/gfs.yyyymmdd/ - - 6) Added three stations to station time series bufr soundings: - - 006011 62.02N 6.76W TOR 00 Thorshvan, Denmark 54 Usr Rqst 4-19 - 999913 15.51S 128.15E WYN 00 Wyndham aerodrome Australia 4 Usr Rqst 1-19 - 999914 57.48N 7.36W EGPL 00 Benbecula, Scotland, UK 6 Usr Rqst 1-19 - - The affected output files are: - gfs_yyyymmddhh.sfc - gfs_yyyymmddhh.snd - gfs.tCCz.bufrsnd.tar.gz - - Three additional files for the stations: - bufr.006011.yyyymmddhh - bufr.999913.yyyymmddhh - bufr.999914.yyyymmddhh - - 7) Reduction of water temperature biases in small lakes. - For small lakes adequate observations do not always exit to support the analysis of lake surface - temperature, often leading to significant departures from both the climatology and real-time observation. - Two changes were introduced to ensure that lake temperatures do not deviate from the climatology when - observations are not available. The first change is to replace a surface mask file at 0.5-degree - resolution with the one on the T1534 Gaussian grid (~13km) to prevent unrealistic SST climatology - from being used for updating the background of the near sea-surface temperature analysis over small - water bodies, such as those in the Great Salt Lake. The second change is to reduce the relaxation - time scale of the SST to climatology in GDAS forecast step from 90 days to 10 days. - - 8) Changes to NOAAPORT/SBN - Product Removals - * GADS FAX product which contains tropical surface analysis in TIF format with G4 compression. - - - -IMPLEMENTATION INSTRUCTIONS - - * NOAA Vlab GIT is used to manage GFS.v15.2 code. The SPA(s) handling the GFS.v15.2 implementation need to have - permission to clone Vlab gerrit repositories. So far Wojciech Cencek has been given access to all GFS.v15.2 - related git repositories. Please contact Kate.Friedman@noaa.gov or Hang.Lei@noaa.gov if there is any VLAB - access issue and/or the individual code managers listed under item #6) below. Please follow the following - steps to install the package on WCOSS DELL - - 1) cd $NWROOTp3 - 2) mkdir gfs.v15.2.0 - 3) cd gfs.v15.2.0 - 4) git clone --recursive gerrit:global-workflow . 
- 5) git checkout feature/dev-v15.2 - 6) cd sorc - 7) ./checkout.sh - This script extracts the following GFS components from gerrit - MODEL -- tag nemsfv3_gfsv15.2.1 Jun.Wang@noaa.gov - GSI -- tag fv3da_gfs.v15.2.0 Russ.Treadon@noaa.gov - UPP -- tag ncep_post_gtg.v1.1.4 Wen.Meng@noaa.gov - WAFS -- tag gfs_wafs.v5.0.9 Yali.Mao@noaa.gov - - - 8) ./build_all.sh - *This script compiles all GFS components. Runtime output from the build for each package is written - to log files in directory logs. To build an individual program, for instance, gsi, use build_gsi.sh. - - 9) ./link_fv3gfs.sh nco dell - - * Note: 1) ecflow suite definition and scripts are saved in gfs.v15.2.0/ecflow/ecf - 2) ncep_post_gtg.v1.1.4 contains restricted GTG (Graphic Turbulence Guidance) code provided by - NCAR. Please do not post the GTG code in any public domain. - - - - -JOB CHANGES - - * See docs/Release_Notes.gfs_downstream.v15.2.0.txt - - -SORC CHANGES - - * sorc/ - * checkout.sh - update to check out the following tags - * NEMSfv3gfs nemsfv3_gfsv15.2.1 - * ProdGSI fv3da_gfsv15.2.0 - * EMC_post_gtg ncep_post_gtg.v1.1.4 - * EMC_gfs_wafs gfs_wafs.v5.0.9 - * sorc/global_chgres.fd/ - * sfcsub.F - bug fix. set logical variable (lmask) to be false for a land surface variable - * sorc/global_cycle.fd/ - * sfcsub.F - bug fix. set logical variable (lmask) to be false for a land surface variable - - -SCRIPT CHANGES - - * scripts/ - * run_gfsmos_master.sh.cray - remove reference to COMROOTp1 - * run_gfsmos_master.sh.dell - remove reference to COMROOTp1 - * additional script changes documented in docs/Release_Notes.gfs_downstream.v15.2.0.txt - - -PARM/CONFIG CHANGES - - * parm/ - * Two files are modified to set a - * parm/config/config.base.emc.dyn - set 10 day relaxaion time scale to SST climatology in GDAS forecast - * parm/config/config.base.nco.static - set 10 day relaxaion time scale to SST climatology in GDAS forecast - - * Two files were modified for adding three bufr stations: - * parm/product/bufr_stalist.meteo.gfs - * parm/product/bufr_stalist.meteo.gfs3 - - -FIX CHANGES - - * Files in fix/fix_gsi altered by GFS DA v15.2. See GFS DA v15.2 release notes - (sorc/gsi.fd/doc/Release_Notes.gfs_da.v15.2.0.txt) for details - - -PRODUCT CHANGES - - * see SCN - - -RESOURCE INFORMATION - - * Frequency of run - * No change from GFS v15.1 - - * Commonly used libraries, compiler, and modules are defined in gfs.v15.2.0/modulefiles. For nemsfv3gfs, gsi, upp, wafs - they maintain their own module files under gfs.v15.2.0/sorc/(fv3gfs gsi gfs_post global_wafs).fd/modulefiles - * GSI updated to use bufr/11.3.0 - - * Data retention time under $COMROOTp3 for GFS.v15.2 should be the same as GFS.v15.1. - - * Disk space: - * About 4 Gb more per gdas cycle due to additional observation data in gdas and enkf diagnostic files - - * Computational resources and run times: - * Due to processing additional observation data the runtime for the following jobs increases with - respect to GFS v15.1 as noted below - * gfs_analysis : about 30 seconds longer (27.4 minutes for GFS v15.1 -vs- 27.9 minutes for GFS v15.2) - * gdas_enkf_select_obs : about 1 minute longer (3.7 for GFS v15.1 -vs- 4.7 for GFS v15.2) - * gdas_enkf_innovate_obs_grp*: about 30 seconds longer (14.8 for GFS v15.1 -vs - 15.3 for GFS v15.2) - * gdas_enkf_update : about 20 seconds longer (6.4 for GFS v15.1 -vs- 6.7 for GFS v15.2) - - - -PRE-IMPLEMENTATION TESTING REQUIREMENTS - - * Which production jobs should be tested as part of this implementation? 
- * The entire GFS v15.2 package needs to be installed and tested. EMC can run the same date - and compare NCO and EMC output to confirm the EMC and NCO tests reproduce each other - - * Does this change require a 30-day evaluation? - * No. - - - * Suggested evaluators - * Please contact the following EMC staff for the indicated components - Fanglin.Yang@noaa.gov - MODEL - Russ.Treadon@noaa.gov - DA - Wen.Meng@noaa.gov - UPP - Yali.Mao@noaa.gov - WAFS - Boi.Vuong@noaa.gov - downstream products - - -DISSEMINATION INFORMATION - - * Where should this output be sent? - * No change from GFS v15.1 - - * Who are the users? - * No change from GFS v15.1 - - * Which output files should be transferred from PROD WCOSS to DEV WCOSS? - * No change from GFS v15.1 - - * Directory changes - * No change from GFS v15.1 - - * File changes. - * See SCN - - -HPSS ARCHIVE - - No change from GFS v15.1 - - - -JOB DEPENDENCIES & FLOW DIAGRAM - * No change from GFS v15.1 - - -=========== -Prepared by -Fanglin.Yang@noaa -Russ.Treadon@noaa.gov -Boi.Vuong@noaa.gov -Wen.Meng@noaa.gov - - diff --git a/docs/Release_Notes.gfs_downstream.v16.0.0.txt b/docs/Release_Notes.gfs_downstream.v16.0.0.txt deleted file mode 100644 index 5ee6238b9d..0000000000 --- a/docs/Release_Notes.gfs_downstream.v16.0.0.txt +++ /dev/null @@ -1,114 +0,0 @@ -RELEASE NOTES: GFS.v16.0.0 downstream products - released March 31, 2020 (tentative date) - -CODE CHANGES - No code change - -AWIPS CHANGES - Removed field "5WAVH" in All parm crads for AWIPS 20km and 1.0 deg (UPP planed to remove in GFS v16.0) - -GEMPAK CHANGES - Removed simulated GOES 12/13 in GEMPAK and PGRB2 - Added simulated GOES 16/17 in GEMPAK and PGRB2 in LAT/LON 0.25 deg - -JOB CHANGES - - Removed JGDAS_BULLS_NAVY - to be retired in GFS V16.0 - - Removed JGDAS_TROPC - Rteired in GFS v15.2.0 - - The remain GFS downstream jobs are following: - - JGDAS_ATMOS_GEMPAK - - JGDAS_ATMOS_GEMPAK_META_NCDC - - JGFS_ATMOS_AWIPS_G2 - - JGFS_ATMOS_FBWIND - - JGFS_ATMOS_GEMPAK - - JGFS_ATMOS_GEMPAK_META - - JGFS_ATMOS_GEMPAK_PGRB2_SPEC - - JGFS_ATMOS_AWIPS_20KM_1P0DEG - - JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF - - JGFS_ATMOS_PGRB2_SPEC_NPOESS - -SCRIPTS CHANGES - The following script have been removed from GFS v16.0.0 - - exgfs_grib_awips_g2.sh.ecf (retired in June 12, 2019) - - exgdas_bulls_navy.sh.ecf (Plan to retire in GFSS v16.0.0) GDAS and NAVY bulletins - - Removed WINTEMV bulltin in script exgfs_atmos_fbwind.sh (WINTEMV bulletin plan to retire in GFS v16.0.0) - -PARM/wmo - - Removed parm in grid 160,161,213,254,225 -----> Retired in GFS v15.2.7 - --DRIVER - All Job cards have same setting for testing on DELL with real-time GFS v15.2 data. 
- ( An example: JGFS_ATMOS_GEMPAK ) - # LSBATCH: User input - #BSUB -J gfs_gempak_00 - #BSUB -o /gpfs/dell2/ptmp/Boi.Vuong/output/gfs_gempak_00.o%J - #BSUB -q debug - #BSUB -cwd /gpfs/dell2/ptmp/Boi.Vuong/output - #BSUB -W 00:30 - #BSUB -P GFS-DEV - #BSUB -n 24 # 24 tasks - #BSUB -R span[ptile=12] # 12 task per node - #BSUB -R affinity[core(1):distribute=balance] # using 12 cores on node and bind to 1 - # core per task and distribute across sockets - - #################################### - ## Load the GRIB Utilities modules - #################################### - module load EnvVars/1.0.2 - module load ips/18.0.1.163 - module load CFP/2.0.1 - module load impi/18.0.1 - module load lsf/10.1 - module load prod_util/1.1.4 - module load prod_envir/1.0.3 - module load grib_util/1.1.0 - ########################################### - # Now set up GEMPAK/NTRANS environment - ########################################### - module load gempak/7.3.3 - module list - -FV3 GFS DRIVER: - All drivers are used to test GFS downtream jobs in gfs.v16.0.0/driver/product/run_*_dell.sh_xx where is xx is cycle - -The followig jobs, scripts, parm have been modified to meet NCO - EE2 implementation standards. -JOB CHANGES (no changes) - JGDAS_ATMOS_GEMPAK - JGDAS_ATMOS_GEMPAK_META_NCDC - JGFS_ATMOS_AWIPS_G2 - JGFS_ATMOS_FBWIND - JGFS_ATMOS_GEMPAK - JGFS_ATMOS_GEMPAK_META - JGFS_ATMOS_GEMPAK_PGRB2_SPEC - JGFS_ATMOS_AWIPS_20KM_1P0DEG - JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF - JGFS_ATMOS_PGRB2_SPEC_NPOESS - -SCRIPTS CHANGES - exgdas_atmos_nawips.sh - exgdas_atmos_gempak_gif_ncdc.sh - exgfs_atmos_gempak_gif_ncdc_skew_t.sh - exgfs_atmos_awips_20km_1p0deg.sh - exgfs_atmos_fbwind.sh - exgfs_atmos_gempak_meta.sh - exgfs_atmos_grib_awips.sh - exgfs_atmos_nawips.sh - exgFS_atmos_grib2_special_npoess.sh - exgfs_atmos_goes_nawips.sh -removed dependencies for gempak on phase 1 - exgfs_atmos_nawips.sh -removed dependencies for gempak on phase 1 - -PARM CHANGES - Removed 5WAVH in parm cards for AWIPS products in GFS v16.0.0 - (NCEP POST stopped producing 5WAVH (plan to retire "5WAVH") in GFS v16.0.0 - -USH CHANGES - No changes - -GEMPAK CHANGES - - Added nagrib.tbl file in gempak's FIX directory - -ECFLOW CHANGES - -Removed GFS FAX, GFS FAX WAFS and GDAS_TROPC in ecflow suite definition and scripts - -=========== -Prepared by -Boi.Vuong@noaa.gov diff --git a/docs/doxygen/compile b/docs/doxygen/compile index 226f267ac9..1273edab98 100755 --- a/docs/doxygen/compile +++ b/docs/doxygen/compile @@ -1,20 +1,22 @@ -#!/bin/ksh -x +#!/bin/bash -machine=${1:-${machine:-"WCOSS_C"}} +set -ex + +machine=${1:-${machine:-"HERA"}} machine=$(echo $machine | tr '[a-z]' '[A-Z]') if [ $machine = "HERA" ]; then doxygen=/usr/bin/doxygen -elif [ $machine = "WCOSS_C" ]; then - doxygen=/gpfs/hps3/emc/hwrf/noscrub/soft/doxygen-1.8.10/bin/doxygen -elif [ $machine = "WCOSS" ]; then - doxygen=/hwrf/noscrub/soft/doxygen-1.8.10/bin/doxygen +elif [ $machine = "ORION" ]; then + doxygen=/bin/doxygen elif [ $machine = "JET" ]; then doxygen=/contrib/doxygen/1.8.10/bin/doxygen else - echo "machine $machine is unrecognized, ABORT!" - echo "try system doxygen" + echo "machine $machine is unrecognized!" + echo "trying system doxygen" doxygen=$(which doxygen) + rc=$? 
+ [[ $rc -ne 0 ]] && (echo "doxygen not found, ABORT!"; exit 1) fi $doxygen diff --git a/docs/doxygen/mainpage.h b/docs/doxygen/mainpage.h index ac26cd861c..40e8e6f946 100644 --- a/docs/doxygen/mainpage.h +++ b/docs/doxygen/mainpage.h @@ -21,7 +21,7 @@ This is a very much a work in progress and any issues should be reported back an To setup an experiment, a python script \c setup_expt.py (located in \c fv3gfs/ush) can be used: $> setup_expt.py -h - usage: setup_expt.py [-h] [--machine {HERA,WCOSS_C}] --pslot PSLOT + usage: setup_expt.py [-h] --pslot PSLOT [--configdir CONFIGDIR] [--idate IDATE] [--icsdir ICSDIR] [--resdet RESDET] [--resens RESENS] [--comrot COMROT] [--expdir EXPDIR] [--nens NENS] [--cdump CDUMP] @@ -32,8 +32,6 @@ To setup an experiment, a python script \c setup_expt.py (located in \ optional arguments: -h, --help show this help message and exit - --machine machine name - (default: WCOSS_C) --pslot parallel experiment name [REQUIRED] (default: None) --configdir full path to directory containing the config files @@ -58,8 +56,8 @@ To setup an experiment, a python script \c setup_expt.py (located in \ The above script creates directories \c EXPDIR and \c COMROT. It will make links for initial conditions from a location provided via the \c --icsdir argument for a chosen resolution for the control \c --resdet and the ensemble \c --resens. Experiment name is controlled by the input argument \c --pslot. The script will ask user input in case any of the directories already exist. It will copy experiment configuration files into the \c EXPDIR from \c CONFIGDIR. Sample initial conditions for a few resolutions are available at:
-Theia: /scratch4/NCEPDEV/da/noscrub/Rahul.Mahajan/ICS
-WCOSS Cray: /gpfs/hps/emc/da/noscrub/Rahul.Mahajan/ICS +Hera: TODO: /path/here/for/initial/conditions
+Orion: TODO: /path/here/for/initial/conditions
Next step is for the user to go through the individual config files (atleast \c config.base) and customize the experiment configuration based on user needs. A stock configuration will be provided at a later stage, but it is imperative that the user understand the steps involved in the system. diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 0000000000..6247f7e231 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=source +set BUILDDIR=build + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/note_fixfield.txt b/docs/note_fixfield.txt index 292947353c..af2539e48a 100644 --- a/docs/note_fixfield.txt +++ b/docs/note_fixfield.txt @@ -2,9 +2,10 @@ For EMC, the fix fields for running the model are not included in git repository. They are saved locally on all platforms -Venus/Mars: /gpfs/dell2/emc/modeling/noscrub/emc.glopara/git/fv3gfs/fix -Surge/Luna: /gpfs/hps3/emc/global/noscrub/emc.glopara/git/fv3gfs/fix -Hera: /scratch1/NCEPDEV/global/glopara/fix +Hera: /scratch1/NCEPDEV/global/glopara/fix +Orion: /work/noaa/global/glopara/fix +Jet: /mnt/lfs4/HFIP/hfv3gfs/glopara/git/fv3gfs/fix +S4: /data/prod/glopara/fix ------------------------------------------------------------------------------ 09/28/2018 diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 0000000000..9c7258463b --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,2 @@ +sphinxcontrib-bibtex +sphinx_rtd_theme diff --git a/docs/source/_static/GFS_v16_flowchart.png b/docs/source/_static/GFS_v16_flowchart.png new file mode 100644 index 0000000000..963c915768 Binary files /dev/null and b/docs/source/_static/GFS_v16_flowchart.png differ diff --git a/docs/source/_static/custom.css b/docs/source/_static/custom.css new file mode 100644 index 0000000000..85a59fca39 --- /dev/null +++ b/docs/source/_static/custom.css @@ -0,0 +1,19 @@ +@import "default.css"; + +div.admonition-todo { +border-top: 2px solid red; +border-bottom: 2px solid red; +border-left: 2px solid red; +border-right: 2px solid red; +background-color: #ff6347 +} + +p.admonition-title { + display: offline; +} + +/*p.first.admonition-title { +background-color: #aa6347; +width: 100%; +} +*/ diff --git a/docs/source/_static/fv3_rocoto_view.png b/docs/source/_static/fv3_rocoto_view.png new file mode 100644 index 0000000000..02265122fe Binary files /dev/null and b/docs/source/_static/fv3_rocoto_view.png differ diff --git a/docs/source/_static/theme_overrides.css b/docs/source/_static/theme_overrides.css new file mode 100644 index 0000000000..9713e89ab2 --- /dev/null +++ b/docs/source/_static/theme_overrides.css @@ -0,0 +1,9 @@ +/* !important prevents the common CSS stylesheets from overriding this CSS since on RTD they are loaded after this stylesheet */ + +.wy-nav-content { + max-width: 100% !important; +} + 
+.wy-table-responsive table td { + white-space: normal !important; +} diff --git a/docs/source/clone.rst b/docs/source/clone.rst new file mode 100644 index 0000000000..c31968ec2e --- /dev/null +++ b/docs/source/clone.rst @@ -0,0 +1,153 @@ +=============================== +Clone and build Global Workflow +=============================== + +^^^^^^^^^^^^^^^^^^ +Quick Instructions +^^^^^^^^^^^^^^^^^^ + +Quick clone/build/link instructions (more detailed instructions below). + +.. note:: + Here we are making the assumption that you are using the workflow to run an experiment and so are working from the authoritative repository. If you are using a development branch then follow the instructions in :doc:`development.rst`. Once you do that you can follow the instructions here with the only difference being the repository/fork you are cloning from. + +For forecast-only (coupled or uncoupled): + +:: + + git clone https://github.com/NOAA-EMC/global-workflow.git + cd global-workflow/sorc + ./checkout.sh + ./build_all.sh + ./link_workflow.sh + +For cycled (w/ data assimilation): + +:: + + git clone https://github.com/NOAA-EMC/global-workflow.git + cd global-workflow/sorc + ./checkout.sh -g + ./build_all.sh + ./link_workflow.sh + +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Clone workflow and component repositories +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +******** +Workflow +******** + +There are several ways to clone repositories from GitHub. Below we describe how to clone the global-workflow using either the ssh or https methods. **The ssh method is highly preferred and recommended.** + +ssh method (using a password protected SSH key): + +:: + + git clone git@github.com:NOAA-EMC/global-workflow.git + +.. note:: + When using ssh methods you need to make sure that your GitHub account is configured for the computer from which you are accessing the repository (See `this link `_) + +https method: + +:: + + git clone https://github.com/NOAA-EMC/global-workflow.git + +Check what you just cloned (by default you will have only the develop branch): + +:: + + cd global-workflow + git branch + * develop + +You now have a cloned copy of the global-workflow git repository. To checkout a branch or tag in your clone: + +:: + + git checkout BRANCH_NAME + +.. note:: + Branch must already exist. If it does not you need to make a new branch using the ``-b`` flag: + +:: + + git checkout -b BRANCH_NAME + +The ``checkout`` command will checkout BRANCH_NAME and switch your clone to that branch. Example: + +:: + + git checkout my_branch + git branch + * my_branch + develop + +********** +Components +********** + +Once you have cloned the workflow repository it's time to checkout/clone its components. The components will be checked out under the ``/sorc`` folder via a script called checkout.sh. Run the script with no arguments for forecast-only: + +:: + + cd sorc + ./checkout.sh + +Or with the ``-g`` switch to include data assimilation (GSI) for cycling: + +:: + + cd sorc + ./checkout.sh -g + +If wishing to run with the operational GTG UPP and WAFS (only for select users) provide the ``-o`` flag with checkout.sh: + +:: + + ./checkout.sh -o + +Each component cloned via checkout.sh will have a log (``/sorc/logs/checkout-COMPONENT.log``). Check the screen output and logs for clone errors. + +^^^^^^^^^^^^^^^^ +Build components +^^^^^^^^^^^^^^^^ + +Under the ``/sorc`` folder is a script to build all components called ``build_all.sh``. 
After running checkout.sh, run this script to build all component codes:
+
+::
+
+   ./build_all.sh [-a UFS_app][-c build_config][-h][-v]
+    -a UFS_app:
+      Build a specific UFS app instead of the default
+    -c build_config:
+      Selectively build based on the provided config instead of the default config
+    -h:
+      Print usage message and exit
+    -v:
+      Run all scripts in verbose mode
+
+A partial build option is also available via two methods:
+
+  a) modify the gfs_build.cfg config file to disable/enable particular builds and then rerun build_all.sh
+
+  b) run the individual build scripts also available in the ``/sorc`` folder for each component or group of codes
+
+^^^^^^^^^^^^^^^
+Link components
+^^^^^^^^^^^^^^^
+
+At runtime the global-workflow needs all pieces in place within the main superstructure. To establish this, a link script is run to create symlinks from the top level folders down to component files checked out in ``/sorc`` folders.
+
+After running the checkout and build scripts, run the link script:
+
+::
+
+   ./link_workflow.sh [-o]
+
+Where:
+   ``-o``: Run in operations (NCO) mode. This creates copies instead of using symlinks and is generally only used by NCO during installation into production.
+
diff --git a/docs/source/components.rst b/docs/source/components.rst
new file mode 100644
index 0000000000..9e4377f739
--- /dev/null
+++ b/docs/source/components.rst
@@ -0,0 +1,105 @@
+###########################
+Global Workflow Components
+###########################
+
+The global-workflow is a combination of several components working together to prepare, analyze, produce, and post-process forecast data.
+
+The major components of the system are:
+
+* Workflow
+* Pre-processing
+* Analysis
+* Forecast
+* Post-processing
+* Verification
+
+The Global Workflow repository contains the workflow and script layers. After running the checkout script, the code and additional offline scripts for the analysis, forecast, and post-processing components will be present. Any non-workflow component is known as a sub-module. All of the sub-modules of the system reside in their respective repositories on GitHub. The global-workflow sub-modules are obtained by running the checkout script found under the /sorc folder.
+
+======================
+Component repositories
+======================
+
+Components checked out via sorc/checkout.sh:
+
+* **GFS UTILS** (https://github.com/ufs-community/gfs_utils): Utility codes needed by Global Workflow to run the GFS configuration
+* **UFS-Weather-Model** (https://github.com/ufs-community/ufs-weather-model): This is the core model used by the Global-Workflow to provide forecasts. The UFS-weather-model repository is an umbrella repository consisting of coupled earth system components that are all checked out when we check out the code at the top level of the repository
+* **GSI** (https://github.com/NOAA-EMC/GSI): This is the core code base for atmospheric Data Assimilation
+* **GSI UTILS** (https://github.com/NOAA-EMC/GSI-Utils): Utility codes needed by GSI to create analysis
+* **GSI Monitor** (https://github.com/NOAA-EMC/GSI-Monitor): These tools monitor the GSI package's data assimilation, detecting and reporting missing data sources, low observation counts, and high penalty values
+* **GDAS** (https://github.com/NOAA-EMC/GDASApp): JEDI-based Data Assimilation system.
This system is currently being developed for marine Data Assimilation and in time will replace GSI for atmospheric data assimilation as well +* **UFS UTILS** (https://github.com/ufs-community/UFS_UTILS): Utility codes needed for UFS-weather-model +* **Verif global** (https://github.com/NOAA-EMC/EMC_verif-global): Verification package to evaluate GFS parallels. It uses MET and METplus. At this moment the verification package is limited to providing atmospheric metrics only +* **GFS WAFS** (https://github.com/NOAA-EMC/EMC_gfs_wafs): Additional post processing products for Aircrafts + +.. note:: + When running the system in forecast-only mode the Data Assimilation components are not needed and are hence not checked out. + +===================== +External dependencies +===================== + +^^^^^^^^^ +Libraries +^^^^^^^^^ + +All the libraries that are needed to run the end to end Global Workflow are built using a package manager. Currently these are served via HPC-STACK but will soon be available via SPACK-STACK. These libraries are already available on supported NOAA HPC platforms + +Find information on official installations of HPC-STACK here: + +https://github.com/NOAA-EMC/hpc-stack/wiki/Official-Installations + +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Observation data (OBSPROC/prep) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +**** +Data +**** + +Observation data, also known as dump data, is prepared in production and then archived in a global dump archive (GDA) for use by users when running cycled experiments. The GDA (identified as ``$DMPDIR`` in the workflow) is available on supported platforms and the workflow system knows where to find the data. + +* Hera: /scratch1/NCEPDEV/global/glopara/dump +* Orion: /work/noaa/rstprod/dump +* Jet: /mnt/lfs4/HFIP/hfv3gfs/glopara/dump +* WCOSS2: /lfs/h2/emc/global/noscrub/emc.global/dump +* S4: /data/prod/glopara/dump + +----------------------------- +Global Dump Archive Structure +----------------------------- + +The global dump archive (GDA) mimics the structure of its production source: ``DMPDIR/CDUMP.PDY/[CC/atmos/]FILES`` + +The ``CDUMP`` is either gdas, gfs, or rtofs. All three contain production output for each day (``PDY``). The gdas and gfs folders are further broken into cycle (``CC``) and component (``atmos``). + +The GDA also contains special versions of some datasets and experimental data that is being evaluated ahead of implementation into production. The following subfolder suffixes exist: + ++--------+------------------------------------------------------------------------------------------------------+ +| SUFFIX | WHAT | ++========+======================================================================================================+ +| nr | Non-restricted versions of restricted files in production. Produced in production. Restriced data is | +| | fully stripped from files. These files remain as is. | ++--------+------------------------------------------------------------------------------------------------------+ +| ur | Un-restricted versions of restricted files in production. Produced and archived on a 48hrs delay. | +| | Some restricted datasets are unrestricted. Data amounts: restricted > un-restricted > non-restricted | ++--------+------------------------------------------------------------------------------------------------------+ +| x | Experimental global datasets being evaluated for production. Dates and types vary depending on | +| | upcoming global upgrades. 
| ++--------+------------------------------------------------------------------------------------------------------+ +| y | Similar to "x" but only used when there is a duplicate experimental file in the x subfolder with the | +| | same name. These files will be different from both the production versions (if that exists already) | +| | and the x versions. This suffix is rarely used. | ++--------+------------------------------------------------------------------------------------------------------+ +| p | Pre-production copy of full dump dataset, as produced by NCO during final 30-day parallel ahead of | +| | implementation. Not always archived. | ++--------+------------------------------------------------------------------------------------------------------+ + +*************** +Data processing +*************** + +Upstream of the global-workflow is the collection, quality control, and packaging of observed weather. The handling of that data is done by the OBSPROC group codes and scripts. The global-workflow uses two packages from OBSPROC to run its prep step to prepare observation (dump) data for use by the analysis system: + +1. https://github.com/NOAA-EMC/obsproc +2. https://github.com/NOAA-EMC/prepobs + +Package versions and locations on supported platforms are set in the global-workflow system configs, modulefiles, and version files. diff --git a/docs/source/conf.py b/docs/source/conf.py new file mode 100644 index 0000000000..c0f9ca572a --- /dev/null +++ b/docs/source/conf.py @@ -0,0 +1,111 @@ +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +import os +import sys +sys.path.insert(0, os.path.abspath('.')) + + +# -- Project information ----------------------------------------------------- + +project = 'Global-workflow' +copyright = '2023, Kate Friedman, Walter Kolczynski, Rahul Mahajan, Lin Gan, Arun Chawla' +author = 'Kate Friedman, Walter Kolczynski, Rahul Mahajan, Lin Gan, Arun Chawla' + +# The full version, including alpha/beta/rc tags +release = '0.1' + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.doctest', + 'sphinx.ext.intersphinx', + 'sphinx.ext.todo', + 'sphinx.ext.coverage', + 'sphinx.ext.mathjax', + 'sphinx.ext.ifconfig', + 'sphinx.ext.viewcode', + 'sphinx.ext.githubpages', + 'sphinx.ext.napoleon', + 'sphinxcontrib.bibtex' +] + +bibtex_bibfiles = ['references.bib'] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. 
+# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = [] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'sphinx_rtd_theme' +html_theme_path = ["_themes", ] + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} +html_theme_options = {"body_max_width": "none"} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] +html_context = {} + + +def setup(app): + app.add_css_file('custom.css') # may also be an URL + app.add_css_file('theme_overrides.css') # may also be an URL + + +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# The default sidebars (for documents that don't match any pattern) are +# defined by theme itself. Builtin themes are using these templates by +# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', +# 'searchbox.html']``. +# +# html_sidebars = {} + + +# -- Options for HTMLHelp output --------------------------------------------- + +# Output file base name for HTML help builder. +htmlhelp_basename = 'Global-Workflow' diff --git a/docs/source/configure.rst b/docs/source/configure.rst new file mode 100644 index 0000000000..477e95cec7 --- /dev/null +++ b/docs/source/configure.rst @@ -0,0 +1,53 @@ +============= +Configure Run +============= + +The global-workflow configs contain switches that change how the system runs. Many defaults are set initially. Users wishing to run with different settings should adjust their $EXPDIR configs and then rerun the ``setup_xml.py`` script since some configuration settings/switches change the workflow/xml ("Adjusts XML" column value is "YES"). 
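+
+For example, a typical way to change a setting and regenerate the workflow XML is shown below (the switch, value, and paths are illustrative only):
+
+::
+
+   # edit the desired switch in the experiment configs, e.g. turn on BUFR soundings
+   vi $EXPDIR/config.base        # set DO_BUFRSND="YES"
+
+   # rerun setup_xml.py so the change is reflected in the Rocoto XML
+   cd $HOMEgfs/workflow
+   ./setup_xml.py $EXPDIR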
+ ++----------------+------------------------------+---------------+-------------+---------------------------------------------------+ +| Switch | What | Default | Adjusts XML | More Details | ++================+==============================+===============+=============+===================================================+ +| APP | Model application | ATM | YES | See case block in config.base for options | ++----------------+------------------------------+---------------+-------------+---------------------------------------------------+ +| DOIAU | Enable 4DIAU for control | YES | NO | Turned off for cold-start first half cycle | +| | with 3 increments | | | | ++----------------+------------------------------+---------------+-------------+---------------------------------------------------+ +| DOHYBVAR | Run EnKF | YES | YES | Don't recommend turning off | ++----------------+------------------------------+---------------+-------------+---------------------------------------------------+ +| DONST | Run NSST | YES | NO | If YES, turns on NSST in anal/fcst steps, and | +| | | | | turn off rtgsst | ++----------------+------------------------------+---------------+-------------+---------------------------------------------------+ +| DO_AWIPS | Run jobs to produce AWIPS | NO | YES | downstream processing, ops only | +| | products | | | | ++----------------+------------------------------+---------------+-------------+---------------------------------------------------+ +| DO_BUFRSND | Run job to produce BUFR | NO | YES | downstream processing | +| | sounding products | | | | ++----------------+------------------------------+---------------+-------------+---------------------------------------------------+ +| DO_GEMPAK | Run job to produce GEMPAK | NO | YES | downstream processing, ops only | +| | products | | | | ++----------------+------------------------------+---------------+-------------+---------------------------------------------------+ +| DO_VRFY | Run vrfy job | NO | YES | Whether to include vrfy job (GSI monitoring, | +| | | | | tracker, VSDB, fit2obs) | ++----------------+------------------------------+---------------+-------------+---------------------------------------------------+ +| DO_METP | Run METplus jobs | YES | YES | One cycle spinup | ++----------------+------------------------------+---------------+-------------+---------------------------------------------------+ +| EXP_WARM_START | Is experiment starting warm | .false. | NO | Impacts IAU settings for initial cycle. Can also | +| | (.true.) or cold (.false)? | | | be set when running ``setup_expt.py`` script with | +| | | | | the ``--start`` flag (e.g. ``--start warm``) | ++----------------+------------------------------+---------------+-------------+---------------------------------------------------+ +| HPSSARCH | Archive to HPPS | NO | Possibly | Whether to save output to tarballs on HPPS | ++----------------+------------------------------+---------------+-------------+---------------------------------------------------+ +| LOCALARCH | Archive to a local directory | NO | Possibly | Instead of archiving data to HPSS, archive to a | +| | | | | local directory, specified by ATARDIR. If | +| | | | | LOCALARCH=YES, then HPSSARCH must =NO. Changing | +| | | | | HPSSARCH from YES to NO will adjust the XML. | ++----------------+------------------------------+---------------+-------------+---------------------------------------------------+ +| QUILTING | Use I/O quilting | .true. | NO | If .true. 
choose OUTPUT_GRID as cubed_sphere_grid | +| | | | | in netcdf or gaussian_grid | ++----------------+------------------------------+---------------+-------------+---------------------------------------------------+ +| WAFSF | Run jobs to produce WAFS | NO | YES | downstream processing, ops only | +| | products | | | | ++----------------+------------------------------+---------------+-------------+---------------------------------------------------+ +| WRITE_DOPOST | Run inline post | .true. | NO | If .true. produces master post output in forecast | +| | | | | job | ++----------------+------------------------------+---------------+-------------+---------------------------------------------------+ diff --git a/docs/source/development.rst b/docs/source/development.rst new file mode 100644 index 0000000000..e95516bcca --- /dev/null +++ b/docs/source/development.rst @@ -0,0 +1,198 @@ +################################### +Contributing to the Global Workflow +################################### + +This section is devoted to developers who wish to contribute to the Global Workflow repository. + +.. _managers: + +============= +Code managers +============= + + * Kate Friedman - @KateFriedman-NOAA / kate.friedman@noaa.gov + * Walter Kolczynski - @WalterKolczynski-NOAA / walter.kolczynski@noaa.gov + +.. _development: + +======================== +Where to do development? +======================== + + * In authoritative (main) repository: + + - Work for upcoming implementation (who: members of global-workflow-developers team) + - Major new features or port work (who: generally code managers and/or members of global-workflow-developers team) + + * In a fork: + + - Everything and everyone else + - How do I fork this repository? See the following GitHub documentation on forking repos: https://help.github.com/en/github/getting-started-with-github/fork-a-repo + +.. _protected: + +================== +Protected branches +================== + +The following global-workflow branches are protected by the code management team: + +* develop (HEAD) +* dev/gfs.v16 (kept aligned with current production, as well as ingests bug fixes and updates between release branches) + +These protected branches require the following to accept changes: + + 1. a pull request with at least 1 reviewer sign-off + 2. a code manager to perform the commit + +Other authoritative repository branches may also be protected at the request of members of the global-workflow-developers team. + +.. _howto: + +============================================= +How to get changes into develop (HEAD) branch +============================================= + +The following steps should be followed in order to make changes to the develop branch of global-workflow. Communication with the code managers throughout the process is encouraged. + + #. Issue - Open issue to document changes. Reference this issue in commits to your branches (e.g. ``git commit -m "Issue #23 - blah changes for what-not code"``) Click `here `__ to open a new global-workflow issue. + #. GitFlow - Follow `GitFlow `_ procedures for development (branch names, forking vs branching, etc.). Read more `here `__ about GitFlow at EMC. + #. To fork or not to fork? - If not working within authoritative repository create a fork of the authoritative repository. Read more `here `__ about forking in GitHub. + #. Branch - Create branch in either authoritative repository or fork of authoritative repository. See the `Where to do development? `_ section for how to determine where. 
Follow GitFlow conventions when creating branch. + #. Development - Perform and test changes in branch. Document work in issue and mention issue number in commit messages to link your work to the issue. See `Commit Messages `_ section below. Depending on changes the code manager may request or perform additional pre-commit tests. + #. Pull request - When ready to merge changes back to develop branch, the lead developer should initiate a pull request (PR) of your branch (either fork or not) into the develop branch. Read `here `__ about pull requests in GitHub. Provide some information about the PR in the proper field, add at least one reviewer to the PR and assign the PR to a code manager. + #. Complete - When review and testing is complete the code manager will complete the pull request and subsequent merge/commit. + #. Cleanup - When complete the lead developer should delete the branch and close the issue. "Closing keywords" can be used in the PR to automatically close associated issues. + +.. _development-tools: + +================= +Development Tools +================= + +See the ``/test`` folder in global-workflow for available development and testing tools. + +---------------- +Comparison Tools +---------------- + +There are several scripts to compare output between two experiments (e.g. control and test). See scripts under ``/test`` folder and read `README` there for information on how to use them. + +.. _code-standards: + +============== +Code standards +============== + +All scripts should be in either bash or python 3. + +We have adopted the `Google style guide `_ for shell scripts and `PEP-8 `_ for python. Python code should additionally have docstrings following `numpy style `_. + +All new code after 2022 Sep 1 will be required to meet these standards. We will slowly be updating existing scripts to comply with the standards. We are also in the process of adding GitHub actions to automatically lint code submitted for PRs. + +.. _commit-standards: + +====================== +Pull request standards +====================== + +Pull requests should follow the pre-filled template provided when you open the PR. PR titles and descriptions become the commit message when the PR is squashed and merged, so we ask that they follow best practices for commit messages: + + * Limit the subject line (PR title) to 50 characters + * Capitalize the subject line + * Do not end the subject line with a period + * Use the `imperative mood `_ in the subject line + * Use the body to explain what and why vs. how + * The final line of the commit message should include tags to relevant issues (e.g. ``Refs: #217, #300``) + +This list is a modified version of the one provided at https://chris.beams.io/posts/git-commit/ with a couple removed that are not relevant to GitHub PRs. That source also provides the motivation for making sure we have good commit messages. + +Here is the example commit message from the article linked above; it includes descriptions of what would be in each part of the commit message for guidance: + +:: + + Summarize changes in around 50 characters or less + + More detailed explanatory text, if necessary. Wrap it to about 72 + characters or so. In some contexts, the first line is treated as the + subject of the commit and the rest of the text as the body. The + blank line separating the summary from the body is critical (unless + you omit the body entirely); various tools like `log`, `shortlog` + and `rebase` can get confused if you run the two together. 
+ + Explain the problem that this commit is solving. Focus on why you + are making this change as opposed to how (the code explains that). + Are there side effects or other unintuitive consequences of this + change? Here's the place to explain them. + + Further paragraphs come after blank lines. + + - Bullet points are okay, too + + - Typically a hyphen or asterisk is used for the bullet, preceded + by a single space, with blank lines in between, but conventions + vary here + + If you use an issue tracker, put references to them at the bottom, + like this: + + Resolves: #123 + See also: #456, #789 + +A detailed commit message is very useful for documenting changes. + +.. _sync: + +================================================== +How to sync fork with the authoritative repository +================================================== + +As development in the main authoritative repository moves forward you will need to sync your fork branches to stay up-to-date. Below is an example of how to sync your fork copy of a branch with the authoritative repository copy. The branch name for the example will be "feature/new_thing". Click `here `__ for documentation on syncing forks. + +1. Clone your fork and checkout branch that needs syncing: + +:: + + git clone https://github.com/JoeSchmo-NOAA/global-workflow.git ./fork + cd fork + git checkout feature/my_new_thing + +2. Add upstream info to your clone so it knows where to merge from. The term "upstream" refers to the authoritative repository from which the fork was created. + +:: + + git remote add upstream https://github.com/NOAA-EMC/global-workflow.git + +3. Fetch upstream information into clone: + +:: + + git fetch upstream + +Later on you can update your fork remote information by doing the following command: + +:: + + git remote update + +4. Merge upstream ``feature/other_new_thing`` into your branch: + +:: + + git merge upstream/feature/other_new_thing + +5. Resolve any conflicts and perform any needed "add"s or "commit"s for conflict resolution. + +6. Push the merged copy back up to your fork (origin): + +:: + + git push origin feature/my_new_thing + +Done! + +Moving forward you'll want to perform the "remote update" command regularly to update the metadata for the remote/upstream repository in your fork (e.g. pull in metadata for branches made in auth repo after you forked it). + +:: + + git remote update diff --git a/docs/source/errors_faq.rst b/docs/source/errors_faq.rst new file mode 100644 index 0000000000..2660a01e60 --- /dev/null +++ b/docs/source/errors_faq.rst @@ -0,0 +1,45 @@ +========================== +Common Errors Known Issues +========================== + +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Error: "ImportError" message when running setup script +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Example:: + + $ ./setup_xml.py /path/to/your/experiment/directory + /usr/bin/env: python3: No such file or directory + +**Cause:** Missing python module in your environment + +**Solution:** Load a python module ("module load python") and retry setup script. 
+ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Error: curses default colors when running viewer +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Example:: + + $ ./rocoto_viewer.py -d blah.db -w blah.xml + Traceback (most recent call last): + File "./rocoto_viewer.py", line 2376, in + curses.wrapper(main) + File "/contrib/anaconda/anaconda2/4.4.0/lib/python2.7/curses/wrapper.py", line 43, in wrapper + return func(stdscr, *args, **kwds) + File "./rocoto_viewer.py", line 1202, in main + curses.use_default_colors() + _curses.error: use_default_colors() returned ERR + +**Cause:** wrong TERM setting for curses + +**Solution:** set TERM to "xterm" (bash: export TERM=xterm ; csh/tcsh: setenv TERM xterm) + +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Issue: Directory name change for EnKF folder in COMROT +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +**Issue:** The EnKF COMROT folders were renamed during the GFS v15 development process to remove the period between "enkf" and "gdas": enkf.gdas.$PDY → enkfgdas.$PDY + +**Fix:** Older tarballs on HPSS will have the older directory name with the period between 'enkf' and 'gdas'. Make sure to rename folder to 'enkfgdas.$PDY' after obtaining. Only an issue for the initial cycle. + diff --git a/docs/source/hpc.rst b/docs/source/hpc.rst new file mode 100644 index 0000000000..da54f29521 --- /dev/null +++ b/docs/source/hpc.rst @@ -0,0 +1,125 @@ +##################### +HPC Settings and Help +##################### + +Running the GFS configurations (or almost any global workflow configuration except the coarsest) is a resource intensive exercise. This page discusses recommended HPC environmental settings and contact information in case you need assistance from a particular HPC helpdesk. While most of the documentation is based on supported NOAA platforms, the learnings here can hopefully apply to other platforms. + +================================ +Experiment troubleshooting help +================================ + +Users may email Kate Friedman (kate.friedman@noaa.gov) questions or requests for troubleshooting assistance with their global-workflow experiments/parallels on supported platforms. For troubleshooting, please provide a brief description of the issue(s) and include relevant error messages and/or paths to logs for failed jobs. + +Any issues related to HPC/machine problems, and which are unrelated to the workflow itself, should go to the appropriate HPC helpdesk. + +============= +HPC helpdesks +============= + +* WCOSS2: hpc.wcoss2-help@noaa.gov +* Hera: rdhpcs.hera.help@noaa.gov +* Orion: rdhpcs.orion.help@noaa.gov +* HPSS: rdhpcs.hpss.help@noaa.gov +* Gaea: oar.gfdl.help@noaa.gov +* S4: david.huber@noaa.gov +* Jet: rdhpcs.jet.help@noaa.gov + +====================== +Restricted data access +====================== + +The GFS system ingests dump data files that contain global observation data. A number of these dump files contain restricted data which means those files come with an extra level of permissions called restricted or ‘rstprod’. Users who wish to run cycled GFS experiments, which both utilizes restricted observation data and produces output containing restricted data, will need to gain rstprod group access. + +NOTE: Only non-restricted data is available on S4. 
+ +To request rstprod access, do either a and/or b below: + +a) If you need restricted data access on WCOSS2, read details about restricted data and fill out form here: + +https://www.nco.ncep.noaa.gov/sib/restricted_data/restricted_data_sib/ + +b) If you need restricted data access on RDHPCS systems: go to the AIM system, click on "Request new access to a project", select the rstprod project, provide justification for needed access, and submit the request: + +https://aim.rdhpcs.noaa.gov/ + +==================================== +Optimizing the global workflow on S4 +==================================== + +The S4 cluster is relatively small and so optimizations are recommended to improve cycled runtimes. Please contact David Huber (david.huber@noaa.gov) if you are planning on running a cycled experiment on this system to obtain optimized configuration files. + +============ +Git settings +============ + +^^^^^^ +Merges +^^^^^^ + +Use the following command to have merge commits include the one-line description of all the commits being merged (up to 200). You only need to do this once on each machine; it will be saved to your git settings:: + + git config --global merge.log 200 + +Use the ``--no-ff`` option to make sure there is always a merge commit when a fast-forward only is available. Exception: If the merge contains only a single commit, it can be applied as a fast-forward. + +For any merge with multiple commits, a short synopsis of the merge should appear between the title and the list of commit titles added by merge.log. + +^^^^^^^ +Version +^^^^^^^ + +It is advised to use Git v2+ when available. At the time of writing this documentation the default Git clients on the different machines were as noted in the table below. It is recommended that you check the default modules before loading recommended ones: + ++---------+----------+---------------------------------------+ +| Machine | Default | Recommended | ++---------+----------+---------------------------------------+ +| Hera | v2.18.0 | default | ++---------+----------+---------------------------------------+ +| Orion | v1.8.3.1 | **module load git/2.28.0** | ++---------+----------+---------------------------------------+ +| Jet | v2.18.0 | default | ++---------+----------+---------------------------------------+ +| WCOSS2 | v2.26.2 | default or **module load git/2.29.0** | ++---------+----------+---------------------------------------+ +| S4 | v1.8.3.1 | **module load git/2.30.0** | ++---------+----------+---------------------------------------+ + +^^^^^^^^^^^^^ +Output format +^^^^^^^^^^^^^ + +For proper display of Git command output (e.g. git branch and git diff) type the following once per machine: + +:: + + git config --global core.pager 'less -FRX' + +For the manage_externals utility functioning:: + + Error: fatal: ssh variant 'simple' does not support setting port + Fix: git config --global ssh.variant ssh + +======================================== +Stacksize on R&Ds (Hera, Orion, Jet, S4) +======================================== + +Some GFS components, like the UPP, need an unlimited stacksize. Add the following setting into your appropriate .*rc file to support these components: + +csh:: + + limit stacksize unlimited + +sh/bash/ksh:: + + ulimit -s unlimited + +========================================= +Forecast hangs due to issue with ssh-keys +========================================= + +Did you generate your ssh-keys with a passphrase? If so, remake them without one. 
To test this try ssh-ing to a different login node; you should be able to without being prompted for your passphrase. + +Is your public key in the authorized_keys file? If not, add it:: + + cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys + diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 0000000000..e254a83fa2 --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,39 @@ + +############### +Global Workflow +############### + +**Global-workflow** is the end-to-end workflow designed to run global configurations of medium range weather forecasting for the UFS weather model. It supports both development and operational implementations. In its current format it supports the Global Forecast System (GFS) and the Global Ensemble Forecast System (GEFS) configurations + +====== +Status +====== + +* State of develop (HEAD) branch: GFSv17+ development +* State of operations (dev/gfs.v16 branch): GFS v16.3.7 `tag: [gfs.v16.3.7] `_ + +============= +Code managers +============= + +* Kate Friedman - @KateFriedman-NOAA / kate.friedman@noaa.gov +* Walter Kolczynski - @WalterKolczynski-NOAA / walter.kolczynski@noaa.gov + +============= +Announcements +============= + +General updates: NOAA employees and affiliates can join the gfs-announce distribution list to get updates on the GFS and global-workflow. Contact Kate Friedman (kate.friedman@noaa.gov) and Walter Kolczynski (walter.kolczynski@noaa.gov) to get added to the list or removed from it. + +GitHub updates: Users should adjust their "Watch" settings for this repo so they receive notifications as they'd like to. Find the "Watch" or "Unwatch" button towards the top right of the `authoritative global-workflow repository page `_ and click it to adjust how you watch the repo. + +.. toctree:: + :numbered: + :maxdepth: 3 + + development.rst + components.rst + jobs.rst + hpc.rst + output.rst + run.rst diff --git a/docs/source/init.rst b/docs/source/init.rst new file mode 100644 index 0000000000..5c9c811052 --- /dev/null +++ b/docs/source/init.rst @@ -0,0 +1,606 @@ +================== +Initial Conditions +================== + +There are two types of initial conditions for the global-workflow: + +#. Warm start: these ICs are taken directly from either the GFS in production or an experiment "warmed" up (at least one cycle in). +#. Cold start: any ICs converted to a new resolution or grid (e.g. C768 -> C384). These ICs are often prepared by chgres_cube (change resolution utility). + +Most users will initiate their experiments with cold start ICs unless running high resolution (C768 deterministic with C384 EnKF) for a date with warm starts available. It is `not recommended` to run high resolution unless required or as part of final testing. + +Atmosphere Resolutions: + +* C48 = 2 degree ≈ 200km +* C96 = 1 degree ≈ 100km +* C192 = 1/2 degree ≈ 50km +* C384 = 1/4 degree ≈ 25km +* C768 = 1/8 degree ≈ 13km +* C1152 ≈ 9km +* C3072 ≈ 3km + +Supported atmosphere resolutions in global-workflow: C48, C96, C192, C384, C768 + +Ocean Resolutions: + +* mx500 = 5 degree +* mx100 = 1 degree +* mx050 = 1/2 degree +* mx025 = 1/4 degree + +Supported ocean resolutions in global-workflow: mx500, mx100 + +^^^^^^^^^^^^^^^^^^^^^^^^^ +Staged Initial Conditions +^^^^^^^^^^^^^^^^^^^^^^^^^ + +* :ref:`Cycled ATM-only` +* :ref:`Cycled ATM w/ Coupled (S2S) model` +* :ref:`Prototype` + +.. 
_staged_ics_cycled_atmonly: + +*************** +Cycled ATM-only +*************** + +Cold-start atmosphere-only cycled C96 deterministic C48 enkf (80 members) ICs are available in the following locations on supported platforms: + +:: + + Hera: /scratch1/NCEPDEV/global/glopara/data/ICSDIR/C96C48 + Orion: /work/noaa/global/glopara/data/ICSDIR/C96C48 + WCOSS2: /lfs/h2/emc/global/noscrub/emc.global/data/ICSDIR/C96C48 + +Start date = 2021122018 + +:: + + -bash-4.2$ tree /scratch1/NCEPDEV/global/glopara/data/ICSDIR/C96C48/ + |-- enkfgdas.20211220 + | `-- 18 + | |-- mem### (where ### = 001 -> 080) + | | `-- atmos + | | `-- INPUT + | | |-- gfs_ctrl.nc + | | |-- gfs_data.tile1.nc + | | |-- gfs_data.tile2.nc + | | |-- gfs_data.tile3.nc + | | |-- gfs_data.tile4.nc + | | |-- gfs_data.tile5.nc + | | |-- gfs_data.tile6.nc + | | |-- sfc_data.tile1.nc + | | |-- sfc_data.tile2.nc + | | |-- sfc_data.tile3.nc + | | |-- sfc_data.tile4.nc + | | |-- sfc_data.tile5.nc + | | `-- sfc_data.tile6.nc + `-- gdas.20211220 + `-- 18 + `-- atmos + |-- INPUT + | |-- gfs_ctrl.nc + | |-- gfs_data.tile1.nc + | |-- gfs_data.tile2.nc + | |-- gfs_data.tile3.nc + | |-- gfs_data.tile4.nc + | |-- gfs_data.tile5.nc + | |-- gfs_data.tile6.nc + | |-- sfc_data.tile1.nc + | |-- sfc_data.tile2.nc + | |-- sfc_data.tile3.nc + | |-- sfc_data.tile4.nc + | |-- sfc_data.tile5.nc + | `-- sfc_data.tile6.nc + |-- gdas.t18z.abias + |-- gdas.t18z.abias_air + |-- gdas.t18z.abias_pc + `-- gdas.t18z.radstat + +.. _staged_ics_cycled_coupled: + +********************************* +Cycled ATM w/ Coupled (S2S) model +********************************* + +Warm-start cycled w/ coupled (S2S) model C48 atmosphere C48 enkf (80 members) 5 degree ocean/ice ICs are available in the following locations on supported platforms: + +:: + + Hera: /scratch1/NCEPDEV/global/glopara/data/ICSDIR/C48C48mx500 + Orion: /work/noaa/global/glopara/data/ICSDIR/C48C48mx500 + WCOSS2: /lfs/h2/emc/global/noscrub/emc.global/data/ICSDIR/C48C48mx500 + Jet: /lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/C48C48mx500 + +Start date = 2021032312 + +.. note:: + The EnKF member ICs are dummy duplicates of the deterministic at the moment. + +:: + + -bash-4.2$ tree /scratch1/NCEPDEV/global/glopara/data/ICSDIR/C48C48mx500 + ├── enkfgdas.20210323 + │   ├── 06 + │   │   ├── mem001 + │   │   │   └── model_data -> ../../../gdas.20210323/06/model_data + │   │   ├── mem002 + │   │   │   └── model_data -> ../../../gdas.20210323/06/model_data + │   │   ├── mem003 + │   │   │   └── model_data -> ../../../gdas.20210323/06/model_data + ... + │   │   └── mem080 + │   │   └── model_data -> ../../../gdas.20210323/06/model_data + │   └── 12 + │   ├── mem001 + │   │   └── analysis + │   │   └── ocean + │   │   └── gdas.t12z.ocninc.nc -> ../../../../../gdas.20210323/12/analysis/ocean/gdas.t12z.ocninc.nc + │   ├── mem002 + │   │   └── analysis + │   │   └── ocean + │   │   └── gdas.t12z.ocninc.nc -> ../../../../../gdas.20210323/12/analysis/ocean/gdas.t12z.ocninc.nc + │   ├── mem003 + │   │   └── analysis + │   │   └── ocean + │   │   └── gdas.t12z.ocninc.nc -> ../../../../../gdas.20210323/12/analysis/ocean/gdas.t12z.ocninc.nc + ... 
+ │   └── mem080 + │   └── analysis + │   └── ocean + │   └── gdas.t12z.ocninc.nc -> ../../../../../gdas.20210323/12/analysis/ocean/gdas.t12z.ocninc.nc + └── gdas.20210323 + ├── 06 + │   └── model_data + │   ├── atmos + │   │   └── restart + │   │   ├── 20210323.120000.ca_data.tile1.nc + │   │   ├── 20210323.120000.ca_data.tile2.nc + │   │   ├── 20210323.120000.ca_data.tile3.nc + │   │   ├── 20210323.120000.ca_data.tile4.nc + │   │   ├── 20210323.120000.ca_data.tile5.nc + │   │   ├── 20210323.120000.ca_data.tile6.nc + │   │   ├── 20210323.120000.coupler.res + │   │   ├── 20210323.120000.fv_core.res.nc + │   │   ├── 20210323.120000.fv_core.res.tile1.nc + │   │   ├── 20210323.120000.fv_core.res.tile2.nc + │   │   ├── 20210323.120000.fv_core.res.tile3.nc + │   │   ├── 20210323.120000.fv_core.res.tile4.nc + │   │   ├── 20210323.120000.fv_core.res.tile5.nc + │   │   ├── 20210323.120000.fv_core.res.tile6.nc + │   │   ├── 20210323.120000.fv_srf_wnd.res.tile1.nc + │   │   ├── 20210323.120000.fv_srf_wnd.res.tile2.nc + │   │   ├── 20210323.120000.fv_srf_wnd.res.tile3.nc + │   │   ├── 20210323.120000.fv_srf_wnd.res.tile4.nc + │   │   ├── 20210323.120000.fv_srf_wnd.res.tile5.nc + │   │   ├── 20210323.120000.fv_srf_wnd.res.tile6.nc + │   │   ├── 20210323.120000.fv_tracer.res.tile1.nc + │   │   ├── 20210323.120000.fv_tracer.res.tile2.nc + │   │   ├── 20210323.120000.fv_tracer.res.tile3.nc + │   │   ├── 20210323.120000.fv_tracer.res.tile4.nc + │   │   ├── 20210323.120000.fv_tracer.res.tile5.nc + │   │   ├── 20210323.120000.fv_tracer.res.tile6.nc + │   │   ├── 20210323.120000.phy_data.tile1.nc + │   │   ├── 20210323.120000.phy_data.tile2.nc + │   │   ├── 20210323.120000.phy_data.tile3.nc + │   │   ├── 20210323.120000.phy_data.tile4.nc + │   │   ├── 20210323.120000.phy_data.tile5.nc + │   │   ├── 20210323.120000.phy_data.tile6.nc + │   │   ├── 20210323.120000.sfc_data.tile1.nc + │   │   ├── 20210323.120000.sfc_data.tile2.nc + │   │   ├── 20210323.120000.sfc_data.tile3.nc + │   │   ├── 20210323.120000.sfc_data.tile4.nc + │   │   ├── 20210323.120000.sfc_data.tile5.nc + │   │   └── 20210323.120000.sfc_data.tile6.nc + │   ├── ice + │   │   └── restart + │   │   └── 20210323.120000.cice_model.res.nc + │   ├── med + │   │   └── restart + │   │   └── 20210323.120000.ufs.cpld.cpl.r.nc + │   └── ocean + │   └── restart + │   └── 20210323.120000.MOM.res.nc + └── 12 + └── analysis + ├── atmos + │   ├── gdas.t12z.abias + │   ├── gdas.t12z.abias_air + │   ├── gdas.t12z.abias_int + │   ├── gdas.t12z.abias_pc + │   └── gdas.t12z.radstat + └── ocean + └── gdas.t12z.ocninc.nc + +.. _staged_ics_prototype: + +********* +Prototype +********* + +Forecast-only P8 prototype initial conditions are made available to users on supported platforms in the following locations: + +:: + + WCOSS2: /lfs/h2/emc/global/noscrub/emc.global/IC/COUPLED + HERA: /scratch1/NCEPDEV/climate/role.ufscpara/IC + ORION: /work/noaa/global/glopara/data/ICSDIR/prototype_ICs + JET: /mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs + S4: /data/prod/glopara/coupled_ICs + +These locations are known within the workflow via paths set in ``parm/config/config.coupled_ic``. + +^^^^^^^^^^^^^^^^^^^^^^^^^^ +Prepare Initial Conditions +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. _automated-generation: + +******************** +Automated Generation +******************** + +.. _cycled: + +----------- +Cycled mode +----------- + +Not yet supported. See :ref:`Manual Generation` section below for how to create your ICs yourself (outside of workflow). + +.. 
_forecastonly-coupled: + +--------------------- +Forecast-only coupled +--------------------- +Coupled initial conditions are currently only generated offline and copied prior to the forecast run. Prototype initial conditions will automatically be used when setting up an experiment as an S2SW app, there is no need to do anything additional. Copies of initial conditions from the prototype runs are currently maintained on Hera, Orion, Jet, and WCOSS2. The locations used are determined by ``parm/config/config.coupled_ic``. If you need prototype ICs on another machine, please contact Walter (Walter.Kolczynski@noaa.gov). + +.. _forecastonly-atmonly: + +----------------------------- +Forecast-only mode (atm-only) +----------------------------- + +The table below lists the needed initial condition files from past GFS versions to be used by the UFS_UTILS gdas_init utility. The utility will pull these files for you. See the next section (Manual Generation) for how to run the UFS_UTILS gdas_init utility and create initial conditions for your experiment. + +Note for table: yyyy=year; mm=month; dd=day; hh=cycle + +Operations/production output location on HPSS: /NCEPPROD/hpssprod/runhistory/rh ``yyyy``/``yyyymm``/``yyyymmdd``/ + ++----------------+---------------------------------+-----------------------------------------------------------------------------+--------------------------------+ +| Source | Files | Tarball name | Where in ROTDIR | ++----------------+---------------------------------+-----------------------------------------------------------------------------+--------------------------------+ +| v12 ops | gfs.t. ``hh`` z.sanl | com_gfs_prod_gfs. ``yyyymmddhh`` .anl.tar | gfs. ``yyyymmdd`` /``hh`` | +| | | | | +| | gfs.t. ``hh`` z.sfcanl | | | ++----------------+---------------------------------+-----------------------------------------------------------------------------+--------------------------------+ +| v13 ops | gfs.t. ``hh`` z.sanl | com2_gfs_prod_gfs. ``yyyymmddhh`` .anl.tar | gfs. ``yyyymmdd`` /``hh`` | +| | | | | +| | gfs.t. ``hh`` z.sfcanl | | | ++----------------+---------------------------------+-----------------------------------------------------------------------------+--------------------------------+ +| v14 ops | gfs.t. ``hh`` z.atmanl.nemsio | gpfs_hps_nco_ops_com_gfs_prod_gfs. ``yyyymmddhh`` .anl.tar | gfs. ``yyyymmdd`` /``hh`` | +| | | | | +| | gfs.t. ``hh`` z.sfcanl.nemsio | | | ++----------------+---------------------------------+-----------------------------------------------------------------------------+--------------------------------+ +| v15 ops | gfs.t. ``hh`` z.atmanl.nemsio | gpfs_dell1_nco_ops_com_gfs_prod_gfs. ``yyyymmdd`` _ ``hh`` .gfs_nemsioa.tar | gfs. ``yyyymmdd`` /``hh`` | +| | | | | +| pre-2020022600 | gfs.t. ``hh`` z.sfcanl.nemsio | | | ++----------------+---------------------------------+-----------------------------------------------------------------------------+--------------------------------+ +| v15 ops | gfs.t. ``hh`` z.atmanl.nemsio | com_gfs_prod_gfs. ``yyyymmdd`` _ ``hh`` .gfs_nemsioa.tar | gfs. ``yyyymmdd`` /``hh`` | +| | | | | +| | gfs.t. ``hh`` z.sfcanl.nemsio | | | ++----------------+---------------------------------+-----------------------------------------------------------------------------+--------------------------------+ +| v16 retro | gfs.t. ``hh`` z.atmanl.nc | gfs_netcdfa.tar* | gfs. ``yyyymmdd`` /``hh``/atmos| +| | | | | +| | gfs.t. 
``hh`` z.sfcanl.nc | | | ++----------------+---------------------------------+-----------------------------------------------------------------------------+--------------------------------+ +| v16.0[1] ops | gfs.t. ``hh`` z.atmanl.nc | com_gfs_prod_gfs. ``yyyymmdd`` _ ``hh`` .gfs_nca.tar | gfs. ``yyyymmdd`` /``hh``/atmos| +| | | | | +| | gfs.t. ``hh`` z.sfcanl.nc | | | ++----------------+---------------------------------+-----------------------------------------------------------------------------+--------------------------------+ +| v16.2[3]+ ops | gfs.t. ``hh`` z.atmanl.nc | com_gfs_ ``gfs_ver`` _gfs. ``yyyymmdd`` _ ``hh`` .gfs_nca.tar | gfs. ``yyyymmdd`` /``hh``/atmos| +| | | | | +| | gfs.t. ``hh`` z.sfcanl.nc | | | ++----------------+---------------------------------+-----------------------------------------------------------------------------+--------------------------------+ + +For HPSS path, see retrospective table in :ref:`pre-production parallel section ` below + +.. _manual-generation: + +***************** +Manual Generation +***************** + +.. note:: + Initial conditions cannot be generated on S4. These must be generated on another supported platform then pushed to S4. If you do not have access to a supported system or need assistance, please contact David Huber (david.huber@noaa.gov). + +.. _coldstarts: + +The following information is for users needing to generate cold-start initial conditions for a cycled experiment that will run at a different resolution or layer amount than the operational GFS (C768C384L127). + +The ``chgres_cube`` code is available from the `UFS_UTILS repository `_ on GitHub and can be used to convert GFS ICs to a different resolution or number of layers. Users may clone the develop/HEAD branch or the same version used by global-workflow develop (found in ``sorc/checkout.sh``). The ``chgres_cube`` code/scripts currently support the following GFS inputs: + +* pre-GFSv14 +* GFSv14 +* GFSv15 +* GFSv16 + +Users can use the copy of UFS_UTILS that is already cloned and built within their global-workflow clone or clone/build it separately: + +Within a built/linked global-workflow clone: + +:: + + cd sorc/ufs_utils.fd/util/gdas_init + +Clone and build separately: + +1. Clone UFS_UTILS: + +:: + + git clone --recursive https://github.com/NOAA-EMC/UFS_UTILS.git + +Then switch to a different tag or use the default branch (develop). + +2. Build UFS_UTILS: + +:: + + sh build_all.sh + cd fix + sh link_fixdirs.sh emc $MACHINE + +where ``$MACHINE`` is ``wcoss2``, ``hera``, or ``jet``. + +.. note:: + UFS-UTILS builds on Orion but due to the lack of HPSS access on Orion the ``gdas_init`` utility is not supported there. + +3. Configure your conversion: + +:: + + cd util/gdas_init + vi config + +Read the doc block at the top of the config and adjust the variables to meet you needs (e.g. ``yy, mm, dd, hh`` for ``SDATE``). + +Most users will want to adjust the following ``config`` settings for the current system design: + +#. EXTRACT_DATA=YES (to pull original ICs to convert off HPSS) +#. RUN_CHGRES=YES (to run chgres_cube on the original ICs pulled off HPSS) +#. LEVS=128 (for the L127 GFS) + +4. Submit conversion script: + +:: + + ./driver.$MACHINE.sh + +where ``$MACHINE`` is currently ``wcoss2``, ``hera`` or ``jet``. Additional options will be available as support for other machines expands. + +.. 
note:: + UFS-UTILS builds on Orion but due to lack of HPSS access there is no ``gdas_init`` driver for Orion nor support to pull initial conditions from HPSS for the ``gdas_init`` utility. + +Several small jobs will be submitted: + + - 1 jobs to pull inputs off HPSS + - 1 or 2 jobs to run ``chgres_cube`` (1 for deterministic/hires and 1 for each EnKF ensemble member) + +The chgres jobs will have a dependency on the data-pull jobs and will wait to run until all data-pull jobs have completed. + +5. Check output: + +In the config you will have defined an output folder called ``$OUTDIR``. The converted output will be found there, including the needed abias and radstat initial condition files (if CDUMP=gdas). The files will be in the needed directory structure for the global-workflow system, therefore a user can move the contents of their ``$OUTDIR`` directly into their ``$ROTDIR/$COMROT``. + +Please report bugs to George Gayno (george.gayno@noaa.gov) and Kate Friedman (kate.friedman@noaa.gov). + +.. _warmstarts-prod: + +***************************** +Warm starts (from production) +***************************** + +Output and warm start initial conditions from the operational GFS (FV3GFS) are saved on HPSS. Users can pull these warm start initial conditions from tape for their use in running operational resolution experiments. + +See production output in the following location on HPSS: + +``/NCEPPROD/hpssprod/runhistory/rhYYYY/YYYYMM/YYYYMMDD`` + +Example location for January 2nd 2023: + +``/NCEPPROD/hpssprod/runhistory/rh2023/202301/20230102`` + +Example listing for January 2nd 2023 00z (2023010200) production tarballs: + +:: + + -bash-4.2$ hpsstar dir /NCEPPROD/hpssprod/runhistory/rh2023/202301/20230102 | grep gfs | grep _00. | grep -v idx + [connecting to hpsscore1.fairmont.rdhpcs.noaa.gov/1217] + -rw-r----- 1 nwprod rstprod 34824086016 Jan 4 03:31 com_gfs_v16.3_enkfgdas.20230102_00.enkfgdas.tar + -rw-r--r-- 1 nwprod prod 219779890688 Jan 4 04:04 com_gfs_v16.3_enkfgdas.20230102_00.enkfgdas_restart_grp1.tar + -rw-r--r-- 1 nwprod prod 219779921408 Jan 4 04:13 com_gfs_v16.3_enkfgdas.20230102_00.enkfgdas_restart_grp2.tar + -rw-r--r-- 1 nwprod prod 219775624192 Jan 4 04:23 com_gfs_v16.3_enkfgdas.20230102_00.enkfgdas_restart_grp3.tar + -rw-r--r-- 1 nwprod prod 219779726848 Jan 4 04:33 com_gfs_v16.3_enkfgdas.20230102_00.enkfgdas_restart_grp4.tar + -rw-r--r-- 1 nwprod prod 219777990656 Jan 4 04:42 com_gfs_v16.3_enkfgdas.20230102_00.enkfgdas_restart_grp5.tar + -rw-r--r-- 1 nwprod prod 219780963328 Jan 4 04:52 com_gfs_v16.3_enkfgdas.20230102_00.enkfgdas_restart_grp6.tar + -rw-r--r-- 1 nwprod prod 219775471104 Jan 4 05:02 com_gfs_v16.3_enkfgdas.20230102_00.enkfgdas_restart_grp7.tar + -rw-r--r-- 1 nwprod prod 219779499008 Jan 4 05:11 com_gfs_v16.3_enkfgdas.20230102_00.enkfgdas_restart_grp8.tar + -rw-r----- 1 nwprod rstprod 2287770624 Jan 4 02:07 com_gfs_v16.3_gdas.20230102_00.gdas.tar + -rw-r--r-- 1 nwprod prod 1026611200 Jan 4 02:07 com_gfs_v16.3_gdas.20230102_00.gdas_flux.tar + -rw-r--r-- 1 nwprod prod 91233038336 Jan 4 02:16 com_gfs_v16.3_gdas.20230102_00.gdas_nc.tar + -rw-r--r-- 1 nwprod prod 10865070592 Jan 4 02:08 com_gfs_v16.3_gdas.20230102_00.gdas_pgrb2.tar + -rw-r----- 1 nwprod rstprod 69913956352 Jan 4 02:11 com_gfs_v16.3_gdas.20230102_00.gdas_restart.tar + -rw-r--r-- 1 nwprod prod 18200814080 Jan 4 02:17 com_gfs_v16.3_gdas.20230102_00.gdaswave_keep.tar + -rw-r--r-- 1 nwprod prod 5493360128 Jan 4 02:18 com_gfs_v16.3_gfs.20230102_00.gfs.tar + -rw-r--r-- 1 nwprod prod 62501531648 Jan 4 02:21 
com_gfs_v16.3_gfs.20230102_00.gfs_flux.tar + -rw-r--r-- 1 nwprod prod 121786191360 Jan 4 02:41 com_gfs_v16.3_gfs.20230102_00.gfs_nca.tar + -rw-r--r-- 1 nwprod prod 130729495040 Jan 4 02:48 com_gfs_v16.3_gfs.20230102_00.gfs_ncb.tar + -rw-r--r-- 1 nwprod prod 138344908800 Jan 4 02:29 com_gfs_v16.3_gfs.20230102_00.gfs_pgrb2.tar + -rw-r--r-- 1 nwprod prod 59804635136 Jan 4 02:32 com_gfs_v16.3_gfs.20230102_00.gfs_pgrb2b.tar + -rw-r--r-- 1 nwprod prod 25095460864 Jan 4 02:34 com_gfs_v16.3_gfs.20230102_00.gfs_restart.tar + -rw-r--r-- 1 nwprod prod 21573020160 Jan 4 02:49 com_gfs_v16.3_gfs.20230102_00.gfswave_output.tar + -rw-r--r-- 1 nwprod prod 32850422784 Jan 4 02:51 com_gfs_v16.3_gfs.20230102_00.gfswave_raw.tar + -rw-r----- 1 nwprod rstprod 7419548160 Jan 4 05:15 com_obsproc_v1.1_gfs.20230102_00.obsproc_gfs.tar + +The warm starts and other output from production are at C768 deterministic and C384 EnKF. The warm start files must be converted to your desired resolution(s) using ``chgres_cube`` if you wish to run a different resolution. If you are running a C768C384L127 experiment you can use them as is. + +------------------------------------------------------------------------------------------ +What files should you pull for starting a new experiment with warm starts from production? +------------------------------------------------------------------------------------------ + +That depends on what mode you want to run -- forecast-only or cycled. Whichever mode, navigate to the top of your ``COMROT`` and pull the entirety of the tarball(s) listed below for your mode. The files within the tarball are already in the ``$CDUMP.$PDY/$CYC/$ATMOS`` folder format expected by the system. + +For forecast-only there are two tarballs to pull + +1. File #1 (for starting cycle SDATE): + +:: + + /NCEPPROD/hpssprod/runhistory/rhYYYY/YYYYMM/YYYYMMDD/com_gfs_vGFSVER_gfs.YYYYMMDD_CC.gfs_restart.tar + +...where ``GFSVER`` is the version of the GFS (e.g. "16.3"). + +2. File #2 (for prior cycle GDATE=SDATE-06): + +:: + + /NCEPPROD/hpssprod/runhistory/rhYYYY/YYYYMM/YYYYMMDD/com_gfs_vGFSVER_gdas.YYYYMMDD_CC.gdas_restart.tar + +...where ``GFSVER`` is the version of the GFS (e.g. "16.3"). + +For cycled mode there 18 tarballs to pull (9 for SDATE and 9 for GDATE (SDATE-06)): + +:: + + HPSS path: /NCEPPROD/hpssprod/runhistory/rhYYYY/YYYYMM/YYYYMMDD/ + +Tarballs per cycle: + +:: + + com_gfs_vGFSVER_gdas.YYYYMMDD_CC.gdas_restart.tar + com_gfs_vGFSVER_enkfgdas.YYYYMMDD_CC.enkfgdas_restart_grp1.tar + com_gfs_vGFSVER_enkfgdas.YYYYMMDD_CC.enkfgdas_restart_grp2.tar + com_gfs_vGFSVER_enkfgdas.YYYYMMDD_CC.enkfgdas_restart_grp3.tar + com_gfs_vGFSVER_enkfgdas.YYYYMMDD_CC.enkfgdas_restart_grp4.tar + com_gfs_vGFSVER_enkfgdas.YYYYMMDD_CC.enkfgdas_restart_grp5.tar + com_gfs_vGFSVER_enkfgdas.YYYYMMDD_CC.enkfgdas_restart_grp6.tar + com_gfs_vGFSVER_enkfgdas.YYYYMMDD_CC.enkfgdas_restart_grp7.tar + com_gfs_vGFSVER_enkfgdas.YYYYMMDD_CC.enkfgdas_restart_grp8.tar + +Go to the top of your ``COMROT/ROTDIR`` and pull the contents of all tarballs there. The tarballs already contain the needed directory structure. + +.. _warmstarts-preprod-parallels: + +******************************************* +Warm starts (from pre-production parallels) +******************************************* + +Recent pre-implementation parallel series was for GFS v16 (implemented March 2021). For the prior v15 (Q2FY19) see an additional table below. 
+ +* **What resolution are warm-starts available for?** Warm-start ICs are saved at the resolution the model was run at (C768/C384) and can only be used to run at the same resolution combination. If you need to run a different resolution you will need to make your own cold-start ICs. See cold start section above. +* **What dates have warm-start files saved?** Unfortunately the frequency changed enough during the runs that it’s not easy to provide a definitive list easily. +* **What files?** All warm-starts are saved in separate tarballs which include “restart” in the name. You need to pull the entirety of each tarball, all files included in the restart tarballs are needed. +* **Where are these tarballs?** See below for the location on HPSS for each v16 pre-implementation parallel. +* **What tarballs do I need to grab for my experiment?** Tarballs from two cycles are required. The tarballs are listed below, where $CDATE is your starting cycle and $GDATE is one cycle prior. + + - Forecast-only + + ../$CDATE/gfs_restarta.tar + + ../$GDATE/gdas_restartb.tar + - Cycled w/EnKF + + ../$CDATE/gdas_restarta.tar + + ../$CDATE/enkfgdas_restarta_grp##.tar (where ## is 01 through 08) (note, older tarballs may include a period between enkf and gdas: "enkf.gdas") + + ../$GDATE/gdas_restartb.tar + + ../$GDATE/enkfgdas_restartb_grp##.tar (where ## is 01 through 08) (note, older tarballs may include a period between enkf and gdas: "enkf.gdas") + +* **Where do I put the warm-start initial conditions?** Extraction should occur right inside your COMROT. You may need to rename the enkf folder (enkf.gdas.$PDY -> enkfgdas.$PDY). + +Due to a recent change in the dycore, you may also need an additional offline step to fix the checksum of the NetCDF files for warm start. See the :ref:`Fix netcdf checksum section `. + +.. 
_retrospective: + +-------------------------------------------------------------- +GFSv16 (March 2021) Pre-Implementation Parallel HPSS Locations +-------------------------------------------------------------- + ++-----------------------------+---------------+--------------------------------------------------+ +| Time Period | Parallel Name | Archive Location on HPSS | +| | | PREFIX=/NCEPDEV/emc-global/5year/emc.glopara | ++-----------------------------+---------------+--------------------------------------------------+ +| 2019050106 ~ 2019060100 | v16retro0e | $PREFIX/WCOSS_D/gfsv16/v16retro0e/``yyyymmddhh`` | ++-----------------------------+---------------+--------------------------------------------------+ +| 2019060106 ~ 2019083118 | v16retro1e | $PREFIX/WCOSS_D/gfsv16/v16retro1e/``yyyymmddhh`` | ++-----------------------------+---------------+--------------------------------------------------+ +| 2019090100 ~ 2019110918 | v16retro2e | $PREFIX/WCOSS_D/gfsv16/v16retro2e/``yyyymmddhh`` | ++-----------------------------+---------------+--------------------------------------------------+ +| 2019111000 ~ 2020122200 | v16rt2 | $PREFIX/WCOSS_D/gfsv16/v16rt2/``yyyymmddhh`` | ++-----------------------------+---------------+--------------------------------------------------+ +| 2020122206 ~ implementation | v16rt2n | $PREFIX/WCOSS_D/gfsv16/v16rt2n/``yyyymmddhh`` | ++-----------------------------+---------------+--------------------------------------------------+ + +---------------------------------------------------------- +GFSv15 (Q2FY19) Pre-Implementation Parallel HPSS Locations +---------------------------------------------------------- + ++---------------------+-----------------+-----------------------------------------------------------+ +| Time Period | Parallel Name | Archive Location on HPSS | +| | | PREFIX=/NCEPDEV/emc-global/5year | ++---------------------+-----------------+-----------------------------------------------------------+ +| 20180525 - 20190612 | prfv3rt1 | $PREFIX/emc.glopara/WCOSS_C/Q2FY19/prfv3rt1 | ++---------------------+-----------------+-----------------------------------------------------------+ +| 20171125 - 20170831 | fv3q2fy19retro1 | $PREFIX/Fanglin.Yang/WCOSS_DELL_P3/Q2FY19/fv3q2fy19retro1 | ++---------------------+-----------------+-----------------------------------------------------------+ +| 20170525 - 20170625 | fv3q2fy19retro2 | $PREFIX/emc.glopara/WCOSS_C/Q2FY19/fv3q2fy19retro2 | ++---------------------+-----------------+-----------------------------------------------------------+ +| 20170802 - 20171130 | fv3q2fy19retro2 | $PREFIX/Fanglin.Yang/WCOSS_DELL_P3/Q2FY19/fv3q2fy19retro2 | ++---------------------+-----------------+-----------------------------------------------------------+ +| 20161125 - 20170531 | fv3q2fy19retro3 | $PREFIX/Fanglin.Yang/WCOSS_DELL_P3/Q2FY19/fv3q2fy19retro3 | ++---------------------+-----------------+-----------------------------------------------------------+ +| 20160817 - 20161130 | fv3q2fy19retro4 | $PREFIX/emc.glopara/WCOSS_DELL_P3/Q2FY19/fv3q2fy19retro4 | ++---------------------+-----------------+-----------------------------------------------------------+ +| 20160522 - 20160825 | fv3q2fy19retro4 | $PREFIX/emc.glopara/WCOSS_C/Q2FY19/fv3q2fy19retro4 | ++---------------------+-----------------+-----------------------------------------------------------+ +| 20151125 - 20160531 | fv3q2fy19retro5 | $PREFIX/emc.glopara/WCOSS_DELL_P3/Q2FY19/fv3q2fy19retro5 | 
++---------------------+-----------------+-----------------------------------------------------------+ +| 20150503 - 20151130 | fv3q2fy19retro6 | $PREFIX/emc.glopara/WCOSS_DELL_P3/Q2FY19/fv3q2fy19retro6 | ++---------------------+-----------------+-----------------------------------------------------------+ + +.. _gfsv17-warmstarts: + +*************************************** +Using pre-GFSv17 warm starts for GFSv17 +*************************************** + +If a user wishes to run a high-res (C768C384L127) GFSv17 experiment with warm starts from the operational GFSv16 (or older) warm starts, they must process the initial condition files before using. See details below in the :ref:`Fix netcdf checksum section `. + +.. _gfsv17-checksum: + +------------------------- +Fix NetCDF checksum issue +------------------------- + +Due to a recent change in UFS, the setting to bypass the data verification no longer works, so you may also need an additional offline step to delete the checksum of the NetCDF files for warm start: + +On RDHPCS: + +:: + + module load nco/4.9.3 + +On WCOSS2: + +:: + + module load intel/19.1.3.304 + module load netcdf/4.7.4 + module load udunits/2.2.28 + module load gsl/2.7 + module load nco/4.7.9 + +And then on all platforms: + +:: + + cd $COMROT + for f in $(find ./ -name *tile*.nc); do echo $f; ncatted -a checksum,,d,, $f; done diff --git a/docs/source/jobs.rst b/docs/source/jobs.rst new file mode 100644 index 0000000000..67863bb9a2 --- /dev/null +++ b/docs/source/jobs.rst @@ -0,0 +1,87 @@ +################# +GFS Configuration +################# + +.. figure:: _static/GFS_v16_flowchart.png + + Schematic flow chart for GFS v16 in operations + +The sequence of jobs that are run for an end-to-end (analysis+forecast+post processing+verification) GFS configuration using the Global Workflow is shown above. The system utilizes a collection of scripts that perform the tasks for each step. + +For any cycle the system consists of two suites -- the "gdas" suite which provides the initial guess fields, and the "gfs" suite which creates the initial conditions and forecast of the system. As with the operational system, the gdas runs for each cycle (00, 06, 12, and 18 UTC), however, to save time and space in experiments, the gfs (right side of the diagram) is initially setup to run for only the 00 UTC cycle (See the "run GFS this cycle?" portion of the diagram). The option to run the GFS for all four cycles is available (see the ``gfs_cyc`` variable in configuration file). + +An experimental run is different from operations in the following ways: + +* Workflow manager: operations utilizes `ecFlow `__, while development currently utilizes `ROCOTO `__. Note, experiments can also be run using ecFlow on platforms with ecFlow servers established. + +* Dump step is not run as it has already been completed during the real-time production runs and dump data is available in the global dump archive on supported machines. + +* Addition steps in experimental mode: + + - verification (vrfy) + + - archive (arch) + +Downstream jobs (e.g. awips, gempak, etc.) are not included in the diagram. Those jobs are not normally run in developmental tests. 
+ +============================= +Jobs in the GFS Configuration +============================= ++-------------------+-----------------------------------------------------------------------------------------------------------------------+ +| JOB NAME | PURPOSE | ++===================+=======================================================================================================================+ +| anal | Runs the analysis. 1) Runs the atmospheric analysis (global_gsi) to produce analysis increments; 2) Update surface | +| | guess file via global_cycle to create surface analysis on tiles. | ++-------------------+-----------------------------------------------------------------------------------------------------------------------+ +| analcalc | Adds the analysis increments to previous cycle’s forecasts to produce atmospheric analysis files. Produces surface | +| | analysis file on Gaussian grid. | ++-------------------+-----------------------------------------------------------------------------------------------------------------------+ +| analdiag | Creates netCDF diagnostic files containing observation values, innovation (O-F), error, quality control, as well as | +| | other analysis-related quantities (cnvstat, radstat, ozstat files). | ++-------------------+-----------------------------------------------------------------------------------------------------------------------+ +| arch | Archives select files from the deterministic model and cleans up older data. | ++-------------------+-----------------------------------------------------------------------------------------------------------------------+ +| earcN/eamn | Archival script for EnKF: 1) Write select EnKF output to HPSS; 2) Copy select files to online archive; 3) Clean up | +| | EnKF temporary run directories; 4) Remove "old" EnKF files from rotating directory. | ++-------------------+-----------------------------------------------------------------------------------------------------------------------+ +| ecenN/ecmn | Recenter ensemble members around hi-res deterministic analysis. GFS v16 recenters ensemble member analysis. | +| | increments. | ++-------------------+-----------------------------------------------------------------------------------------------------------------------+ +| echgres | Runs chgres on full-resolution forecast for EnKF recentering (ecen). | ++-------------------+-----------------------------------------------------------------------------------------------------------------------+ +| ediag | Same as analdiag but for ensemble members. | ++-------------------+-----------------------------------------------------------------------------------------------------------------------+ +| efcsN/efmn | Run 9 hour forecast for each ensemble member. There are 80 ensemble members. Each efcs job sequentially processes 8 | +| | ensemble members, so there are 10 efcs jobs in total. | ++-------------------+-----------------------------------------------------------------------------------------------------------------------+ +| eobs | Data selection for EnKF update (eupd). | ++-------------------+-----------------------------------------------------------------------------------------------------------------------+ +| eposN/epmn | Generate ensemble mean atmospheric and surface forecast files. The ensemble spread is also computed for atmospheric | +| | forecast files. 
| ++-------------------+-----------------------------------------------------------------------------------------------------------------------+ +| esfc | Generate ensemble surface analyses on tiles. | ++-------------------+-----------------------------------------------------------------------------------------------------------------------+ +| eupd | Perform EnKF update (i.e., generate ensemble member analyses). | ++-------------------+-----------------------------------------------------------------------------------------------------------------------+ +| fcst | Runs the forecast (with or without one-way waves). | ++-------------------+-----------------------------------------------------------------------------------------------------------------------+ +| metpN | Runs MET/METplus verification via EMC_verif-global. | ++-------------------+-----------------------------------------------------------------------------------------------------------------------+ +| prep | Runs the data preprocessing prior to the analysis (storm relocation if needed and generation of prepbufr file). | ++-------------------+-----------------------------------------------------------------------------------------------------------------------+ +| postN | Runs the post processor. | ++-------------------+-----------------------------------------------------------------------------------------------------------------------+ +| vrfy | Runs the verification tasks. | ++-------------------+-----------------------------------------------------------------------------------------------------------------------+ +| waveinit | Runs wave initialization step. | ++-------------------+-----------------------------------------------------------------------------------------------------------------------+ +| waveprep | Runs wave prep step. | ++-------------------+-----------------------------------------------------------------------------------------------------------------------+ +| wavepostsbs | Runs wave post-processing side-by-side. | ++-------------------+-----------------------------------------------------------------------------------------------------------------------+ +| wavepostbndpnt | Runs wave post-processing for boundary points. | ++-------------------+-----------------------------------------------------------------------------------------------------------------------+ +| wavepostbndpntbll | Runs wave post-processing for boundary points bulletins. | ++-------------------+-----------------------------------------------------------------------------------------------------------------------+ +| wavepostpnt | Runs wave post-processing for points. | ++-------------------+-----------------------------------------------------------------------------------------------------------------------+ diff --git a/docs/source/monitor_rocoto.rst b/docs/source/monitor_rocoto.rst new file mode 100644 index 0000000000..1b8b2a3836 --- /dev/null +++ b/docs/source/monitor_rocoto.rst @@ -0,0 +1,136 @@ +================== +Monitor ROCOTO Run +================== + +Click `here `__ to view full rocoto documentation on GitHub + + +^^^^^^^^^^^^^^^^^^ +Using command line +^^^^^^^^^^^^^^^^^^ + +You can use Rocoto commands with arguments to check the status of your experiment. 
+ +Start or continue a run: + +:: + + rocotorun -d /path/to/workflow/database/file -w /path/to/workflow/xml/file + +Check the status of the workflow: + +:: + + rocotostat -d /path/to/workflow/database/file -w /path/to/workflow/xml/file [-c YYYYMMDDCCmm,[YYYYMMDDCCmm,...]] [-t taskname,[taskname,...]] [-s] [-T] + +.. note:: + YYYYMMDDCCmm = YearMonthDayCycleMinute ...where mm/Minute is ’00’ for all cycles currently. + +Check the status of a job: + +:: + + rocotocheck -d /path/to/workflow/database/file -w /path/to/workflow/xml/file -c YYYYMMDDCCmm -t taskname + +Force a task to run (ignores dependencies - USE CAREFULLY!): + +:: + + rocotoboot -d /path/to/workflow/database/file -w /path/to/workflow/xml/file -c YYYYMMDDCCmm -t taskname + +Rerun task(s): + +:: + + rocotorewind -d /path/to/workflow/database/file -w /path/to/workflow/xml/file -c YYYYMMDDCCmm -t taskname + + (If job is currently queued or running rocoto will kill the job. Run rocotorun afterwards to fire off rewound task.) + +Set a task to complete (overwrites current state): + +:: + + rocotocomplete -d /path/to/workflow/database/file -w /path/to/workflow/xml/file -c YYYYMMDDCCmm -t taskname + +(Will not kill queued or running job, only update status.) + +Several dates and task names may be specified in the same command by adding more -c and -t options. However, lists are not allowed. + +^^^^^^^^^^^^^^^^^ +Use ROCOTO viewer +^^^^^^^^^^^^^^^^^ + +An alternative approach is to use a GUI that was designed to assist with monitoring global workflow experiments that use ROCOTO. It can be found under the ``workflow`` folder in global-workflow. + +***** +Usage +***** + +:: + + ./rocoto_viewer.py -d /path/to/workflow/database/file -w /path/to/workflow/xml/file + +.. note:: + Note 1: Terminal/window must be wide enough to display all experiment information columns, viewer will complain if not. + + Note 2: The viewer requires the full path to the database and xml files if you are not in your EXPDIR when you invoke it. + + Note 3: Only ``TERM=xterm`` is supported. You may wish to create a shell function to switch automatically if you are in a different terminal: + + Bash example: + + :: + + function rv { + oldterm=${TERM}; + export TERM='xterm'; + ${PATH_TO_VIEWER}/rocoto_viewer.py $@; + export TERM=${oldterm}; + } + +********************* +What the viewer shows +********************* + + .. figure:: _static/fv3_rocoto_view.png + + Sample output from Rocoto viewer + +The figure above shows a sample output from a Rocoto viewer for a running experiment. Where: + + * First column: cycle (YYYYMMDDCCmm, YYYY=year, MM=month, DD=day, CC=cycle hour, mm=minute) + * Second column: task name (a "<" symbol indicates a group/meta-task, click "x" when meta-task is selected to expand/collapse) + * Third column: job ID from scheduler + * Fourth column: job state (QUEUED, RUNNING, SUCCEEDED, FAILED, or DEAD) + * Fifth column: exit code (0 if all ended well) + * Sixth column: number of tries/attempts to run job (0 when not yet run or just rewound, 1 when run once successfully, 2+ for multiple tries up to max try value where job is considered DEAD) + * Seventh column: job duration in seconds + +************************** +How to navigate the viewer +************************** + +The rocoto viewer accepts both mouse and keyboard inputs. Click “h” for help menu and more options. 
+ +Available viewer commands:: + + c = get information on selected job + r = rewind (rerun) selected job, group, or cycle + R = run rocotorun + b = boot (forcibly run) selected job or group + -> = right arrow key, advance viewer forward to next cycle + <- = left arrow key, advance viewer backward to previous cycle + Q = quit/exit viewer + +Advanced features: + + * Select multiple tasks at once + + - Click “Enter” on a task to select it, click on other tasks or use the up/down arrows to move to other tasks and click “Enter” to select them as well. + - When you next choose “r” for rewinding the pop-up window will now ask if you are sure you want to rewind all those selected tasks. + + * Rewind entire group or cycle + + - Group - While group/metatask is collapsed (<) click “r” to rewind whole group/metatask. + - Cycle - Use up arrow to move selector up past the first task until the entire left column is highlighted. Click “r” and the entire cycle will be rewound. + diff --git a/docs/source/output.rst b/docs/source/output.rst new file mode 100644 index 0000000000..5ccbbb0fc1 --- /dev/null +++ b/docs/source/output.rst @@ -0,0 +1,20 @@ +############### +Plotting Output +############### + +=============== +Analysis output +=============== + +The `GSI Monitor `_ repository contains a monitoring package called **RadMon**. This package reads the information on the radiances contained in the radstat files, such as quality control flags and departure statistics, and produces a webpage with many plots such as time series of data counts for a particular instrument. You can also directly compare two different experiments with this tool. If there are quantities that you are interested in but the RadMon package is not plotting them for you, you can use the existing RadMon code as a guide for how to read them and plot them yourself. The radstat files contain a wealth of information. + +The RadMon package can be found under the ``src/Radiance_Monitor`` folder within the `GSI Monitor`_. If checked out under global-workflow you will find it under ``gsi_monitor.fd/src/Radiance_Monitor``. + +If you have questions or issues getting the package to work for you please contact the developer of RadMon: Ed Safford (edward.safford@noaa.gov). + +=============== +Forecast output +=============== + +This section will be updated when we have some basic plotting utilities using EMCPY + diff --git a/docs/source/run.rst b/docs/source/run.rst new file mode 100644 index 0000000000..56728d3282 --- /dev/null +++ b/docs/source/run.rst @@ -0,0 +1,16 @@ +################### +Run Global Workflow +################### + +Here we will show how you can run an experiment using the Global Workflow. The Global workflow is regularly evolving and the underlying UFS-weather-model that it drives can run many different configurations. So this part of the document will be regularly updated. The workflow as it is configured today can be run as forecast only or cycled (forecast+Data Assimilation). Since cycled mode requires a number of Data Assimilation supporting repositories to be checked out, the instructions for the two modes from initial checkout stage will be slightly different. Apart from this there is a third mode that is rarely used in development mode and is primarily for operational use. This mode switches on specialized post processing needed by the aviation industry. Since the files associated with this mode are restricted, only select users will have need and/or ability to run in this mode. + +.. 
toctree:: + + clone.rst + init.rst + setup.rst + configure.rst + start.rst + monitor_rocoto.rst + view.rst + errors_faq.rst diff --git a/docs/source/setup.rst b/docs/source/setup.rst new file mode 100644 index 0000000000..ec63327fbc --- /dev/null +++ b/docs/source/setup.rst @@ -0,0 +1,304 @@ +================ +Experiment Setup +================ + + Global workflow uses a set of scripts to help configure and set up the drivers (also referred to as Workflow Manager) that run the end-to-end system. While currently we use a `ROCOTO `__ based system and that is documented here, an `ecFlow `__ based systm is also under development and will be introduced to the Global Workflow when it is mature. To run the setup scripts, you need to make sure to have a copy of ``python3`` with ``numpy`` available. The easiest way to guarantee this is to load python from the `official hpc-stack installation `_ for the machine you are on: + +.. list-table:: Python Module Load Commands + :widths: 25 120 + :header-rows: 1 + + * - **MACHINE** + - **COMMAND(S)** + * - Hera + - :: + + module use -a /contrib/anaconda/modulefiles + module load anaconda/anaconda3-5.3.1 + * - Orion + - :: + + module load python/3.7.5 + * - WCOSS2 + - :: + + module load python/3.8.6 + * - S4 + - :: + + module load miniconda/3.8-s4 + + * - Jet + - :: + + module use /mnt/lfs4/HFIP/hfv3gfs/role.epic/miniconda3/modulefiles + module load miniconda3/4.12.0 + conda activate ufswm + +If running with Rocoto make sure to have a Rocoto module loaded before running setup scripts: + +.. list-table:: ROCOTO Module Load Commands + :widths: 25 120 + :header-rows: 1 + + * - **MACHINE** + - **COMMAND(S)** + * - Hera + - :: + + module load rocoto/1.3.3 + * - Orion + - :: + + module load contrib + module load rocoto/1.3.3 + * - WCOSS2 + - :: + + module use /apps/ops/test/nco/modulefiles/ + module load core/rocoto/1.3.5 + * - S4 + - :: + + module load rocoto/1.3.4 + * - Jet + - :: + + module load rocoto/1.3.3 + +^^^^^^^^^^^^^^^^^^^^^^^^ +Forecast-only experiment +^^^^^^^^^^^^^^^^^^^^^^^^ + +Scripts that will be used: + + * ``workflow/setup_expt.py`` + * ``workflow/setup_xml.py`` + +*************************************** +Step 1: Run experiment generator script +*************************************** + +The following command examples include variables for reference but users should not use environmental variables but explicit values to submit the commands. Exporting variables like EXPDIR to your environment causes an error when the python scripts run. Please explicitly include the argument inputs when running both setup scripts: + +:: + + cd workflow + ./setup_expt.py gfs forecast-only --idate $IDATE --edate $EDATE [--app $APP] [--start $START] [--gfs_cyc $GFS_CYC] [--resdet $RESDET] + [--pslot $PSLOT] [--configdir $CONFIGDIR] [--comrot $COMROT] [--expdir $EXPDIR] + +where: + + * ``gfs`` is the first positional argument that instructs the setup script to produce a GFS experiment directory + * ``forecast-only`` is the second positional argument that instructs the setup script to produce an experiment directory for forecast only experiments. 
+ * ``$APP`` is the target application, one of: + + - ATM: atmosphere-only [default] + - ATMW: atm-wave + - ATMA: atm-aerosols + - S2S: atm-ocean-ice + - S2SA: atm-ocean-ice-aerosols + - S2SW: atm-ocean-ice-wave + - S2SWA: atm-ocean-ice-wave-aerosols + + * ``$START`` is the start type (warm or cold [default]) + * ``$IDATE`` is the initial start date of your run (first cycle CDATE, YYYYMMDDCC) + * ``$EDATE`` is the ending date of your run (YYYYMMDDCC) and is the last cycle that will complete + * ``$PSLOT`` is the name of your experiment [default: test] + * ``$CONFIGDIR`` is the path to the ``/config`` folder under the copy of the system you're using [default: $TOP_OF_CLONE/parm/config/] + * ``$RESDET`` is the FV3 resolution (i.e. 768 for C768) [default: 384] + * ``$GFS_CYC`` is the forecast frequency (0 = none, 1 = 00z only [default], 2 = 00z & 12z, 4 = all cycles) + * ``$COMROT`` is the path to your experiment output directory. DO NOT include PSLOT folder at end of path, it’ll be built for you. [default: $HOME (but do not use default due to limited space in home directories normally, provide a path to a larger scratch space)] + * ``$EXPDIR`` is the path to your experiment directory where your configs will be placed and where you will find your workflow monitoring files (i.e. rocoto database and xml file). DO NOT include PSLOT folder at end of path, it will be built for you. [default: $HOME] + +Examples: + +Atm-only: + +:: + + cd workflow + ./setup_expt.py gfs forecast-only --pslot test --idate 2020010100 --edate 2020010118 --resdet 384 --gfs_cyc 4 --comrot /some_large_disk_area/Joe.Schmo/comrot --expdir /some_safe_disk_area/Joe.Schmo/expdir + +Coupled: + +:: + + cd workflow + ./setup_expt.py gfs forecast-only --app S2SW --pslot coupled_test --idate 2013040100 --edate 2013040100 --resdet 384 --comrot /some_large_disk_area/Joe.Schmo/comrot --expdir /some_safe_disk_area/Joe.Schmo/expdir + +Coupled with aerosols: + +:: + + cd workflow + ./setup_expt.py gfs forecast-only --app S2SWA --pslot coupled_test --idate 2013040100 --edate 2013040100 --resdet 384 --comrot /some_large_disk_area/Joe.Schmo/comrot --expdir /some_safe_disk_area/Joe.Schmo/expdir + +**************************************** +Step 2: Set user and experiment settings +**************************************** + +Go to your EXPDIR and check/change the following variables within your config.base now before running the next script: + + * ACCOUNT + * HOMEDIR + * STMP + * PTMP + * ARCDIR (location on disk for online archive used by verification system) + * HPSSARCH (YES turns on archival) + * HPSS_PROJECT (project on HPSS if archiving) + * ATARDIR (location on HPSS if archiving) + +Some of those variables will be found within a machine-specific if-block so make sure to change the correct ones for the machine you'll be running on. + +Now is also the time to change any other variables/settings you wish to change in config.base or other configs. `Do that now.` Once done making changes to the configs in your EXPDIR go back to your clone to run the second setup script. See :doc:configure.rst for more information on configuring your run. + +************************************* +Step 3: Run workflow generator script +************************************* + +This step sets up the files needed by the Workflow Manager/Driver. 
At this moment only ROCOTO configurations are generated: + +:: + + ./setup_xml.py $EXPDIR/$PSLOT + +Example: + +:: + + ./setup_xml.py /some_safe_disk_area/Joe.Schmo/expdir/test + +Additional options for setting up Rocoto are available with `setup_xml.py -h` that allow users to change the number of failed tries, number of concurrent cycles and tasks as well as Rocoto's verbosity levels. + +**************************************** +Step 4: Confirm files from setup scripts +**************************************** + +You will now have a rocoto xml file in your EXPDIR ($PSLOT.xml) and a crontab file generated for your use. Rocoto uses CRON as the scheduler. If you do not have a crontab file you may not have had the rocoto module loaded. To fix this load a rocoto module and then rerun setup_xml.py script again. Follow directions for setting up the rocoto cron on the platform the experiment is going to run on. + +^^^^^^^^^^^^^^^^^ +Cycled experiment +^^^^^^^^^^^^^^^^^ + +Scripts that will be used: + + * ``workflow/setup_expt.py`` + * ``workflow/setup_xml.py`` + +*************************************** +Step 1) Run experiment generator script +*************************************** + +The following command examples include variables for reference but users should not use environmental variables but explicit values to submit the commands. Exporting variables like EXPDIR to your environment causes an error when the python scripts run. Please explicitly include the argument inputs when running both setup scripts: + +:: + + cd workflow + ./setup_expt.py gfs cycled --idate $IDATE --edate $EDATE [--app $APP] [--start $START] [--gfs_cyc $GFS_CYC] + [--resdet $RESDET] [--resens $RESENS] [--nens $NENS] [--cdump $CDUMP] + [--pslot $PSLOT] [--configdir $CONFIGDIR] [--comrot $COMROT] [--expdir $EXPDIR] [--icsdir $ICSDIR] + +where: + + * ``gfs`` is the first positional argument that instructs the setup script to produce a GFS experiment directory + * ``cycled`` is the second positional argument that instructs the setup script to produce an experiment directory for cycled experiments. + * ``$APP`` is the target application, one of: + + - ATM: atmosphere-only [default] + - ATMW: atm-wave + + * ``$IDATE`` is the initial start date of your run (first cycle CDATE, YYYYMMDDCC) + * ``$EDATE`` is the ending date of your run (YYYYMMDDCC) and is the last cycle that will complete + * ``$START`` is the start type (warm or cold [default]) + * ``$GFS_CYC`` is the forecast frequency (0 = none, 1 = 00z only [default], 2 = 00z & 12z, 4 = all cycles) + * ``$RESDET`` is the FV3 resolution of the deterministic forecast [default: 384] + * ``$RESENS`` is the FV3 resolution of the ensemble (EnKF) forecast [default: 192] + * ``$NENS`` is the number of ensemble members [default: 20] + * ``$CDUMP`` is the starting phase [default: gdas] + * ``$PSLOT`` is the name of your experiment [default: test] + * ``$CONFIGDIR`` is the path to the config folder under the copy of the system you're using [default: $TOP_OF_CLONE/parm/config/] + * ``$COMROT`` is the path to your experiment output directory. DO NOT include PSLOT folder at end of path, it’ll be built for you. [default: $HOME] + * ``$EXPDIR`` is the path to your experiment directory where your configs will be placed and where you will find your workflow monitoring files (i.e. rocoto database and xml file). DO NOT include PSLOT folder at end of path, it will be built for you. [default: $HOME] + * ``$ICSDIR`` is the path to the ICs for your run if generated separately. 
[default: None] + +.. [#] More Coupled configurations in cycled mode are currently under development and not yet available + +Example: + +:: + + cd workflow + ./setup_expt.py gfs cycled --pslot test --configdir /home/Joe.Schmo/git/global-workflow/parm/config --idate 2020010100 --edate 2020010118 --comrot /some_large_disk_area/Joe.Schmo/comrot --expdir /some_safe_disk_area/Joe.Schmo/expdir --resdet 384 --resens 192 --nens 80 --gfs_cyc 4 + +Example ``setup_expt.py`` on Orion: + +:: + + Orion-login-3$ ./setup_expt.py gfs cycled --pslot test --idate 2022010118 --edate 2022010200 --resdet 192 --resens 96 --nens 80 --comrot /work/noaa/stmp/jschmo/comrot --expdir /work/noaa/global/jschmo/expdir + EDITED: /work/noaa/global/jschmo/expdir/test/config.base as per user input. + EDITED: /work/noaa/global/jschmo/expdir/test/config.aeroanl as per user input. + EDITED: /work/noaa/global/jschmo/expdir/test/config.ocnanal as per user input. + +The message about the config.base.default is telling you that you are free to delete it if you wish but it’s not necessary to remove. Your resulting config.base was generated from config.base.default and the default one is there for your information. + +What happens if I run ``setup_expt.py`` again for an experiment that already exists? + +:: + + Orion-login-3$ ./setup_expt.py gfs cycled --pslot test --idate 2022010118 --edate 2022010200 --resdet 192 --resens 96 --nens 80 --comrot /work/noaa/stmp/jschmo/comrot --expdir /work/noaa/global/jschmo/expdir + + directory already exists in /work/noaa/stmp/jschmo/comrot/test + + Do you wish to over-write [y/N]: y + + directory already exists in /work/noaa/global/jschmo/expdir/test + + Do you wish to over-write [y/N]: y + EDITED: /work/noaa/global/jschmo/expdir/test/config.base as per user input. + EDITED: /work/noaa/global/jschmo/expdir/test/config.aeroanl as per user input. + EDITED: /work/noaa/global/jschmo/expdir/test/config.ocnanal as per user input. + +Your ``COMROT`` and ``EXPDIR`` will be deleted and remade. Be careful with this! + +**************************************** +Step 2: Set user and experiment settings +**************************************** + +Go to your EXPDIR and check/change the following variables within your config.base now before running the next script: + + * ACCOUNT + * HOMEDIR + * STMP + * PTMP + * ARCDIR (location on disk for online archive used by verification system) + * HPSSARCH (YES turns on archival) + * HPSS_PROJECT (project on HPSS if archiving) + * ATARDIR (location on HPSS if archiving) + +Some of those variables will be found within a machine-specific if-block so make sure to change the correct ones for the machine you'll be running on. + +Now is also the time to change any other variables/settings you wish to change in config.base or other configs. `Do that now.` Once done making changes to the configs in your EXPDIR go back to your clone to run the second setup script. See :doc: configure.rst for more information on configuring your run. + + +************************************* +Step 3: Run workflow generator script +************************************* + +This step sets up the files needed by the Workflow Manager/Driver. 
At this moment only ROCOTO configurations are generated: + +:: + + ./setup_xml.py $EXPDIR/$PSLOT + +Example: + +:: + + ./setup_xml.py /some_safe_disk_area/Joe.Schmo/expdir/test + +**************************************** +Step 4: Confirm files from setup scripts +**************************************** + +You will now have a rocoto xml file in your EXPDIR ($PSLOT.xml) and a crontab file generated for your use. Rocoto uses CRON as the scheduler. If you do not have a crontab file you may not have had the rocoto module loaded. To fix this load a rocoto module and then rerun ``setup_xml.py`` script again. Follow directions for setting up the rocoto cron on the platform the experiment is going to run on. diff --git a/docs/source/start.rst b/docs/source/start.rst new file mode 100644 index 0000000000..957971e637 --- /dev/null +++ b/docs/source/start.rst @@ -0,0 +1,48 @@ +============== +Start your run +============== + +Make sure a rocoto module is loaded: ``module load rocoto`` + +If needed check for available rocoto modules on machine: ``module avail rocoto`` or ``module spider rocoto`` + +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Start your run from within your EXPDIR +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +:: + + rocotorun -d $PSLOT.db -w $PSLOT.xml + +The first jobs of your run should now be queued or already running (depending on machine traffic). How exciting! + +You'll now have a "logs" folder in both your COMROT and EXPDIR. The EXPDIR log folder contains workflow log files (e.g. rocoto command results) and the COMROT log folder will contain logs for each job (previously known as dayfiles). + +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Set up your experiment cron +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. note:: + Orion currently only supports cron on Orion-login-1. Cron support for other login nodes is coming in the future. + +:: + + crontab -e + +or + +:: + + crontab $PSLOT.crontab + +.. warning:: + + The ``crontab $PSLOT.crontab`` command will overwrite existing crontab file on your login node. If running multiple crons recommend editing crontab file with ``crontab -e`` command. + +Check your crontab settings:: + + crontab -l + +Crontab uses following format:: + + */5 * * * * /path/to/rocotorun -w /path/to/workflow/definition/file -d /path/to/workflow/database/file diff --git a/docs/source/view.rst b/docs/source/view.rst new file mode 100644 index 0000000000..3093755e9a --- /dev/null +++ b/docs/source/view.rst @@ -0,0 +1,46 @@ +====================== +View Experiment output +====================== + +The output from your run will be found in the ``COMROT/ROTDIR`` you established. This is also where you placed your initial conditions. 
Within your ``COMROT`` you will have the following directory structure (based on the type of experiment you run): + +^^^^^^^^^^^^^ +Forecast-only +^^^^^^^^^^^^^ + +:: + + gfs.YYYYMMDD/CC/atmos <- contains deterministic long forecast gfs inputs/outputs (atmosphere) + gfs.YYYYMMDD/CC/wave <- contains deterministic long forecast gfs inputs/outputs (wave) + logs/ <- logs for each cycle in the run + vrfyarch/ <- contains files related to verification and archival + +^^^^^^ +Cycled +^^^^^^ + +:: + + enkfgdas.YYYYMMDD/CC/mem###/atmos <- contains EnKF inputs/outputs for each cycle and each member + gdas.YYYYMMDD/CC/atmos <- contains deterministic gdas inputs/outputs (atmosphere) + gdas.YYYYMMDD/CC/wave <- contains deterministic gdas inputs/outputs (wave) + gfs.YYYYMMDD/CC/atmos <- contains deterministic long forecast gfs inputs/outputs (atmosphere) + gfs.YYYYMMDD/CC/wave <- contains deterministic long forecast gfs inputs/outputs (wave) + logs/ <- logs for each cycle in the run + vrfyarch/ <- contains files related to verification and archival + +Here is an example ``COMROT`` for a cycled run as it may look several cycles in (note the archival steps remove older cycle folders as the run progresses): + +:: + + -bash-4.2$ ll /scratch1/NCEPDEV/stmp4/Joe.Schmo/comrot/testcyc192 + total 88 + drwxr-sr-x 4 Joe.Schmo stmp 4096 Oct 22 04:50 enkfgdas.20190529 + drwxr-sr-x 4 Joe.Schmo stmp 4096 Oct 22 07:20 enkfgdas.20190530 + drwxr-sr-x 6 Joe.Schmo stmp 4096 Oct 22 03:15 gdas.20190529 + drwxr-sr-x 4 Joe.Schmo stmp 4096 Oct 22 07:15 gdas.20190530 + drwxr-sr-x 6 Joe.Schmo stmp 4096 Oct 22 03:15 gfs.20190529 + drwxr-sr-x 4 Joe.Schmo stmp 4096 Oct 22 07:15 gfs.20190530 + drwxr-sr-x 120 Joe.Schmo stmp 12288 Oct 22 07:15 logs + drwxr-sr-x 13 Joe.Schmo stmp 4096 Oct 22 07:07 vrfyarch + diff --git a/docs/source/wave.rst b/docs/source/wave.rst new file mode 100644 index 0000000000..4ff29ab245 --- /dev/null +++ b/docs/source/wave.rst @@ -0,0 +1,132 @@ +============== +Wave Component +============== + +The wave model, WAVEWATCH III, is one component of the forecast model. This documentation is a work in progress +and currently contains information about how to add a new grid to the workflow. + +^^^^^^^^^^^^^^^^^^^^^^ +Adding a new wave Grid +^^^^^^^^^^^^^^^^^^^^^^ + +******************** +Naming the Wave Grid +******************** + +The naming convention of the wave grid within the workflow is expected to follow a convention of region underscore resolution. 
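+For example, combining the region and resolution acronyms listed below, a global 0.25 degree grid would be named ``glo_025``.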
+Here are several regional naming conventions:
+
++-----------+-----------------------+
+| Acronym   | Description           |
++===========+=======================+
+| glo       | Global domain         |
++-----------+-----------------------+
+| ak        | Alaska                |
++-----------+-----------------------+
+| ao or aoc | Arctic Ocean          |
++-----------+-----------------------+
+| at        | Atlantic              |
++-----------+-----------------------+
+| ep        | East Pacific          |
++-----------+-----------------------+
+| gnh       | Global Northern Ocean |
++-----------+-----------------------+
+| gsh       | Global Southern Ocean |
++-----------+-----------------------+
+| so        | Southern Ocean        |
++-----------+-----------------------+
+| wc        | West Coast            |
++-----------+-----------------------+
+
+Here are examples of resolution names:
+
++---------+----------------------+
+| Acronym | Description          |
++=========+======================+
+| 15m     | 15 min               |
++---------+----------------------+
+| 15mxt   | 15 min extended grid |
++---------+----------------------+
+| 025     | 0.25 degrees         |
++---------+----------------------+
+| 9km     | 9 km                 |
++---------+----------------------+
+
+This new grid name will now be referred to as ${WAVEGRID} for the remainder of this documentation.
+
+****************
+Adding Fix Files
+****************
+
+The following is a table of fix files to be added with a new grid.
+
++-------------------------------------+----------------------------------------+--------------------------------------------------+
+| Name of File                        | File Description                       | Notes/Requirements                               |
++=====================================+========================================+==================================================+
+| ww3_grid.inp_${WAVEGRID}            | Grid Input File                        | Required for all new grids                       |
++-------------------------------------+----------------------------------------+--------------------------------------------------+
+| mesh.${WAVEGRID}.nc                 | Mesh grid file used in forecast        | This is required if this is a computational grid |
++-------------------------------------+----------------------------------------+--------------------------------------------------+
+| ww3_gint.WHTGRIDINT.bin.${WAVEGRID} | Weight file for ww3_gint interpolation | Required if output is interpolated to this grid  |
++-------------------------------------+----------------------------------------+--------------------------------------------------+
+
+While the creation of these files is generally considered out of scope of this document, here is some additional information:
+
+* The ww3_grid.inp_${WAVEGRID} file for structured grids can be created with https://github.com/NOAA-EMC/gridgen
+* The ww3_grid.inp_${WAVEGRID} file must be a single file for the workflow
+* Instructions for creating mesh.${WAVEGRID}.nc can be found at https://ufs-weather-model.readthedocs.io/en/latest/InputsOutputs.html#ww3
+* The ww3_gint.WHTGRIDINT.bin.${WAVEGRID} file can be created by running the ww3_gint routine as desired and then saved.
+
+Once the new fix files have been created, :ref:`open an issue to have the master fix file directory updated`. This is a separate step from the process of updating the workflow, described below.
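+
+As a concrete illustration, for a hypothetical new grid named ``glo_025``, the fix files above would be ``ww3_grid.inp_glo_025``, ``mesh.glo_025.nc``, and ``ww3_gint.WHTGRIDINT.bin.glo_025``.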
+
+********************************
+Updating Config and Script Files
+********************************
+
+You will need to update the following files:
+
+* parm/config/*/config.ufs
+* parm/config/*/config.wave
+* scripts/exgfs_wave_post_gridded_sbs.sh
+
+You will need to add the following files:
+
+* parm/wave/ww3_grib2.${WAVEGRID}.inp.tmpl
+* parm/wave/${WAVEGRID}_interp.inp.tmpl
+
+For config.ufs:
+
+If this is a computational grid you will need to update this file. If this is only a new output grid you can skip this update.
+There are two sections to update:
+
+1. Add the new grid as a possible ww3 resolution at the top of the file.
+2. In the WW3-specific settings section, add a case for the new grid and define ntasks_ww3 (number of tasks for WW3); if the grid requires a different value of nthreads_ww3 (number of threads), add that to the grid-specific case as well.
+
+For config.wave:
+
+If this is a computational grid, add a case for the newly added grid to the possible cases for waveGRD, setting the appropriate non-default values for this specific grid (a minimal structural sketch is given at the end of this page). If this is a new output grid, update all of the waveGRD cases for waveinterpGRD as needed. Please note that if you change the default wave grid, initial conditions must be staged for the new grid for all ICs used in global-workflow testing. Ideally, ICs will be provided for the grids being used in testing even if the new grid is not the default.
+
+For scripts/exgfs_wave_post_gridded_sbs.sh and the inp.tmpl files:
+
+There are two case settings, one for interpolated grid output and one for GRIB output from a computational grid; update them to add the new grid to the possible cases as appropriate. If it's a new interpolated grid, you will also need to add a parm/wave/${WAVEGRID}_interp.inp.tmpl file. See WW3/model/inp/ww3_gint.inp for information about this file. If it's a new computational grid or just an output grid, you will need a new parm/wave/ww3_grib2.${WAVEGRID}.inp.tmpl file. See WW3/model/inp/ww3_grib.inp for more information about this input file. The other existing templated input files can be used as guides to create these new files.
+
+*****************************
+Updates for New Product Grids
+*****************************
+
+If this new grid will be used for AWIPS products, additional updates are likely required.
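+
+As referenced above, here is a minimal structural sketch of the ``config.wave`` change for a new computational grid. The case labels and comments are placeholders only; copy the actual variable list and values from an existing grid's entry in your clone:
+
+::
+
+    case "${waveGRD}" in
+      existing_grid)
+        # existing grid-specific settings (unchanged)
+        ;;
+      new_grid)   # i.e. your ${WAVEGRID}
+        # non-default, grid-specific settings for the new grid,
+        # copied from an existing entry and adjusted as needed
+        ;;
+      *)
+        echo "No wave settings defined for waveGRD=${waveGRD}"
+        ;;
+    esac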
diff --git a/driver/gdas/para_config.gdas_analysis_high b/driver/gdas/para_config.gdas_analysis_high deleted file mode 100644 index 053c201541..0000000000 --- a/driver/gdas/para_config.gdas_analysis_high +++ /dev/null @@ -1,33 +0,0 @@ -# Set variables -export NWROOT=$NWTEST -export COM_IN=/gpfs/hps/ptmp/emc.glopara/com2/gfs/para -export COM_OUT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/test -export SENDDBN=YES -export SAVEGES=YES -export SENDCOM=YES -export GETGES_NWG=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gesenvir=para -export GETGES_COM=/gpfs/hps/ptmp/emc.glopara/com2 -export GESROOT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export KEEPDATA=NO - - -# Set paths -export HOMEgsm=$NWTEST/global_shared.${global_shared_ver} - - -# Set fix files -if [ $CDATE -ge 2017030800 -a $CDATE -lt 2017031312 ] ; then - SATINFO=/gpfs/hps/emc/global/noscrub/emc.glopara/para_gfs/prnemsrn/global_satinfo.txt -fi - - -# Set scripts -GETGESSH=$HOMEgsm/ush/getges.sh - - -# Set executables - - -# Dev para jobs -export ERRSCRIPT='eval [[ $err = 0 ]]' diff --git a/driver/gdas/para_config.gdas_enkf_fcst b/driver/gdas/para_config.gdas_enkf_fcst deleted file mode 100755 index 34d6551cb9..0000000000 --- a/driver/gdas/para_config.gdas_enkf_fcst +++ /dev/null @@ -1,66 +0,0 @@ -# Set variables -export NWROOT=$NWTEST -export COM_IN=/gpfs/hps/ptmp/emc.glopara/com2/gfs/para -export COM_OUT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/test -export SENDDBN=YES -export SAVEGES=NO -export GETGES_NWG=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gesenvir=para -export GETGES_COM=/gpfs/hps/ptmp/emc.glopara/com2 -export GESROOT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export KEEPDATA=NO - - -# Set paths -export HOMEgdas=$NWTEST/gdas.${gdas_ver} -export HOMEgsm=$NWTEST/global_shared.${global_shared_ver} - - -# Set fix files - - -# Set scripts - - -# Set executables - - -# Dev para jobs -export ERRSCRIPT='eval [[ $err = 0 ]]' - - -# Set beginning and ending ensemble member on EnKF group. 
-if [[ $ENSGRP -eq 1 ]]; then - export ENSBEG=1 - export ENSEND=8 -elif [[ $ENSGRP -eq 2 ]]; then - export ENSBEG=9 - export ENSEND=16 -elif [[ $ENSGRP -eq 3 ]]; then - export ENSBEG=17 - export ENSEND=24 -elif [[ $ENSGRP -eq 4 ]]; then - export ENSBEG=25 - export ENSEND=32 -elif [[ $ENSGRP -eq 5 ]]; then - export ENSBEG=33 - export ENSEND=40 -elif [[ $ENSGRP -eq 6 ]]; then - export ENSBEG=41 - export ENSEND=48 -elif [[ $ENSGRP -eq 7 ]]; then - export ENSBEG=49 - export ENSEND=56 -elif [[ $ENSGRP -eq 8 ]]; then - export ENSBEG=57 - export ENSEND=64 -elif [[ $ENSGRP -eq 9 ]]; then - export ENSBEG=65 - export ENSEND=72 -elif [[ $ENSGRP -eq 10 ]]; then - export ENSBEG=73 - export ENSEND=80 -else - echo "***ERROR*** INVALID ENSGRP=$ENSGRP" - exit -fi diff --git a/driver/gdas/para_config.gdas_enkf_inflate_recenter b/driver/gdas/para_config.gdas_enkf_inflate_recenter deleted file mode 100755 index fab9c4a333..0000000000 --- a/driver/gdas/para_config.gdas_enkf_inflate_recenter +++ /dev/null @@ -1,28 +0,0 @@ -# Set variables -export NWROOT=$NWTEST -export COM_IN=/gpfs/hps/ptmp/emc.glopara/com2/gfs/para -export COM_OUT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/test -export SENDDBN=YES -export SAVEGES=NO -export SENDCOM=YES -export GETGES_NWG=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gesenvir=para -export GETGES_COM=/gpfs/hps/ptmp/emc.glopara/com2 -export GESROOT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export KEEPDATA=NO - - -# Set paths - - -# Set fix files - - -# Set scripts - - -# Set executables - - -# Dev para jobs -export ERRSCRIPT='eval [[ $err = 0 ]]' diff --git a/driver/gdas/para_config.gdas_enkf_innovate_obs b/driver/gdas/para_config.gdas_enkf_innovate_obs deleted file mode 100755 index a2827e1dfb..0000000000 --- a/driver/gdas/para_config.gdas_enkf_innovate_obs +++ /dev/null @@ -1,100 +0,0 @@ -# Set variables -export NWROOT=$NWTEST -export COM_IN=/gpfs/hps/ptmp/emc.glopara/com2/gfs/para -export COM_OUT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/test -export SENDDBN=YES -export SAVEGES=NO -export SENDCOM=NO -export GETGES_NWG=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gesenvir=para -export GETGES_COM=/gpfs/hps/ptmp/emc.glopara/com2 -export GESROOT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export KEEPDATA=NO - - -# Set paths -export HOMEgsm=$NWTEST/global_shared.${global_shared_ver} - - -# Set fix files -if [ $CDATE -ge 2017030800 -a $CDATE -lt 2017031312 ] ; then - SATINFO=/gpfs/hps/emc/global/noscrub/emc.glopara/para_gfs/prnemsrn/global_satinfo.txt -fi - - -# Set scripts -GETGESSH=$HOMEgsm/ush/getges.sh - - -# Set executables - - -# Dev para jobs -export ERRSCRIPT='eval [[ $err = 0 ]]' - - -# Set beginning and ending ensemble member on EnKF group. 
-if [[ $ENSGRP -eq 1 ]]; then - export ENSBEG=1 - export ENSEND=4 -elif [[ $ENSGRP -eq 2 ]]; then - export ENSBEG=5 - export ENSEND=8 -elif [[ $ENSGRP -eq 3 ]]; then - export ENSBEG=9 - export ENSEND=12 -elif [[ $ENSGRP -eq 4 ]]; then - export ENSBEG=13 - export ENSEND=16 -elif [[ $ENSGRP -eq 5 ]]; then - export ENSBEG=17 - export ENSEND=20 -elif [[ $ENSGRP -eq 6 ]]; then - export ENSBEG=21 - export ENSEND=24 -elif [[ $ENSGRP -eq 7 ]]; then - export ENSBEG=25 - export ENSEND=28 -elif [[ $ENSGRP -eq 8 ]]; then - export ENSBEG=29 - export ENSEND=32 -elif [[ $ENSGRP -eq 9 ]]; then - export ENSBEG=33 - export ENSEND=36 -elif [[ $ENSGRP -eq 10 ]]; then - export ENSBEG=37 - export ENSEND=40 -elif [[ $ENSGRP -eq 11 ]]; then - export ENSBEG=41 - export ENSEND=44 -elif [[ $ENSGRP -eq 12 ]]; then - export ENSBEG=45 - export ENSEND=48 -elif [[ $ENSGRP -eq 13 ]]; then - export ENSBEG=49 - export ENSEND=52 -elif [[ $ENSGRP -eq 14 ]]; then - export ENSBEG=53 - export ENSEND=56 -elif [[ $ENSGRP -eq 15 ]]; then - export ENSBEG=57 - export ENSEND=60 -elif [[ $ENSGRP -eq 16 ]]; then - export ENSBEG=61 - export ENSEND=64 -elif [[ $ENSGRP -eq 17 ]]; then - export ENSBEG=65 - export ENSEND=68 -elif [[ $ENSGRP -eq 18 ]]; then - export ENSBEG=69 - export ENSEND=72 -elif [[ $ENSGRP -eq 19 ]]; then - export ENSBEG=73 - export ENSEND=76 -elif [[ $ENSGRP -eq 20 ]]; then - export ENSBEG=77 - export ENSEND=80 -else - echo "***ERROR*** INVALID ENSGRP=$ENSGRP" - exit -fi diff --git a/driver/gdas/para_config.gdas_enkf_post b/driver/gdas/para_config.gdas_enkf_post deleted file mode 100755 index 58d68cc3e1..0000000000 --- a/driver/gdas/para_config.gdas_enkf_post +++ /dev/null @@ -1,29 +0,0 @@ -# Set variables -export NWROOT=$NWTEST -export COM_IN=/gpfs/hps/ptmp/emc.glopara/com2/gfs/para -export COM_OUT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/test -export SENDDBN=YES -export SAVEGES=NO -export SENDCOM=YES -export GETGES_NWG=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gesenvir=para -export GETGES_COM=/gpfs/hps/ptmp/emc.glopara/com2 -export GESROOT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export KEEPDATA=NO - - -# Set paths -export HOMEgsm=$NWTEST/global_shared.${global_shared_ver} - - -# Set fix files - - -# Set scripts - - -# Set executables - - -# Dev para jobs -export ERRSCRIPT='eval [[ $err = 0 ]]' diff --git a/driver/gdas/para_config.gdas_enkf_select_obs b/driver/gdas/para_config.gdas_enkf_select_obs deleted file mode 100755 index 04121afe92..0000000000 --- a/driver/gdas/para_config.gdas_enkf_select_obs +++ /dev/null @@ -1,33 +0,0 @@ -# Set variables -export NWROOT=$NWTEST -export COM_IN=/gpfs/hps/ptmp/emc.glopara/com2/gfs/para -export COM_OUT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/test -export SENDDBN=YES -export SAVEGES=NO -export SENDCOM=NO -export GETGES_NWG=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gesenvir=para -export GETGES_COM=/gpfs/hps/ptmp/emc.glopara/com2 -export GESROOT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export KEEPDATA=NO - - -# Set paths -export HOMEgsm=$NWTEST/global_shared.${global_shared_ver} - - -# Set fix files -if [ $CDATE -ge 2017030800 -a $CDATE -lt 2017031312 ] ; then - SATINFO=/gpfs/hps/emc/global/noscrub/emc.glopara/para_gfs/prnemsrn/global_satinfo.txt -fi - - -# Set scripts -GETGESSH=$HOMEgsm/ush/getges.sh - - -# Set executables - - -# Dev para jobs -export ERRSCRIPT='eval [[ $err = 0 ]]' diff --git a/driver/gdas/para_config.gdas_enkf_update b/driver/gdas/para_config.gdas_enkf_update deleted file mode 100755 index 03b5385f8d..0000000000 --- 
a/driver/gdas/para_config.gdas_enkf_update +++ /dev/null @@ -1,33 +0,0 @@ -# Set variables -export NWROOT=$NWTEST -export COM_IN=/gpfs/hps/ptmp/emc.glopara/com2/gfs/para -export COM_OUT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/test -export SENDDBN=YES -export SAVEGES=NO -export SENDCOM=YES -export GETGES_NWG=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gesenvir=para -export GETGES_COM=/gpfs/hps/ptmp/emc.glopara/com2 -export GESROOT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export KEEPDATA=NO - - -# Set paths -export HOMEgsm=$NWTEST/global_shared.${global_shared_ver} - - -# Set fix files -if [ $CDATE -ge 2017030800 -a $CDATE -lt 2017031312 ] ; then - SATINFO=/gpfs/hps/emc/global/noscrub/emc.glopara/para_gfs/prnemsrn/global_satinfo.txt -fi - - -# Set scripts -export GETGESSH=$HOMEgsm/ush/getges.sh - - -# Set executables - - -# Dev para jobs -export ERRSCRIPT='eval [[ $err = 0 ]]' diff --git a/driver/gdas/para_config.gdas_forecast_high b/driver/gdas/para_config.gdas_forecast_high deleted file mode 100755 index 4a25ae632a..0000000000 --- a/driver/gdas/para_config.gdas_forecast_high +++ /dev/null @@ -1,32 +0,0 @@ -# Set variables -export NWROOT=$NWTEST -export COM_IN=/gpfs/hps/ptmp/emc.glopara/com2/gfs/para -export COM_OUT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/test -export SENDDBN=YES -export SAVEGES=NO -export GETGES_NWG=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gesenvir=para -export GETGES_COM=/gpfs/hps/ptmp/emc.glopara/com2 -export GESROOT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gespath=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export KEEPDATA=NO - - -# Set paths -export HOMEgdas=$NWTEST/gdas.${gdas_ver} -export HOMEgsm=$NWTEST/global_shared.${global_shared_ver} - - -# Set fix files - - -# Set scripts - - -# Set executables - - -# Dev para jobs -export ERRSCRIPT='eval [[ $err = 0 ]]' - - diff --git a/driver/gdas/para_config.gdas_gldas b/driver/gdas/para_config.gdas_gldas deleted file mode 100755 index a766601e22..0000000000 --- a/driver/gdas/para_config.gdas_gldas +++ /dev/null @@ -1,63 +0,0 @@ -################################################################## -# This parameter file set up the environment variables used in -# the J-Job scripts to run the gldas land analsis system -################################################################## -# module load for untility in run_all of lsf directory - -export GDATE=${PDY:-20191025} -export cyc=${cyc:-00} -export model=${model:-noah} -export QUEUE=debug -export PROJECT_CODE=NLDAS-T2O - -#export workdir=/gpfs/dell2/emc/modeling/noscrub/$LOGNAME/global-workflow -export workdir=${workdir:-$HOMEgfs} - -export gldas_ver=${gldas_ver:-v2.3.0} -export HOMEgldas=${HOMEgldas:-$workdir/sorc/gldas.fd} -export FIXgldas=${FIXgldas:-$HOMEgldas/fix} -export PARMgldas=${PARMgldas:-$HOMEgldas/parm} -export EXECgldas=${EXECgldas:-$HOMEgldas/exec} -export USHgldas=${USHgldas:-$HOMEgldas/ush} - -#export FIXgldas=/gpfs/dell2/emc/modeling/noscrub/emc.glopara/git/fv3gfs/fix/fix_gldas -#export topodir=/gpfs/dell2/emc/modeling/noscrub/emc.glopara/git/fv3gfs/fix/fix_fv3_gmted2010/C768/ -export FIXgldas=${FIXgldas:-$workdir/fix/fix_gldas} -export topodir=${topodir:-$workdir/fix/fix_fv3_gmted2010/C768} - - -export COM_IN=${COM_IN:-$workdir/com/gldas.${gldas_ver}} -export COM_OUT=${COM_OUT:-$COM_IN} - -# set up com and decom directory used -# GFS OPS -if [ ${model} == 'noah' ]; then -export COMROOT=/gpfs/dell1/nco/ops/com -export DCOMROOT=/gpfs/dell1/nco/ops/dcom -fi - -##GFS Retrospective test -#if [ ${model} == 'noah' ]; then -#export 
COMROOT=/gpfs/dell2/emc/retros/noscrub/Youlong.Xia/com -#export DCOMROOT=/gpfs/dell2/emc/retros/noscrub/Youlong.Xia/com -#fi - -# gfsv16 systhesis test -if [ ${model} == 'noahmp' ]; then -export COMROOT=/gpfs/dell2/emc/retros/noscrub/Youlong.Xia/com -export DCOMROOT=/gpfs/dell2/emc/retros/noscrub/Youlong.Xia/com -fi - -export COMINgdas=${COMINgdas:-${COMROOT}/gfs/prod} -export DCOMIN=${DCOM_IN:-${DCOMROOT}/prod} - - -#export DATA_IN=/gpfs/dell2/ptmp/$LOGNAME/tmpnwprd -#export jlogfile=$DATA_IN/jlogfile -export DATA=/gpfs/dell2/ptmp/$LOGNAME/tmpnwprd -export jlogfile=$DATA/jlogfile - -export SENDCOM=${SENDCOM:-YES} -export SENDECF=${SENDECF:-NO} -export SENDDBN=${SENDDBN:-NO} - diff --git a/driver/gdas/run_JGDAS_NCEPPOST.sh b/driver/gdas/run_JGDAS_NCEPPOST.sh deleted file mode 100755 index 374ba25b50..0000000000 --- a/driver/gdas/run_JGDAS_NCEPPOST.sh +++ /dev/null @@ -1,110 +0,0 @@ -#!/bin/sh - -#BSUB -o out_gdas_nemsio_p25_para_mpiio.%J -#BSUB -e err_gdas_nemsio_p25_para_mpiio.%J -#BSUB -J NEMSPOST -#BSUB -extsched 'CRAYLINUX[]' -R '1*{select[craylinux && !vnode]} + 96*{select[craylinux && vnode]span[ptile=24] cu[type=cabinet]}' -#BSUB -W 00:40 -#BSUB -q dev -#BSUB -P GFS-T2O -#BSUB -M 1000 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/Hui-Ya.Chuang/nems_sample_output_T1534 - -set -x - -# specify user's own post working directory for testing -export svndir=/gpfs/hps/emc/global/noscrub/Hui-Ya.Chuang/post_trunk -export MP_LABELIO=yes -export OMP_NUM_THREADS=1 -export KMP_AFFINITY=disabled -export OMP_STACKSIZE=2048M -export MP_LABELIO=yes -export MP_STDOUTMODE=ordered - -############################################ -# Loading module -############################################ -. $MODULESHOME/init/ksh -module load PrgEnv-intel ESMF-intel-haswell/3_1_0rp5 cfp-intel-sandybridge iobuf craype-hugepages2M craype-haswell -#module load cfp-intel-sandybridge/1.1.0 -module use /gpfs/hps/nco/ops/nwprod/modulefiles -module load prod_envir/1.1.0 -module load prod_util/1.0.4 -module load grib_util/1.0.3 - -# specify PDY (the cycle start yyyymmdd) and cycle -export PDY=20170212 -export cyc=00 -export cycle=t${cyc}z - - -# specify the directory environment for executable, it's either para or prod -export envir=prod - -# set up running dir - -export user=$(whoami) -export DATA=/gpfs/hps/ptmp/${user}/gdas.${PDY}${cyc}_nemsio_mpiio -mkdir -p $DATA -cd $DATA -rm -f ${DATA}/* - -#################################### -# Specify RUN Name and model -#################################### -export NET=gfs -#export RUN=gdas - -#################################### -# Determine Job Output Name on System -#################################### -#export pgmout="OUTPUT.${pid}" -#export pgmerr=errfile - -#################################### -# SENDSMS - Flag Events on SMS -# SENDCOM - Copy Files From TMPDIR to $COMOUT -# SENDDBN - Issue DBNet Client Calls -# RERUN - Rerun posts from beginning (default no) -# VERBOSE - Specify Verbose Output in global_postgp.sh -#################################### -export SAVEGES=NO -export SENDSMS=NO -export SENDCOM=YES -export SENDDBN=NO -export RERUN=NO -export VERBOSE=YES - -export HOMEglobal=${svndir} -export HOMEgfs=${svndir} -export HOMEgdas=${svndir} - -############################################## -# Define COM directories -############################################## -export COMIN=/gpfs/hps/emc/global/noscrub/Hui-Ya.Chuang/para_look_alike/gdas.${PDY} -# specify my own COMOUT dir to mimic operations -export COMOUT=$DATA -mkdir -p $COMOUT - -date - -#export OUTTYP=4 -# need to set 
FIXglobal to global share superstructure if testing post in non -# super structure environement -export FIXglobal=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/q3fy17_final/global_shared.v14.1.0/fix -export APRUN="aprun -j 1 -n24 -N8 -d1 -cc depth" -export nemsioget=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/q3fy17_final/global_shared.v14.1.0/exec/nemsio_get - -export KEEPDATA=YES -#export POSTGRB2TBL=$HOMEglobal/parm/params_grib2_tbl_new -$HOMEgfs/jobs/JGDAS_NCEPPOST - -############################################################# - -date - -echo $? - - - diff --git a/driver/gdas/test_emcsfc.sh b/driver/gdas/test_emcsfc.sh deleted file mode 100755 index 5a89e75202..0000000000 --- a/driver/gdas/test_emcsfc.sh +++ /dev/null @@ -1,46 +0,0 @@ -#!/bin/sh - -#-------------------------------------------------------------- -# Run the JGDAS_EMCSFC_SFC_PREP j-job on wcoss cray -# -# Invoke as follows: -# 'cat $script | bsub' -#-------------------------------------------------------------- - -#BSUB -oo emcsfc.log -#BSUB -eo emcsfc.log -#BSUB -q dev_shared -#BSUB -R rusage[mem=2000] -#BSUB -J emcsfc -#BSUB -P GFS-T2O -#BSUB -cwd . -#BSUB -W 0:03 - -set -x - -export cyc="00" -export job=emcsfc_sfc_prep_${cyc} -export KEEPDATA="YES" -export SENDECF="NO" -export SENDCOM="YES" -export RUN_ENVIR="nco" - -export DATA="/gpfs/hps/stmp/$LOGNAME/tmpnwprd/${job}" -export jlogfile="/gpfs/hps/stmp/$LOGNAME/jlogfile" - -module load prod_envir/1.1.0 - -export envir="prod" -export COMROOT="/gpfs/hps/stmp/${LOGNAME}"${COMROOT} - -export NWROOT="/gpfs/hps/emc/global/noscrub/George.Gayno/q3fy17_final" -export global_shared_ver="v14.1.0" - -module load grib_util/1.0.3 -module load prod_util/1.0.5 - -export jobid="LLgdas_emcsfc_sfc_prep" -export gdas_ver="v14.1.0" -$NWROOT/gdas.${gdas_ver}/jobs/JGDAS_EMCSFC_SFC_PREP - -exit 0 diff --git a/driver/gdas/test_exgdas_tropc.sh b/driver/gdas/test_exgdas_tropc.sh deleted file mode 100755 index 351c29c81b..0000000000 --- a/driver/gdas/test_exgdas_tropc.sh +++ /dev/null @@ -1,126 +0,0 @@ -#BSUB -J JGDAS_TROPC_TEST_06 -#BSUB -o /ptmpp2/Qingfu.Liu/com2/jgdas_tropc_06.o%J -#BSUB -e /ptmpp2/Qingfu.Liu/com2/jgdas_tropc_06.o%J -#BSUB -L /bin/sh -#BSUB -q debug -#BSUB -W 00:30 -#BSUB -cwd /ptmpp2/Qingfu.Liu/com2 -#BSUB -P GFS-T2O -##BSUB -R rusage[mem=5000] -##BSUB -R affinity[core] -#BSUB -n 1 -#BSUB -R span[ptile=1] -#BSUB -R affinity[cpu(32):distribute=balance] -#BSUB -a poe -#BSUB -x - -#%include -#%include -. 
/usrx/local/Modules/default/init/ksh -module use /nwprod2/modulefiles -module load grib_util/v1.0.1 -module load prod_util/v1.0.1 -module load ibmpe ics lsf -#module load grib_util/v1.0.1 -module use /nwpara2/modulefiles -module load util_shared/v1.0.3 - -set -x - -export OMP_NUM_THREADS=32 - - export MP_LABELIO=yes - export MP_MPILIB=mpich2 - export MP_EUILIB=us - export MP_TASK_AFFINITY=cpu:32 - export MP_USE_BULK_XFER=yes - export MP_STDOUTMODE=unordered - export MPICH_ALLTOALL_THROTTLE=0 - export MP_COREFILE_FORMAT=core.txt - export OMP_STACKSIZE=3G - export MP_COMPILER=intel - -#export envir=dev2 -export envir=prod -export cyc=06 -export job=jgdas_tropc_${cyc} -export RUN_ENVIR=test -#export NWROOT=/nwprod2 -export NWROOT=/global/save/Qingfu.Liu - -#export DATAROOT=/tmpnwprd_p2 -export DATAROOT=/ptmpp2/Qingfu.Liu - -#export COMROOT=/com2 -export COMROOT=/ptmpp2/Qingfu.Liu/com2 -export COMDATEROOT=/com2 -export DATA_DUMPJB=/ptmpp2/Qingfu.Liu/com2/111 - -#export DCOMROOT=/dcom - -export COMROOTp1=/com -export KEEPDATA=YES -export CLEAN=NO -export cycle=t00z - -#which setpdy.sh -setpdy.sh -. PDY - -#export PDY=20160216 - -export COMIN=/com/gfs/prod/gdas.${PDY} - -#export utilscript=/nwprod2/util/ush -#export utilexec=/nwprod2/util/exec -#export utilities=/nwprod2/util/ush -#export HOMEutil=/nwprod2/util -#export HOMEgfs=/nwprod2/util -#export HOMEgraph=/nwprod2/util - -export utilscript=$NWROOT/util/ush -export utilexec=$NWROOT/util/exec -export utilities=$NWROOT/util/ush -export HOMEutil=$NWROOT/util -#export HOMEgfs=$NWROOT/util -export HOMEgraph=$NWROOT/util - -# versions file for tracker $tracker.ver -VERSION_FILE=${NWROOT}/versions/tropcy_qc_reloc.ver -if [ -f $VERSION_FILE ]; then - . $VERSION_FILE -else - ecflow_client --abort - exit -fi - -#export shared_global_home=$NWROOT/shared_nco_20160129 -export HOMEgfs=$NWROOT/gfs.v13.0.0 -#export HOMEgdas=$NWROOT/gdas.v13.0.0 -export HOMEgdas=$NWROOT/gdas_nco_20160129 - -# CALL executable job script here -#export HOMERELO=${HOMEgdas} -#export HOMESYND=${HOMERELO} -#export envir_getges=prod -$HOMEgdas/jobs/JGDAS_TROPC - -if [ $? 
-ne 0 ]; then -# ecflow_client --abort - exit -fi - -#%include -#%manual -###################################################################### -#PURPOSE: Executes the job that creates GFS TC track forecasts -###################################################################### - -###################################################################### -# Job specific troubleshooting instructions: -# see generic troubleshoot manual page -# -###################################################################### - -# include manual page below -#%end diff --git a/driver/gdas/test_gdas_analysis_high.sh b/driver/gdas/test_gdas_analysis_high.sh deleted file mode 100755 index b57727c2d2..0000000000 --- a/driver/gdas/test_gdas_analysis_high.sh +++ /dev/null @@ -1,90 +0,0 @@ -#!/bin/sh --login - -#BSUB -L /bin/sh -#BSUB -P GFS-T2O -#BSUB -o gdas_analysis_high.o%J -#BSUB -e gdas_analysis_high.o%J -#BSUB -J gdas_analysis_high -#BSUB -q devonprod -#BSUB -M 3072 -#BSUB -extsched 'CRAYLINUX[]' -#BSUB -W 01:00 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work/gdas.v14.1.0/driver - -set -x - -export NODES=240 -export ntasks=480 -export ptile=2 -export threads=12 - -export CDATE=2017030806 - - -############################################################# -# Specify whether the run is production or development -############################################################# -export RUN_ENVIR=para -export PDY=$(echo $CDATE | cut -c1-8) -export cyc=$(echo $CDATE | cut -c9-10) -export job=gdas_analysis_high_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=/gpfs/hps/stmp/$LOGNAME/test - - -############################################################# -# Specify versions -############################################################# -export gdas_ver=v14.1.0 -export global_shared_ver=v14.1.0 -export crtm_ver=2.2.4 -export prod_envir_ver=1.1.0 -export grib_util_ver=1.0.3 -export prod_util_ver=1.0.6 -export util_shared_ver=1.0.3 - - -############################################################# -# Load modules -############################################################# -. 
$MODULESHOME/init/sh -module load crtm-intel/${crtm_ver} -module load prod_envir/$prod_envir_ver -module load grib_util/$grib_util_ver -module load prod_util/$prod_util_ver -module load util_shared/$util_shared_ver -module load cfp-intel-sandybridge - -module list - - -############################################################# -# WCOSS_C environment settings -############################################################# -export KMP_AFFINITY=disabled -export OMP_STACKSIZE=2G -export OMP_NUM_THREADS=$threads -export FORT_BUFFERED=true - -export OMP_NUM_THREADS_CY=24 -export NTHREADS=$OMP_NUM_THREADS_CY -export NTHREADS_GSI=$threads -export NTHSTACK=1024000000 - - -############################################################# -# Set user specific variables -############################################################# -export NWTEST=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work -export PARA_CONFIG=$NWTEST/gdas.${gdas_ver}/driver/para_config.gdas_analysis_high -export JOBGLOBAL=$NWTEST/gdas.${gdas_ver}/jobs - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGDAS_ANALYSIS_HIGH - -exit diff --git a/driver/gdas/test_gdas_enkf_fcst.sh b/driver/gdas/test_gdas_enkf_fcst.sh deleted file mode 100755 index 097cb2711c..0000000000 --- a/driver/gdas/test_gdas_enkf_fcst.sh +++ /dev/null @@ -1,89 +0,0 @@ -#!/bin/sh --login - -#BSUB -L /bin/sh -#BSUB -P GFS-T2O -#BSUB -o gdas_enkf_fcst.o%J -#BSUB -e gdas_enkf_fcst.o%J -#BSUB -J gdas_enkf_fcst -#BSUB -q devonprod -#BSUB -M 3072 -#BSUB -extsched 'CRAYLINUX[]' -#BSUB -W 01:00 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work/gdas.v14.1.0/driver - -set -x - -# 20 nodes = 18 compute nodes + 2 i/o nodes -# set WRT_GROUP=2 for 2 i/o nodes (see ../parm/gdas_enkf_fcst.parm) -# set WRTPE_PER_GROUP=4 to match ptile -export NODES=20 -export ntasks=80 -export ptile=4 -export threads=6 - -export CDATE=2017011900 -export ENSGRP=1 - -export grp=$ENSGRP -if [[ $grp -lt 10 ]]; then - export grp=0$grp -fi - - -############################################################# -# Specify whether the run is production or development -############################################################# -export RUN_ENVIR=para -export PDY=$(echo $CDATE | cut -c1-8) -export cyc=$(echo $CDATE | cut -c9-10) -export job=gdas_enkf_fcst_grp${grp}_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=/gpfs/hps/stmp/$LOGNAME/test - - -############################################################# -# Specify versions -############################################################# -export gdas_ver=v14.1.0 -export global_shared_ver=v14.1.0 -export grib_util_ver=1.0.3 -export prod_util_ver=1.0.6 - - -############################################################# -# Load modules -############################################################# -. 
$MODULESHOME/init/sh -module load grib_util/$grib_util_ver -module load prod_util/$prod_util_ver -module load craype-hugepages4M - -module list - - -############################################################# -# WCOSS_C environment settings -############################################################# -export KMP_AFFINITY=disabled -export OMP_STACKSIZE=2G -export OMP_NUM_THREADS=$threads -export FORT_BUFFERED=true -export NTHREADS_EFCS=$threads - - -############################################################# -# Set user specific variables -############################################################# -export NWTEST=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work -export PARA_CONFIG=$NWTEST/gdas.${gdas_ver}/driver/para_config.gdas_enkf_fcst -export JOBGLOBAL=$NWTEST/gdas.${gdas_ver}/jobs - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGDAS_ENKF_FCST - -exit diff --git a/driver/gdas/test_gdas_enkf_inflate_recenter.sh b/driver/gdas/test_gdas_enkf_inflate_recenter.sh deleted file mode 100755 index d188ac80e2..0000000000 --- a/driver/gdas/test_gdas_enkf_inflate_recenter.sh +++ /dev/null @@ -1,83 +0,0 @@ -#!/bin/sh --login - -#BSUB -L /bin/sh -#BSUB -P GFS-T2O -#BSUB -o gdas_enkf_inflate_recenter.o%J -#BSUB -e gdas_enkf_inflate_recenter.o%J -#BSUB -J gdas_enkf_inflate_recenter -#BSUB -q devonprod -#BSUB -M 3072 -#BSUB -extsched 'CRAYLINUX[]' -#BSUB -W 01:00 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work/gdas.v14.1.0/driver - -set -x -ulimit -s unlimited -ulimit -a - -export NODES=14 -export ntasks=80 -export ptile=6 -export threads=1 - -export CDATE=2017011900 - - -############################################################# -# Specify whether the run is production or development -############################################################# -export RUN_ENVIR=para -export PDY=$(echo $CDATE | cut -c1-8) -export cyc=$(echo $CDATE | cut -c9-10) -export job=gdas_enkf_inflate_recenter_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=/gpfs/hps/stmp/$LOGNAME/test - - -############################################################# -# Specify versions -############################################################# -export gdas_ver=v14.1.0 -export global_shared_ver=v14.1.0 -export grib_util_ver=1.0.3 -export prod_util_ver=1.0.6 - - -############################################################# -# Load modules -############################################################# -. 
$MODULESHOME/init/sh -module load grib_util/$grib_util_ver -module load prod_util/$prod_util_ver -module load craype-hugepages2M - -module list - - -############################################################# -# WCOSS_C environment settings -############################################################# -export KMP_AFFINITY=disabled -export OMP_STACKSIZE=2G -export OMP_NUM_THREADS=$threads -export FORT_BUFFERED=true - -export NTHREADS_ENKF=$threads - - -############################################################# -# Set user specific variables -############################################################# -export NWTEST=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work -export PARA_CONFIG=$NWTEST/gdas.${gdas_ver}/driver/para_config.gdas_enkf_inflate_recenter -export JOBGLOBAL=$NWTEST/gdas.${gdas_ver}/jobs - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGDAS_ENKF_INFLATE_RECENTER - -exit diff --git a/driver/gdas/test_gdas_enkf_innovate_obs.sh b/driver/gdas/test_gdas_enkf_innovate_obs.sh deleted file mode 100755 index 15313e809a..0000000000 --- a/driver/gdas/test_gdas_enkf_innovate_obs.sh +++ /dev/null @@ -1,96 +0,0 @@ -#!/bin/sh --login - -#BSUB -L /bin/sh -#BSUB -P GFS-T2O -#BSUB -o gdas_enkf_innovate_obs.o%J -#BSUB -e gdas_enkf_innovate_obs.o%J -#BSUB -J gdas_enkf_innovate_obs -#BSUB -q devonprod -#BSUB -M 3072 -#BSUB -extsched 'CRAYLINUX[]' -#BSUB -W 01:00 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work/gdas.v14.1.0/driver - -set -x - -export NODES=12 -export ntasks=144 -export ptile=12 -export threads=2 - -export CDATE=2017030806 -export ENSGRP=1 - -export grp=$ENSGRP -if [[ $grp -lt 20 ]]; then - export grp=0$grp -fi - - -############################################################# -# Specify whether the run is production or development -############################################################# -export RUN_ENVIR=para -export PDY=$(echo $CDATE | cut -c1-8) -export cyc=$(echo $CDATE | cut -c9-10) -export job=gdas_enkf_innovate_obs_grp${grp}_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=/gpfs/hps/stmp/$LOGNAME/test - - -############################################################# -# Specify versions -############################################################# -export gdas_ver=v14.1.0 -export global_shared_ver=v14.1.0 -export crtm_ver=2.2.4 -export prod_envir_ver=1.1.0 -export grib_util_ver=1.0.3 -export prod_util_ver=1.0.6 -export util_shared_ver=1.0.3 - - -############################################################# -# Load modules -############################################################# -. 
$MODULESHOME/init/sh -module load crtm-intel/${crtm_ver} -module load prod_envir/$prod_envir_ver -module load grib_util/$grib_util_ver -module load prod_util/$prod_util_ver -module load util_shared/$util_shared_ver -module load cfp-intel-sandybridge - -module list - - -############################################################# -# WCOSS_C environment settings -############################################################# -export KMP_AFFINITY=disabled -export OMP_STACKSIZE=2G -export OMP_NUM_THREADS=$threads -export FORT_BUFFERED=true - -export OMP_NUM_THREADS_CY=24 -export NTHREADS=$OMP_NUM_THREADS_CY -export NTHREADS_GSI=$threads -export NTHSTACK=1024000000 - - -############################################################# -# Set user specific variables -############################################################# -export NWTEST=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work -export PARA_CONFIG=$NWTEST/gdas.${gdas_ver}/driver/para_config.gdas_enkf_innovate_obs -export JOBGLOBAL=$NWTEST/gdas.${gdas_ver}/jobs - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGDAS_ENKF_INNOVATE_OBS - -exit diff --git a/driver/gdas/test_gdas_enkf_post.sh b/driver/gdas/test_gdas_enkf_post.sh deleted file mode 100755 index 472e3f26ba..0000000000 --- a/driver/gdas/test_gdas_enkf_post.sh +++ /dev/null @@ -1,79 +0,0 @@ -#!/bin/sh --login - -#BSUB -L /bin/sh -#BSUB -P GFS-T2O -#BSUB -o gdas_enkf_post.o%J -#BSUB -e gdas_enkf_post.o%J -#BSUB -J gdas_enkf_post -#BSUB -q devonprod -#BSUB -M 3072 -#BSUB -extsched 'CRAYLINUX[]' -#BSUB -W 01:00 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work/gdas.v14.1.0/driver - -set -x - -export NODES=80 -export ntasks=80 -export ptile=1 -export threads=24 - -export CDATE=2017011900 - -############################################################# -# Specify whether the run is production or development -############################################################# -export RUN_ENVIR=para -export PDY=$(echo $CDATE | cut -c1-8) -export cyc=$(echo $CDATE | cut -c9-10) -export job=gdas_enkf_post_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=/gpfs/hps/stmp/$LOGNAME/test - - -############################################################# -# Specify versions -############################################################# -export gdas_ver=v14.1.0 -export global_shared_ver=v14.1.0 -export grib_util_ver=1.0.3 -export prod_util_ver=1.0.6 - - -############################################################# -# Load modules -############################################################# -. 
$MODULESHOME/init/sh -module load grib_util/$grib_util_ver -module load prod_util/$prod_util_ver -module load craype-hugepages4M - -module list - - -############################################################# -# WCOSS_C environment settings -############################################################# -export KMP_AFFINITY=disabled -export OMP_STACKSIZE=2G -export OMP_NUM_THREADS=$threads -export NTHREADS_EPOS=$threads -export FORT_BUFFERED=true - - -############################################################# -# Set user specific variables -############################################################# -export NWTEST=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work -export PARA_CONFIG=$NWTEST/gdas.${gdas_ver}/driver/para_config.gdas_enkf_post -export JOBGLOBAL=$NWTEST/gdas.${gdas_ver}/jobs - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGDAS_ENKF_POST - -exit diff --git a/driver/gdas/test_gdas_enkf_select_obs.sh b/driver/gdas/test_gdas_enkf_select_obs.sh deleted file mode 100755 index 8636b74710..0000000000 --- a/driver/gdas/test_gdas_enkf_select_obs.sh +++ /dev/null @@ -1,89 +0,0 @@ -#!/bin/sh --login - -#BSUB -L /bin/sh -#BSUB -P GFS-T2O -#BSUB -o gdas_enkf_select_obs.o%J -#BSUB -e gdas_enkf_select_obs.o%J -#BSUB -J gdas_enkf_select_obs -#BSUB -q devonprod -#BSUB -M 3072 -#BSUB -extsched 'CRAYLINUX[]' -#BSUB -W 01:00 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work/gdas.v14.1.0/driver - -set -x - -export NODES=12 -export ntasks=144 -export ptile=12 -export threads=2 - -export CDATE=2017030806 - -############################################################# -# Specify whether the run is production or development -############################################################# -export RUN_ENVIR=para -export PDY=$(echo $CDATE | cut -c1-8) -export cyc=$(echo $CDATE | cut -c9-10) -export job=gdas_enkf_select_obs_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=/gpfs/hps/stmp/$LOGNAME/test - - -############################################################# -# Specify versions -############################################################# -export gdas_ver=v14.1.0 -export global_shared_ver=v14.1.0 -export crtm_ver=2.2.4 -export prod_envir_ver=1.1.0 -export grib_util_ver=1.0.3 -export prod_util_ver=1.0.6 -export util_shared_ver=1.0.3 - - -############################################################# -# Load modules -############################################################# -. 
$MODULESHOME/init/sh -module load crtm-intel/${crtm_ver} -module load prod_envir/$prod_envir_ver -module load grib_util/$grib_util_ver -module load prod_util/$prod_util_ver -module load util_shared/$util_shared_ver -module load cfp-intel-sandybridge - -module list - - -############################################################# -# WCOSS_C environment settings -############################################################# -export KMP_AFFINITY=disabled -export OMP_STACKSIZE=2G -export OMP_NUM_THREADS=$threads -export FORT_BUFFERED=true - -export OMP_NUM_THREADS_CY=24 -export NTHREADS=$OMP_NUM_THREADS_CY -export NTHREADS_GSI=$threads -export NTHSTACK=1024000000 - - -############################################################# -# Set user specific variables -############################################################# -export NWTEST=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work -export PARA_CONFIG=$NWTEST/gdas.${gdas_ver}/driver/para_config.gdas_enkf_select_obs -export JOBGLOBAL=$NWTEST/gdas.${gdas_ver}/jobs - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGDAS_ENKF_SELECT_OBS - -exit diff --git a/driver/gdas/test_gdas_enkf_update.sh b/driver/gdas/test_gdas_enkf_update.sh deleted file mode 100755 index cf0d8f4318..0000000000 --- a/driver/gdas/test_gdas_enkf_update.sh +++ /dev/null @@ -1,83 +0,0 @@ -#!/bin/sh --login - -#BSUB -L /bin/sh -#BSUB -P GFS-T2O -#BSUB -o gdas_enkf_update.o%J -#BSUB -e gdas_enkf_update.o%J -#BSUB -J gdas_enkf_update -#BSUB -q devonprod -#BSUB -M 3072 -#BSUB -extsched 'CRAYLINUX[]' -#BSUB -W 01:00 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work/gdas.v14.1.0/driver - -set -x - -export NODES=40 -export ntasks=240 -export ptile=6 -export threads=4 - -export CDATE=2017030806 - - -############################################################# -# Specify whether the run is production or development -############################################################# -export RUN_ENVIR=para -export PDY=$(echo $CDATE | cut -c1-8) -export cyc=$(echo $CDATE | cut -c9-10) -export job=gdas_enkf_update_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=/gpfs/hps/stmp/$LOGNAME/test - - -############################################################# -# Specify versions -############################################################# -export gdas_ver=v14.1.0 -export global_shared_ver=v14.1.0 -export grib_util_ver=1.0.3 -export prod_util_ver=1.0.6 -export util_shared_ver=1.0.3 - - -############################################################# -# Load modules -############################################################# -. 
$MODULESHOME/init/sh -module load grib_util/$grib_util_ver -module load prod_util/$prod_util_ver -module load util_shared/$util_shared_ver -module load cfp-intel-sandybridge - -module list - - -############################################################# -# WCOSS_C environment settings -############################################################# -export KMP_AFFINITY=disabled -export OMP_STACKSIZE=2G -export OMP_NUM_THREADS=$threads -export FORT_BUFFERED=true - -export NTHREADS_ENKF=$nthreads - - -############################################################# -# Set user specific variables -############################################################# -export NWTEST=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work -export PARA_CONFIG=$NWTEST/gdas.${gdas_ver}/driver/para_config.gdas_enkf_update -export JOBGLOBAL=$NWTEST/gdas.${gdas_ver}/jobs - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGDAS_ENKF_UPDATE - -exit diff --git a/driver/gdas/test_gdas_forecast_high.sh b/driver/gdas/test_gdas_forecast_high.sh deleted file mode 100755 index a8d8af6116..0000000000 --- a/driver/gdas/test_gdas_forecast_high.sh +++ /dev/null @@ -1,83 +0,0 @@ -#!/bin/sh --login - -#BSUB -L /bin/sh -#BSUB -P GFS-T2O -#BSUB -o gdas_forecast_high.o%J -#BSUB -e gdas_forecast_high.o%J -#BSUB -J gdas_forecast_high -#BSUB -q devonprod -#BSUB -M 768 -#BSUB -extsched 'CRAYLINUX[]' -#BSUB -W 01:00 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work/gdas.v14.1.0/driver - -set -x - -# 55 nodes = 49 compute nodes + 6 i/o nodes -# set WRT_GROUP=6 for 6 i/o nodes (see ../parm/gdas_forecast_high.parm) -# set WRTPE_PER_GROUP=4 to match ptile -export NODES=55 -export ntasks=220 -export ptile=4 -export threads=2 - -export CDATE=2017012506 - -############################################################# -# Specify whether the run is production or development -############################################################# -export RUN_ENVIR=para -export PDY=$(echo $CDATE | cut -c1-8) -export cyc=$(echo $CDATE | cut -c9-10) -export job=gdas_forecast_high_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=/gpfs/hps/stmp/$LOGNAME/test - - -############################################################# -# Specify versions -############################################################# -export gdas_ver=v14.1.0 -export global_shared_ver=v14.1.0 -export gfs_ver=v14.1.0 -export grib_util_ver=1.0.3 -export prod_util_ver=1.0.5 - - -############################################################# -# Load modules -############################################################# -. 
$MODULESHOME/init/sh -module load grib_util/$grib_util_ver -module load prod_util/$prod_util_ver -module load craype-hugepages16M - -module list - - -############################################################# -# WCOSS_C environment settings -############################################################# -export KMP_AFFINITY=disabled -export OMP_STACKSIZE=1024m -export OMP_NUM_THREADS=$threads -export NTHREADS=$threads - - -############################################################# -# Set user specific variables -############################################################# -export NWTEST=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work -export PARA_CONFIG=$NWTEST/gdas.${gdas_ver}/driver/para_config.gdas_forecast_high -export JOBGLOBAL=$NWTEST/gdas.${gdas_ver}/jobs - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGDAS_FORECAST_HIGH - -exit - diff --git a/driver/gdas/test_gdas_gldas.sh b/driver/gdas/test_gdas_gldas.sh deleted file mode 100755 index 92e40ced3d..0000000000 --- a/driver/gdas/test_gdas_gldas.sh +++ /dev/null @@ -1,127 +0,0 @@ -#!/bin/sh - -#BSUB -o /gpfs/dell2/ptmp/Youlong.Xia/gdas_gldas.o%J -#BSUB -e /gpfs/dell2/ptmp/Youlong.Xia/gdas_gldas.o%J -#BSUB -P NLDAS-T2O -#BSUB -J jgdas_gldas_12 -#BSUB -W 01:00 -#BSUB -q dev -#BSUB -n 112 # number of tasks -#BSUB -R span[ptile=28] # tasks per node -#BSUB -R affinity[core(1):distribute=balance] -#BSUB -M 3072 -#BSUB -extsched 'CRAYLINUX[]' - -set -x - -date - -export NODES=4 -export ntasks=112 -export ptile=28 -export threads=1 - -export launcher="mpirun -n" -export npe_gaussian=6 -export npe_gldas=112 -export APRUN_GAUSSIAN="$launcher $npe_gaussian" -export APRUN_GLDAS="$launcher $npe_gldas" - -export CDATE=${CDATE:-2019110700} - -############################################################# -export KMP_AFFINITY=disabled - -export PDY=$(date -u +%Y%m%d) -export PDY=$(echo $CDATE | cut -c1-8) - -export PDY1=$(expr $PDY - 1) - -export cyc=$(echo $CDATE | cut -c9-10) -export cycle=t${cyc}z - -set -xa -export PS4='$SECONDS + ' -date - -#################################### -## Load theUtilities module -##################################### -module purge -module load EnvVars/1.0.2 -module load ips/18.0.1.163 -module load CFP/2.0.1 -module load impi/18.0.1 -module load lsf/10.1 -module load prod_envir/1.0.2 -module load prod_util/1.1.4 -module load grib_util/1.1.0 -module load NetCDF/4.5.0 -########################################### -# Now set up environment -########################################### -module list - -############################################ -# GDAS META PRODUCT GENERATION -############################################ -# set envir=prod or para to test with data in prod or para -# export envir=prod - export envir=para - -export RUN=${RUN:-gdas} - -export SENDCOM=YES -export KEEPDATA=YES -export job=gdas_gldas_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -############################################## -# Define COM, COMOUTwmo, COMIN directories -############################################## - export HOMEgw=/gpfs/dell2/emc/modeling/noscrub/$LOGNAME/global-workflow -if [ $envir = "prod" ] ; then -# This setting is for testing with GDAS (production) - export HOMEgldas=/nwprod/gldas.${gldas_ver} - export COMIN=/gpfs/dell1/nco/ops/com/gfs/prod/${RUN}.${PDY} ### NCO PROD - export COMROOT=/gpfs/dell1/nco/ops/com - export DCOMROOT=/gpfs/dell1/nco/ops/dcom -elif [ $envir = "para" ] ; then -# This 
setting is for testing with GDAS (production) - export HOMEgldas=${HOMEgldas:-$HOMEgfs/sorc/gldas.fd} - export COMIN=/gpfs/dell1/nco/ops/com/gfs/prod/${RUN}.${PDY} ### NCO PROD - export COMROOT=/gpfs/dell1/nco/ops/com - export DCOMROOT=/gpfs/dell1/nco/ops/dcom -else -# export COMIN=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/prfv3rt1/${RUN}.${PDY}/${cyc}/nawips ### EMC PARA Realtime -# export COMINgdas=/gpfs/dell3/ptmp/emc.glopara/ROTDIRS/prfv3rt1/${RUN} ### EMC PARA Realtime - export workdir=${workdir:-$HOMEgfs} - export HOMEgldas=$workdir/sorc/gldas.fd - export COMROOT=$workdir/com - export DCOMROOT=$workdir/dcom - export COMINgdas=$COMROOT - export DCOMIN=$DCOMROOT - export COMIN=$workdir/comin - export COMOUT=$workdir/comout -fi - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTncdc $COMOUTukmet $COMOUTecmwf -fi - -# Set user specific variables -############################################################# -#export NWTEST=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work -export PARA_CONFIG=$HOMEgw/driver/gdas/para_config.gdas_gldas -#export JOBGLOBAL=$NWTEST/gdas.${gdas_ver}/jobs -export JOBGLOBAL=$HOMEgldas/jobs - -############################################################# -# Execute job -############################################################# -echo $JOBGLOBAL/JGDAS_ATMOS_GLDAS -$JOBGLOBAL/JGDAS_ATMOS_GLDAS - -exit - diff --git a/driver/gdas/test_jgdas_tropc_cray.sh b/driver/gdas/test_jgdas_tropc_cray.sh deleted file mode 100755 index c24b17a903..0000000000 --- a/driver/gdas/test_jgdas_tropc_cray.sh +++ /dev/null @@ -1,133 +0,0 @@ -#!/bin/bash -#BSUB -J JGDAS_TROPC -#BSUB -W 0:30 -####BSUB -extsched 'CRAYLINUX[]' -R '1*{order[slots] select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24] cu[type=cabinet]}' -BSUB -extsched 'CRAYLINUX[]' -#BSUB -o /gpfs/hps/emc/global/noscrub/Qingfu.Liu/gdas.v14.1.0/driver/test_jgdas_tropc.o%J -#BSUB -e /gpfs/hps/emc/global/noscrub/Qingfu.Liu/gdas.v14.1.0/driver/test_jgdas_tropc.o%J -#BSUB -q "dev" -#BSUB -P "GFS-T2O" -#BSUB -M 500 - -module use /gpfs/hps/nco/ops/nwprod/modulefiles -module load prod_util -module unload grib_util -module load grib_util/1.0.3 -module load util_shared/1.0.3 -##module load crtm-intel/2.2.3 -module load PrgEnv-intel craype cray-mpich ESMF-intel-haswell/3_1_0rp5 -module load NCL-gnu-sandybridge/6.3.0 -module load gcc -module list - -set -x - -export OMP_NUM_THREADS=12 - - export MP_LABELIO=yes - export MP_MPILIB=mpich2 - export MP_EUILIB=us - export MP_TASK_AFFINITY=cpu:24 - export MP_USE_BULK_XFER=yes - export MP_STDOUTMODE=unordered - export MPICH_ALLTOALL_THROTTLE=0 - export MP_COREFILE_FORMAT=core.txt - export OMP_STACKSIZE=3G - export MP_COMPILER=intel - -export envir=para -#export envir=prod -export cyc=06 -export job=jgdas_tropc_${cyc} -export RUN_ENVIR=test -#export NWROOT=/nwprod2 -#export NWROOT=/global/save/Qingfu.Liu -export NWROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu - -export DATAROOT=/gpfs/hps/ptmp/Qingfu.Liu/com -#export DATAROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu -#export COMROOT=/com2 -export COMROOT=/gpfs/hps/ptmp/Qingfu.Liu/com -#export COMROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com -export COMDATEROOT=/gpfs/hps/ptmp/Qingfu.Liu/com -#export COMDATEROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com -#export COMDATEROOT=/gpfs/tp2/nco/ops/com -export DATA_DUMPJB=/gpfs/hps/ptmp/Qingfu.Liu/com/111 - -#export DCOMROOT=/dcom -export DCOMROOT=/gpfs/tp1/nco/ops/dcom - -#export COMROOTp1=/gpfs/gp1/nco/ops/com -export COMROOTp1=/gpfs/hps/ptmp/Qingfu.Liu/com 
-#export COMROOTp1=/gpfs/tp2/nco/ops/com -export KEEPDATA=YES -export CLEAN=NO -export cycle=t${cyc}z -export LOUD=on -export BACK=on - -##which setpdy.sh -##setpdy.sh -##. PDY - -#export PDY=20150723 -#export PDY=20140814 -export PDY=20170108 - -#export COMIN=/gpfs/tp2/nco/ops/com/gfs/prod/gdas.${PDY} -export COMIN=/gpfs/hps/ptmp/Qingfu.Liu/com/gfs/para/gdas.${PDY} -#export COMIN=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com/gfs/para/gdas.${PDY} - -#export NWPROOT=/gpfs/tp1/nco/ops/nwprod/util -#export utilscript=$NWPROOT/util/ush -#export utilexec=$NWPROOT/util/exec -#export utilities=$NWPROOT/util/ush -#export HOMEutil=$NWPROOT/util -#export HOMEgraph=$NWPROOT/util - -# versions file for tracker $tracker.ver -VERSION_FILE=${NWROOT}/versions/tropcy_qc_reloc.ver -if [ -f $VERSION_FILE ]; then - . $VERSION_FILE -else - ecflow_client --abort - exit -fi - -#export DUMP=/gpfs/hps/nco/ops/nwprod/hwrf_dump.v3.2.1/ush/dumpjb -#export HOMEobsproc_dump=/gpfs/hps/nco/ops/nwprod/hwrf_dump.v3.2.1 -export DUMP=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/obsproc_dump.tkt-351.crayport/ush/dumpjb -export HOMEobsproc_dump=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/obsproc_dump.tkt-351.crayport -#export FIXobsproc_bufr_dumplist=/gpfs/hps/nco/ops/nwprod/obsproc_bufr_dumplist.v1.2.0/fix -export FIXobsproc_bufr_dumplist=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/gdas.v14.1.0/driver/fix -export HOMEobsproc_shared_bufr_dumplist=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/gdas.v14.1.0/driver -#export HOMEobroc_bufr_dumplist=/gpfs/hps/nco/ops/nwprod/obsproc_bufr_dumplist.v1.2.0 - -export HOMEgfs=$NWROOT/gfs.v14.1.0 -export HOMEgdas=$NWROOT/gdas.v14.1.0 - -# CALL executable job script here -#export HOMERELO=${HOMEgdas} -#export HOMESYND=${HOMERELO} -#export envir_getges=prod -$HOMEgdas/jobs/JGDAS_TROPC - -if [ $? -ne 0 ]; then -# ecflow_client --abort - exit -fi - -#%include -#%manual -###################################################################### -#PURPOSE: Executes the job that creates GFS TC track forecasts -###################################################################### - -###################################################################### -# Job specific troubleshooting instructions: -# see generic troubleshoot manual page -# -###################################################################### - -# include manual page below -#%end diff --git a/driver/gdas/test_jgdas_tropcy_qc_reloc.ecf b/driver/gdas/test_jgdas_tropcy_qc_reloc.ecf deleted file mode 100755 index c413513dcf..0000000000 --- a/driver/gdas/test_jgdas_tropcy_qc_reloc.ecf +++ /dev/null @@ -1,126 +0,0 @@ -#BSUB -J Relocation_TEST_06 -#BSUB -o /ptmpp2/Qingfu.Liu/com2/gdas_tropcy_qc_reloc_06.o%J -#BSUB -e /ptmpp2/Qingfu.Liu/com2/gdas_tropcy_qc_reloc_06.o%J -#BSUB -L /bin/sh -#BSUB -q debug -#BSUB -W 00:30 -#BSUB -cwd /ptmpp2/Qingfu.Liu/com2 -#BSUB -P GFS-T2O -##BSUB -R rusage[mem=5000] -##BSUB -R affinity[core] -#BSUB -n 7 -#BSUB -R span[ptile=1] -#BSUB -R affinity[cpu(32):distribute=balance] -#BSUB -a poe -#BSUB -x - -#%include -#%include -. 
/usrx/local/Modules/default/init/ksh -module use /nwprod2/modulefiles -module load grib_util/v1.0.1 -module load prod_util/v1.0.1 -module load ibmpe ics lsf - -set -x - -export OMP_NUM_THREADS=32 - - export MP_LABELIO=yes - export MP_MPILIB=mpich2 - export MP_EUILIB=us - export MP_TASK_AFFINITY=cpu:32 - export MP_USE_BULK_XFER=yes - export MP_STDOUTMODE=unordered - export MPICH_ALLTOALL_THROTTLE=0 - export MP_COREFILE_FORMAT=core.txt - export OMP_STACKSIZE=3G - export MP_COMPILER=intel - -export envir=dev2 -export cyc=06 -export job=gdas_tropcy_qc_reloc_${cyc} -export RUN_ENVIR=test -#export NWROOT=/nwprod2 -export NWROOT=/global/save/Qingfu.Liu - -#export DATAROOT=/tmpnwprd_p2 -export DATAROOT=/ptmpp2/Qingfu.Liu - -#export COMROOT=/com2 -export COMROOT=/ptmpp2/Qingfu.Liu/com2 -export COMDATEROOT=/com2 - -#export DCOMROOT=/dcom - -export COMROOTp1=/com -export KEEPDATA=YES -export CLEAN=NO -export cycle=t00z - -#which setpdy.sh -setpdy.sh -. PDY - -export COMINgfs=/com/gfs/prod/gfs.${PDY} -export COMINgdas=/com/gfs/prod/gdas.${PDY} - -# versions file for tracker $tracker.ver -VERSION_FILE=${NWROOT}/versions/tropcy_qc_reloc.ver -if [ -f $VERSION_FILE ]; then - . $VERSION_FILE -else - ecflow_client --abort - exit -fi - -export shared_global_home=$NWROOT/shared_nco_20160129 -export gfs_global_home=$NWROOT/gfs_nco_20160129 -export gdas_global_home=$NWROOT/gdas_nco_20160129 - -export files_override=F - -# CALL executable job script here -export HOMERELO=$shared_global_home -#export HOMERELO=${NWROOT}/tropcy_qc_reloc.${tropcy_qc_reloc_ver}_r62774_phase2 -export HOMESYND=${HOMERELO} -#export envir_getges=prod -$gdas_global_home/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC - -if [ $? -ne 0 ]; then -# ecflow_client --abort - exit -fi - -#%include -#%manual -###################################################################### -#PURPOSE: Executes the job JGLOBAL_ATMOS_TROPCY_QC_RELOC -###################################################################### -############################################################# -# Function been tested: TCvital quality control and archive, hurricane relocation -# -# Calling sequence: JGLOBAL_ATMOS_TROPCY_QC_RELOC, exglobal_atmos_tropcy_qc_reloc.sh, -# # syndat_qctropcy.sh, tropcy_relocate.sh,syndat_getjtbul.sh, -# # tropcy_relocate_extrkr.sh,parse-storm-type.pl -# -# Initial condition: provide hours (cyc=?) 
-# -# Usage: bsub < test_jgdas_tropcy_qc_reloc -# -# Data_In: COMINgfs=/com/gfs/prod/gfs.${PDY} -# COMINgdas=/com/gfs/prod/gdas.${PDY} -# -# Data_Out: /ptmpp2/Qingfu.Liu/com2/gfs/dev2/gdas.${PDY} -# -# Result verification: compare with the operational results -# (results might be slightly different due to 3hourly/hourly tracker) -############################################################## -###################################################################### -# Job specific troubleshooting instructions: -# see generic troubleshoot manual page -# -###################################################################### - -# include manual page below -#%end diff --git a/driver/gdas/test_jgdas_tropcy_qc_reloc_cray.sh b/driver/gdas/test_jgdas_tropcy_qc_reloc_cray.sh deleted file mode 100755 index 4e757e5085..0000000000 --- a/driver/gdas/test_jgdas_tropcy_qc_reloc_cray.sh +++ /dev/null @@ -1,154 +0,0 @@ -#!/bin/bash -#BSUB -J t1534 -#BSUB -W 0:30 -#BSUB -extsched 'CRAYLINUX[]' -R '1*{order[slots] select[craylinux && !vnode]} + 168*{select[craylinux && vnode]span[ptile=24] cu[type=cabinet]}' -#BSUB -o /gpfs/hps/emc/global/noscrub/Qingfu.Liu/gdas.v14.1.0/driver/gdas_tropcy_qc_reloc_06.o%J -#BSUB -e /gpfs/hps/emc/global/noscrub/Qingfu.Liu/gdas.v14.1.0/driver/gdas_tropcy_qc_reloc_06.o%J -###BSUB -o t574.stdout.%J -###BSUB -e t574.stderr.%J -#BSUB -q "dev" -#BSUB -P "GFS-T2O" -#BSUB -M 200 -###BSUB -M "60" - - -module use /gpfs/hps/nco/ops/nwprod/modulefiles -module load prod_util -module load prod_envir/1.1.0 -module unload grib_util -module load grib_util/1.0.3 -##module load crtm-intel/2.2.3 -module load PrgEnv-intel craype cray-mpich ESMF-intel-haswell/3_1_0rp5 -module list - -#export MKL_CBWR=AVX -#ulimit -s unlimited -#ulimit -a - -set -x - -export OMP_NUM_THREADS=24 - - export MP_LABELIO=yes - export MP_MPILIB=mpich2 - export MP_EUILIB=us - export MP_TASK_AFFINITY=cpu:24 - export MP_USE_BULK_XFER=yes - export MP_STDOUTMODE=unordered - export MPICH_ALLTOALL_THROTTLE=0 - export MP_COREFILE_FORMAT=core.txt - export OMP_STACKSIZE=3G - export MP_COMPILER=intel - -export envir=para -export cyc=06 -export job=gdas_tropcy_qc_reloc_${cyc} -export RUN_ENVIR=test -#export NWROOT=/nwprod2 -#export NWROOT=/global/save/Qingfu.Liu -export NWROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu - -#export DATAROOT=/tmpnwprd_p2 -#export DATAROOT=/ptmpp2/Qingfu.Liu -export DATAROOT=/gpfs/hps/ptmp/Qingfu.Liu - -#export COMROOT=/com2 -#export COMROOT=/ptmpp2/Qingfu.Liu/com2 -#export COMROOT=/gpfs/hps/ptmp/Qingfu.Liu/com -#export COMDATEROOT=/com -export COMROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com -export COMDATEROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com - -#export DCOMROOT=/dcom - -#export COMROOTp1=/gpfs/gp1/nco/ops/com -#export COMROOTp1=/gpfs/tp1/nco/ops/com -export COMROOTp1=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com -export KEEPDATA=YES -export CLEAN=NO -export cycle=t${cyc}z - -#which setpdy.sh -#setpdy.sh -#. 
PDY - -export PDY=20140814 -##export PDY=20150723 - -#export COMINgfs=/com/gfs/prod/gfs.${PDY} -#export COMINgdas=/com/gfs/prod/gdas.${PDY} -#export COMINgfs=/gpfs/tp1/nco/ops/com/gfs/prod/gfs.${PDY} -#export COMINgdas=/gpfs/tp1/nco/ops/com/gfs/prod/gdas.${PDY} -export COMINgfs=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com/gfs/$envir/gfs.${PDY} -export COMINgdas=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com/gfs/$envir/gdas.${PDY} -export ARCHSYND=${COMINsyn:-$(compath.py gfs/prod/syndat)} -export HOMENHC=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/guidance/storm-data/ncep -#export GETGES_COM=/gpfs/tp1/nco/ops/com -export GETGES_COM=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com -export GESROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com - -# versions file for tracker $tracker.ver -VERSION_FILE=${NWROOT}/versions/tropcy_qc_reloc.ver -if [ -f $VERSION_FILE ]; then - . $VERSION_FILE -else - ecflow_client --abort - exit -fi - -export shared_global_home=$NWROOT/global_shared.v14.1.0 -export gfs_global_home=$NWROOT/gfs.v14.1.0 -export gdas_global_home=$NWROOT/gdas.v14.1.0 - -export files_override=F -export PROCESS_TROPCY=NO -export copy_back=NO -export SENDCOM=NO -export APRNRELOC="time aprun -b -j1 -n7 -N1 -d24 -cc depth " -export APRNGETTX="time aprun -q -j1 -n1 -N1 -d1 -cc depth" -#export APRNRELOC="time aprun -b -j0 -n7 -N1 -d32 -cc depth" - -# CALL executable job script here -export HOMERELO=$shared_global_home -#export HOMERELO=${NWROOT}/tropcy_qc_reloc.${tropcy_qc_reloc_ver}_r62774_phase2 -export HOMESYND=${HOMERELO} -#export envir_getges=prod -$gdas_global_home/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC - -if [ $? -ne 0 ]; then -# ecflow_client --abort - exit -fi - -#%include -#%manual -###################################################################### -#PURPOSE: Executes the job JGLOBAL_ATMOS_TROPCY_QC_RELOC -###################################################################### -############################################################# -# Function been tested: TCvital quality control and archive, hurricane relocation -# -# Calling sequence: JGLOBAL_ATMOS_TROPCY_QC_RELOC, exglobal_atmos_tropcy_qc_reloc.sh, -# # syndat_qctropcy.sh, tropcy_relocate.sh,syndat_getjtbul.sh, -# # tropcy_relocate_extrkr.sh,parse-storm-type.pl -# -# Initial condition: provide hours (cyc=?) -# -# Usage: bsub < test_jgdas_tropcy_qc_reloc -# -# Data_In: COMINgfs=/com/gfs/prod/gfs.${PDY} -# COMINgdas=/com/gfs/prod/gdas.${PDY} -# -# Data_Out: /ptmpp2/Qingfu.Liu/com2/gfs/dev2/gdas.${PDY} -# -# Result verification: compare with the operational results -# (results might be slightly different due to 3hourly/hourly tracker) -############################################################## -###################################################################### -# Job specific troubleshooting instructions: -# see generic troubleshoot manual page -# -###################################################################### - -# include manual page below -#%end diff --git a/driver/gdas/test_jgdas_verfozn.sh b/driver/gdas/test_jgdas_verfozn.sh deleted file mode 100755 index 699849b782..0000000000 --- a/driver/gdas/test_jgdas_verfozn.sh +++ /dev/null @@ -1,82 +0,0 @@ -#!/bin/ksh - -#BSUB -o gdas_verfozn.o%J -#BSUB -e gdas_verfozn.o%J -#BSUB -J gdas_verfozn -#BSUB -q dev_shared -#BSUB -n 1 -#BSUB -R affinity[core] -#BSUB -M 100 -#BSUB -W 00:05 -#BSUB -a poe -#BSUB -P GFS-T2O - -##------------------------------------------------------------ -## This is the test driver script for the wcoss/ibm systems -## to run the JGDAS_VERFOZN job. 
-##------------------------------------------------------------ - -set -x - -export PDATE=${PDATE:-2018020806} -export NET=${NET:-gfs} -export RUN=${RUN:-gdas} - - -export PDY=$(echo $PDATE | cut -c1-8) -export cyc=$(echo $PDATE | cut -c9-10) -export job=gdas_verfozn.${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=${envir:-test} - -me=$(hostname | cut -c1) -export DATAROOT=${DATAROOT:-/gpfs/${me}d2/emc/da/noscrub/${LOGNAME}/test_data} -export COMROOT=${COMROOT:-/ptmpp1/${LOGNAME}/com} -export OZN_WORK_DIR=${OZN_WORK_DIR:-/stmpp1/${LOGNAME}/oznmon.${pid}} - -#------------------------------------------------------------ -# Specify versions -# -export gfs_ver=v15.0.0 - - -#------------------------------------------------------------ -# Load modules -# -. /usrx/local/Modules/3.2.9/init/ksh -module use /nwprod2/modulefiles -module load prod_util -module load util_shared - - -module list - - -#------------------------------------------------------------ -# WCOSS environment settings -# -export POE=YES - - -#------------------------------------------------------------ -# Set user specific variables -# -export OZNMON_SUFFIX=${OZNMON_SUFFIX:-testozn} -export NWTEST=${NWTEST:-/gpfs/${me}d2/emc/da/noscrub/${LOGNAME}/gfs.${gfs_ver}} - -export HOMEgfs_ozn=${HOMEgfs_ozn:-${NWTEST}} -export SCRgfs_ozn=${SCRgfs_ozn:-${HOMEgfs_ozn}/scripts} -JOBgfs_ozn=${JOBgfs_ozn:-${HOMEgfs_ozn}/jobs} - -export HOMEoznmon=${HOMEoznmon:-${NWTEST}} -export COM_IN=${COM_IN:-$DATAROOT} -export OZN_TANKDIR=${OZN_TANKDIR:-${COMROOT}/${OZNMON_SUFFIX}} - -#------------------------------------------------------------ -# Execute job -# -${JOBgfs_ozn}/JGDAS_VERFOZN - -exit - diff --git a/driver/gdas/test_jgdas_verfozn_cray.sh b/driver/gdas/test_jgdas_verfozn_cray.sh deleted file mode 100755 index fb2457624a..0000000000 --- a/driver/gdas/test_jgdas_verfozn_cray.sh +++ /dev/null @@ -1,79 +0,0 @@ -#!/bin/ksh - -#BSUB -o gdas_verfozn.o%J -#BSUB -e gdas_verfozn.o%J -#BSUB -J gdas_verfozn -#BSUB -q dev -#BSUB -M 80 -#BSUB -W 00:05 -#BSUB -P GFS-T2O -#BSUB -R "select[mem>80] rusage[mem=80]" - -##------------------------------------------------------------ -## This is the test driver script for the cray systems -## to run the JGDAS_VERFOZN job. -##------------------------------------------------------------ - -set -x - -export PDATE=${PDATE:-2018020812} -export NET=${NET:-gfs} -export RUN=${RUN:-gdas} - - -export PDY=$(echo $PDATE | cut -c1-8) -export cyc=$(echo $PDATE | cut -c9-10) -export job=gdas_verfozn.${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=${envir:-test} - -me=$(hostname | cut -c1) -export DATAROOT=${DATAROOT:-/gpfs/hps3/emc/da/noscrub/${LOGNAME}/test_data} -export COMROOT=${COMROOT:-/gpfs/hps2/ptmp/${LOGNAME}/com} -export OZN_WORK_DIR=${OZN_WORK_DIR:-/gpfs/hps2/stmp/${LOGNAME}/oznmon.${pid}} - -#------------------------------------------------------------ -# Specify versions -# -export gfs_ver=v15.0.0 - - -#------------------------------------------------------------ -# Load modules -# -. 
$MODULESHOME/init/ksh - -module load prod_util -module load util_shared - -module list - - -#------------------------------------------------------------ -# WCOSS environment settings -# -export POE=YES - - -#------------------------------------------------------------ -# Set user specific variables -# -export OZNMON_SUFFIX=${OZNMON_SUFFIX:-testozn} -export NWTEST=${NWTEST:-/gpfs/hps3/emc/da/noscrub/${LOGNAME}/gfs.${gfs_ver}} - -export HOMEgfs_ozn=${HOMEgfs_ozn:-${NWTEST}} -export SCRgfs_ozn=${SCRgfs_ozn:-${HOMEgfs_ozn}/scripts} -JOBgfs_ozn=${JOBgfs_ozn:-${HOMEgfs_ozn}/jobs} - -export HOMEoznmon=${HOMEoznmon:-${NWTEST}} -export COM_IN=${COM_IN:-$DATAROOT} -export OZN_TANKDIR=${OZN_TANKDIR:-${COMROOT}/${OZNMON_SUFFIX}} - -#------------------------------------------------------------ -# Execute job -# -${JOBgfs_ozn}/JGDAS_VERFOZN - -exit - diff --git a/driver/gdas/test_jgdas_verfrad.sh b/driver/gdas/test_jgdas_verfrad.sh deleted file mode 100755 index f286185185..0000000000 --- a/driver/gdas/test_jgdas_verfrad.sh +++ /dev/null @@ -1,74 +0,0 @@ -#!/bin/ksh - -#BSUB -o gdas_verfrad.o%J -#BSUB -e gdas_verfrad.o%J -#BSUB -J gdas_verfrad -#BSUB -q dev_shared -#BSUB -n 1 -#BSUB -R affinity[core] -#BSUB -M 100 -#BSUB -W 00:20 -#BSUB -a poe -#BSUB -P GFS-T2O - -set -x - -export PDATE=${PDATE:-2018022112} - -############################################################# -# Specify whether the run is production or development -############################################################# -export PDY=$(echo $PDATE | cut -c1-8) -export cyc=$(echo $PDATE | cut -c9-10) -export job=gdas_verfrad.${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -me=$(hostname | cut -c1) -export DATAROOT=${DATAROOT:-/gpfs/${me}d2/emc/da/noscrub/${LOGNAME}/test_data} -export COMROOT=${COMROOT:-/ptmpp1/$LOGNAME/com} - - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v15.0.0 - - -############################################################# -# Load modules -############################################################# -. 
/usrx/local/Modules/3.2.9/init/ksh -module use /nwprod2/modulefiles -module load prod_util -module load util_shared - - -module list - - -############################################################# -# WCOSS environment settings -############################################################# -export POE=YES - - -############################################################# -# Set user specific variables -############################################################# -export RADMON_SUFFIX=${RADMON_SUFFIX:-testrad} -export NWTEST=${NWTEST:-/gpfs/${me}d2/emc/da/noscrub/Edward.Safford/gfs.${gfs_ver}} -export HOMEgfs=${HOMEgfs:-${NWTEST}} -export JOBGLOBAL=${JOBGLOBAL:-${HOMEgfs}/jobs} -export HOMEradmon=${HOMEradmon:-${NWTEST}} -export COM_IN=${COM_IN:-${DATAROOT}} -export TANKverf=${TANKverf:-${COMROOT}/${RADMON_SUFFIX}} - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGDAS_VERFRAD - -exit - diff --git a/driver/gdas/test_jgdas_verfrad_cray.sh b/driver/gdas/test_jgdas_verfrad_cray.sh deleted file mode 100755 index d9668984ad..0000000000 --- a/driver/gdas/test_jgdas_verfrad_cray.sh +++ /dev/null @@ -1,71 +0,0 @@ -#!/bin/ksh - -#BSUB -o gdas_verfrad.o%J -#BSUB -e gdas_verfrad.o%J -#BSUB -J gdas_verfrad -#BSUB -q dev -#BSUB -M 100 -#BSUB -W 00:20 -#BSUB -P GFS-T2O -#BSUB -R "select[mem>100] rusage[mem=100]" - -set -x - -export PDATE=${PDATE:-2016100106} - -############################################################# -# Specify whether the run is production or development -############################################################# -export PDY=$(echo $PDATE | cut -c1-8) -export cyc=$(echo $PDATE | cut -c9-10) -export job=gdas_verfrad.${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v15.0.0 -#export global_shared_ver=v14.1.0 -#export gdas_radmon_ver=v2.0.0 -#export radmon_shared_ver=v2.0.4 - - -############################################################# -# Load modules -############################################################# -. 
$MODULESHOME/init/ksh - -module load prod_util - -module list - - -############################################################# -# WCOSS environment settings -############################################################# -export POE=YES - - -############################################################# -# Set user specific variables -############################################################# -export DATAROOT=${DATAROOT:-/gpfs/hps3/emc/da/noscrub/$LOGNAME/test_data} -export COMROOT=${COMROOT:-/gpfs/hps2/ptmp/$LOGNAME/com} -export RADMON_SUFFIX=${RADMON_SUFFIX:-testrad} -export NWTEST=${NWTEST:-/gpfs/hps3/emc/da/noscrub/${LOGNAME}/gfs.${gfs_ver}} -export HOMEgfs=${HOMEgfs:-${NWTEST}} -export JOBGLOBAL=${JOBGLOBAL:-${HOMEgfs}/jobs} -export HOMEradmon=${HOMEradmon:-${NWTEST}} -export COM_IN=${COM_IN:-${DATAROOT}} -export TANKverf=${TANKverf:-${COMROOT}/${RADMON_SUFFIX}} - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGDAS_VERFRAD - -exit - diff --git a/driver/gdas/test_jgdas_vminmon.sh b/driver/gdas/test_jgdas_vminmon.sh deleted file mode 100755 index 983a0e7c24..0000000000 --- a/driver/gdas/test_jgdas_vminmon.sh +++ /dev/null @@ -1,73 +0,0 @@ -#!/bin/ksh - -#BSUB -o gdas_vminmon.o%J -#BSUB -e gdas_vminmon.o%J -#BSUB -J gdas_vminmon -#BSUB -q dev_shared -#BSUB -n 1 -#BSUB -R affinity[core] -#BSUB -M 80 -#BSUB -W 00:05 -#BSUB -a poe -#BSUB -P GFS-T2O - -set -x - -export PDATE=${PDATE:-2016030706} - -############################################################# -# Specify whether the run is production or development -############################################################# -export PDY=$(echo $PDATE | cut -c1-8) -export cyc=$(echo $PDATE | cut -c9-10) -me=$(hostname | cut -c1) - -export job=gdas_vminmon.${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=/gpfs/${me}d2/emc/da/noscrub/Edward.Safford/test_data -export COMROOT=/ptmpp1/$LOGNAME/com - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v15.0.0 - - -############################################################# -# Load modules -############################################################# -. 
/usrx/local/Modules/3.2.9/init/ksh -module use /nwprod2/modulefiles -module load grib_util -module load prod_util - -module list - - -############################################################# -# WCOSS environment settings -############################################################# -export POE=YES - - -############################################################# -# Set user specific variables -############################################################# -export MINMON_SUFFIX=${MINMON_SUFFIX:-testminmon} -export NWTEST=${NWTEST:-/gpfs/${me}d2/emc/da/noscrub/Edward.Safford} -export HOMEgfs=${HOMEgfs:-${NWTEST}/gfs.${gfs_ver}} -export JOBGLOBAL=${JOBGLOBAL:-${HOMEgfs}/jobs} - -export COM_IN=${COM_IN:-${DATAROOT}} -export M_TANKverf=${M_TANKverf:-${COMROOT}/${MINMON_SUFFIX}} - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGDAS_VMINMON - -exit - diff --git a/driver/gdas/test_jgdas_vminmon_cray.sh b/driver/gdas/test_jgdas_vminmon_cray.sh deleted file mode 100755 index 6078d2ade4..0000000000 --- a/driver/gdas/test_jgdas_vminmon_cray.sh +++ /dev/null @@ -1,69 +0,0 @@ -#!/bin/ksh - -#BSUB -o gdas_vminmon.o%J -#BSUB -e gdas_vminmon.o%J -#BSUB -J gdas_vminmon -#BSUB -q dev -#BSUB -M 80 -#BSUB -W 00:05 -#BSUB -P GFS-T2O -#BSUB -R "select[mem>80] rusage[mem=80]" - -set -x - -export PDATE=${PDATE:-2016030700} - -############################################################# -# Specify whether the run is production or development -############################################################# -export PDY=$(echo $PDATE | cut -c1-8) -export cyc=$(echo $PDATE | cut -c9-10) -export job=gdas_vminmon.${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=${DATAROOT:-/gpfs/hps3/emc/da/noscrub/$LOGNAME/test_data} -export COMROOT=${COMROOT:-/gpfs/hps2/ptmp/$LOGNAME/com} - - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v15.0.0 - - -############################################################# -# Load modules -############################################################# -. 
$MODULESHOME/init/ksh - -module load prod_util -module load pm5 - -module list - - -############################################################# -# WCOSS environment settings -############################################################# -export POE=YES - - -############################################################# -# Set user specific variables -############################################################# -export MINMON_SUFFIX=${MINMON_SUFFIX:-testminmon} -export NWTEST=${NWTEST:-/gpfs/hps3/emc/da/noscrub/${LOGNAME}} -export HOMEgfs=${HOMEgfs:-${NWTEST}/gfs.${gfs_ver}} -export JOBGLOBAL=${JOBGLOBAL:-${HOMEgfs}/jobs} -export COM_IN=${COM_IN:-${DATAROOT}} -export M_TANKverf=${M_TANKverf:-${COMROOT}/${MINMON_SUFFIX}} - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGDAS_VMINMON - -exit - diff --git a/driver/gfs/para_config.gfs_analysis b/driver/gfs/para_config.gfs_analysis deleted file mode 100755 index e910b5a1f7..0000000000 --- a/driver/gfs/para_config.gfs_analysis +++ /dev/null @@ -1,31 +0,0 @@ -# Set variables -export NWROOT=$NWTEST -export COM_IN=/gpfs/hps/ptmp/emc.glopara/com2/gfs/para -export COM_OUT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/test -export SENDDBN=YES -export SAVEGES=YES -export SENDCOM=YES -export gesenvir=para -export GESROOT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/test -export KEEPDATA=NO - - -# Set paths -export HOMEgsm=$NWTEST/global_shared.${global_shared_ver} - - -# Set fix files -if [ $CDATE -ge 2017030800 -a $CDATE -lt 2017031312 ] ; then - SATINFO=/gpfs/hps/emc/global/noscrub/emc.glopara/para_gfs/prnemsrn/global_satinfo.txt -fi - - -# Set scripts -GETGESSH=$HOMEgsm/ush/getges.sh - - -# Set executables - - -# Dev para jobs -export ERRSCRIPT='eval [[ $err = 0 ]]' diff --git a/driver/gfs/para_config.gfs_forecast_high b/driver/gfs/para_config.gfs_forecast_high deleted file mode 100755 index c1454c4110..0000000000 --- a/driver/gfs/para_config.gfs_forecast_high +++ /dev/null @@ -1,32 +0,0 @@ -# Set variables -export NWROOT=$NWTEST -export COM_IN=/gpfs/hps/ptmp/emc.glopara/com2/gfs/para -export COM_OUT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/test -export SENDDBN=YES -export SAVEGES=NO -export GETGES_NWG=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gesenvir=para -export GETGES_COM=/gpfs/hps/ptmp/emc.glopara/com2 -export GESROOT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gespath=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export KEEPDATA=NO - - -# Set paths -export HOMEgfs=$NWTEST/gfs.${gfs_ver} -export HOMEgsm=$NWTEST/global_shared.${global_shared_ver} - - -# Set fix files - - -# Set scripts - - -# Set executables - - -# Dev para jobs -export ERRSCRIPT='eval [[ $err = 0 ]]' - - diff --git a/driver/gfs/para_config.gfs_forecast_low b/driver/gfs/para_config.gfs_forecast_low deleted file mode 100755 index c1454c4110..0000000000 --- a/driver/gfs/para_config.gfs_forecast_low +++ /dev/null @@ -1,32 +0,0 @@ -# Set variables -export NWROOT=$NWTEST -export COM_IN=/gpfs/hps/ptmp/emc.glopara/com2/gfs/para -export COM_OUT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/test -export SENDDBN=YES -export SAVEGES=NO -export GETGES_NWG=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gesenvir=para -export GETGES_COM=/gpfs/hps/ptmp/emc.glopara/com2 -export GESROOT=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export gespath=/gpfs/hps/ptmp/emc.glopara/com2/gfs/nwges2 -export KEEPDATA=NO - - -# Set paths -export HOMEgfs=$NWTEST/gfs.${gfs_ver} -export 
HOMEgsm=$NWTEST/global_shared.${global_shared_ver} - - -# Set fix files - - -# Set scripts - - -# Set executables - - -# Dev para jobs -export ERRSCRIPT='eval [[ $err = 0 ]]' - - diff --git a/driver/gfs/test_emcsfc.sh b/driver/gfs/test_emcsfc.sh deleted file mode 100755 index 477b1ddbc7..0000000000 --- a/driver/gfs/test_emcsfc.sh +++ /dev/null @@ -1,46 +0,0 @@ -#!/bin/sh - -#-------------------------------------------------------------- -# Run the JGFS_EMCSFC_SFC_PREP j-job on wcoss cray -# -# Invoke as follows: -# 'cat $script | bsub' -#-------------------------------------------------------------- - -#BSUB -oo emcsfc.log -#BSUB -eo emcsfc.log -#BSUB -q dev_shared -#BSUB -R rusage[mem=2000] -#BSUB -J emcsfc -#BSUB -P GFS-T2O -#BSUB -cwd . -#BSUB -W 0:03 - -set -x - -export cyc="00" -export job=emcsfc_sfc_prep_${cyc} -export KEEPDATA="YES" -export SENDECF="NO" -export SENDCOM="YES" -export RUN_ENVIR="nco" - -export DATA="/gpfs/hps/stmp/$LOGNAME/tmpnwprd/${job}" -export jlogfile="/gpfs/hps/stmp/$LOGNAME/jlogfile" - -module load prod_envir/1.1.0 - -export envir="prod" -export COMROOT="/gpfs/hps/stmp/${LOGNAME}"${COMROOT} - -export NWROOT="/gpfs/hps/emc/global/noscrub/George.Gayno/q3fy17_final" -export global_shared_ver="v14.1.0" - -module load grib_util/1.0.3 -module load prod_util/1.0.5 - -export jobid="LLgfs_emcsfc_sfc_prep" -export gfs_ver="v14.1.0" -$NWROOT/gfs.${gfs_ver}/jobs/JGFS_EMCSFC_SFC_PREP - -exit 0 diff --git a/driver/gfs/test_gfs_analysis.sh b/driver/gfs/test_gfs_analysis.sh deleted file mode 100755 index 4b697f6a7f..0000000000 --- a/driver/gfs/test_gfs_analysis.sh +++ /dev/null @@ -1,90 +0,0 @@ -#!/bin/sh --login - -#BSUB -L /bin/sh -#BSUB -P GFS-T2O -#BSUB -o gfs_analysis.o%J -#BSUB -e gfs_analysis.o%J -#BSUB -J gfs_analysis -#BSUB -q devonprod -#BSUB -M 3072 -#BSUB -extsched 'CRAYLINUX[]' -#BSUB -W 01:00 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/q3fy17_final/gfs.v14.1.0/driver - -set -x - -export NODES=240 -export ntasks=480 -export ptile=2 -export threads=12 - -export CDATE=2017040700 - - -############################################################# -# Specify whether the run is production or development -############################################################# -export RUN_ENVIR=para -export PDY=$(echo $CDATE | cut -c1-8) -export cyc=$(echo $CDATE | cut -c9-10) -export job=gfs_analysis_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=/gpfs/hps/stmp/$LOGNAME/test - - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v14.1.0 -export global_shared_ver=v14.1.0 -export crtm_ver=2.2.4 -export prod_envir_ver=1.1.0 -export grib_util_ver=1.0.3 -export prod_util_ver=1.0.8 -export util_shared_ver=1.0.3 - - -############################################################# -# Load modules -############################################################# -. 
$MODULESHOME/init/sh -module load crtm-intel/${crtm_ver} -module load prod_envir/$prod_envir_ver -module load grib_util/$grib_util_ver -module load prod_util/$prod_util_ver -module load util_shared/$util_shared_ver -module load cfp-intel-sandybridge - -module list - - -############################################################# -# WCOSS_C environment settings -############################################################# -export KMP_AFFINITY=disabled -export OMP_STACKSIZE=2G -export OMP_NUM_THREADS=$threads -export FORT_BUFFERED=true - -export OMP_NUM_THREADS_CY=24 -export NTHREADS=$OMP_NUM_THREADS_CY -export NTHREADS_GSI=$threads -export NTHSTACK=1024000000 - - -############################################################# -# Set user specific variables -############################################################# -export NWTEST=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/q3fy17_final -export PARA_CONFIG=$NWTEST/gfs.${gfs_ver}/driver/para_config.gfs_analysis -export JOBGLOBAL=$NWTEST/gfs.${gfs_ver}/jobs - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGFS_ANALYSIS - -exit diff --git a/driver/gfs/test_gfs_forecast_high.sh b/driver/gfs/test_gfs_forecast_high.sh deleted file mode 100755 index ceef917ee8..0000000000 --- a/driver/gfs/test_gfs_forecast_high.sh +++ /dev/null @@ -1,82 +0,0 @@ -#!/bin/sh --login - -#BSUB -L /bin/sh -#BSUB -P GFS-T2O -#BSUB -o gfs_forecast_high.o%J -#BSUB -e gfs_forecast_high.o%J -#BSUB -J gfs_forecast_high -#BSUB -q devonprod -#BSUB -M 768 -#BSUB -extsched 'CRAYLINUX[]' -#BSUB -W 02:00 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work/gfs.v14.1.0/driver - -set -x - -# 65 nodes = 59 compute nodes + 6 i/o nodes -# set WRT_GROUP=6 for 6 i/o nodes (see ../parm/gfs_forecast_high.parm) -# set WRTPE_PER_GROUP=4 to match ptile -export NODES=65 -export ntasks=260 -export ptile=4 -export threads=6 - -export CDATE=2017012506 - -############################################################# -# Specify whether the run is production or development -############################################################# -export RUN_ENVIR=para -export PDY=$(echo $CDATE | cut -c1-8) -export cyc=$(echo $CDATE | cut -c9-10) -export job=gfs_forecast_high_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=/gpfs/hps/stmp/$LOGNAME/test - - -############################################################# -# Specify versions -############################################################# -export global_shared_ver=v14.1.0 -export gfs_ver=v14.1.0 -export grib_util_ver=1.0.3 -export prod_util_ver=1.0.5 - - -############################################################# -# Load modules -############################################################# -. 
$MODULESHOME/init/sh -module load grib_util/$grib_util_ver -module load prod_util/$prod_util_ver -module load craype-hugepages16M - -module list - - -############################################################# -# WCOSS_C environment settings -############################################################# -export KMP_AFFINITY=disabled -export OMP_STACKSIZE=1024m -export OMP_NUM_THREADS=$threads -export NTHREADS=$threads - - -############################################################# -# Set user specific variables -############################################################# -export NWTEST=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work -export PARA_CONFIG=$NWTEST/gfs.${gfs_ver}/driver/para_config.gfs_forecast_high -export JOBGLOBAL=$NWTEST/gfs.${gfs_ver}/jobs - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGFS_FORECAST_HIGH - -exit - diff --git a/driver/gfs/test_gfs_forecast_low.sh b/driver/gfs/test_gfs_forecast_low.sh deleted file mode 100755 index af36679da0..0000000000 --- a/driver/gfs/test_gfs_forecast_low.sh +++ /dev/null @@ -1,82 +0,0 @@ -#!/bin/sh --login - -#BSUB -L /bin/sh -#BSUB -P GFS-T2O -#BSUB -o gfs_forecast_low.o%J -#BSUB -e gfs_forecast_low.o%J -#BSUB -J gfs_forecast_low -#BSUB -q devonprod -#BSUB -M 768 -#BSUB -extsched 'CRAYLINUX[]' -#BSUB -W 01:00 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work/gfs.v14.1.0/driver - -set -x - -# 20 nodes = 18 compute nodes + 2 i/o nodes -# set WRT_GROUP=2 for 2 i/o nodes (see ../parm/gfs_forecast_low.parm) -# set WRTPE_PER_GROUP=4 to match ptile -export NODES=20 -export ntasks=80 -export ptile=4 -export threads=6 - -export CDATE=2017012506 - -############################################################# -# Specify whether the run is production or development -############################################################# -export RUN_ENVIR=para -export PDY=$(echo $CDATE | cut -c1-8) -export cyc=$(echo $CDATE | cut -c9-10) -export job=gfs_forecast_low_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=/gpfs/hps/stmp/$LOGNAME/test - - -############################################################# -# Specify versions -############################################################# -export global_shared_ver=v14.1.0 -export gfs_ver=v14.1.0 -export grib_util_ver=1.0.3 -export prod_util_ver=1.0.5 - - -############################################################# -# Load modules -############################################################# -. 
$MODULESHOME/init/sh -module load grib_util/$grib_util_ver -module load prod_util/$prod_util_ver -module load craype-hugepages16M - -module list - - -############################################################# -# WCOSS_C environment settings -############################################################# -export KMP_AFFINITY=disabled -export OMP_STACKSIZE=1024m -export OMP_NUM_THREADS=$threads -export NTHREADS=$threads - - -############################################################# -# Set user specific variables -############################################################# -export NWTEST=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work -export PARA_CONFIG=$NWTEST/gfs.${gfs_ver}/driver/para_config.gfs_forecast_low -export JOBGLOBAL=$NWTEST/gfs.${gfs_ver}/jobs - - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGFS_FORECAST_LOW - -exit - diff --git a/driver/gfs/test_jgfs_cyclone_tracker.ecf b/driver/gfs/test_jgfs_cyclone_tracker.ecf deleted file mode 100755 index 326ac94fc3..0000000000 --- a/driver/gfs/test_jgfs_cyclone_tracker.ecf +++ /dev/null @@ -1,121 +0,0 @@ -#BSUB -J GFS_tracker_TEST_06 -#BSUB -o /ptmpp2/Qingfu.Liu/com2/gfs_cyclone_traker_06.o%J -#BSUB -e /ptmpp2/Qingfu.Liu/com2/gfs_cyclone_traker_06.o%J -#BSUB -L /bin/sh -#BSUB -q debug -#BSUB -W 00:30 -#BSUB -cwd /ptmpp2/Qingfu.Liu/com2 -#BSUB -P GFS-T2O -##BSUB -R rusage[mem=5000] -##BSUB -R affinity[core] -#BSUB -n 1 -#BSUB -R span[ptile=1] -#BSUB -R affinity[cpu(32):distribute=balance] -#BSUB -a poe -#BSUB -x - -#%include -#%include -. /usrx/local/Modules/default/init/ksh -module use /nwprod2/modulefiles -module load grib_util/v1.0.1 -module load prod_util/v1.0.1 -module load ibmpe ics lsf - -set -x - -export OMP_NUM_THREADS=32 - - export MP_LABELIO=yes - export MP_MPILIB=mpich2 - export MP_EUILIB=us - export MP_TASK_AFFINITY=cpu:32 - export MP_USE_BULK_XFER=yes - export MP_STDOUTMODE=unordered - export MPICH_ALLTOALL_THROTTLE=0 - export MP_COREFILE_FORMAT=core.txt - export OMP_STACKSIZE=3G - export MP_COMPILER=intel - -export envir=dev2 -export cyc=06 -export job=gfs_cyclone_tracker_${cyc} -export RUN_ENVIR=test -#export NWROOT=/nwprod2 -export NWROOT=/global/save/Qingfu.Liu - -#export DATAROOT=/tmpnwprd_p2 -export DATAROOT=/ptmpp2/Qingfu.Liu - -#export COMROOT=/com2 -export COMROOT=/ptmpp2/Qingfu.Liu/com2 -export COMDATEROOT=/com2 - -export shared_global_home=$NWROOT/shared_nco_20160129 -export gfs_global_home=$NWROOT/gfs_nco_20160129 -export gdas_global_home=$NWROOT/gdas_nco_20160129 -#export DCOMROOT=/dcom - -export COMROOTp1=/com -export KEEPDATA=YES -export CLEAN=NO -export cycle=t00z - -#which setpdy.sh -setpdy.sh -. PDY - -export COMINgfs=/com/gfs/prod/gfs.${PDY} -export COMINgdas=/com/gfs/prod/gdas.${PDY} - -# versions file for tracker $tracker.ver -VERSION_FILE=${NWROOT}/versions/tropcy_qc_reloc.ver -if [ -f $VERSION_FILE ]; then - . $VERSION_FILE -else - ecflow_client --abort - exit -fi - -# CALL executable job script here -#export HOMERELO=${NWROOT}/tropcy_qc_reloc.${tropcy_qc_reloc_ver}_r62774_phase2 -export HOMERELO=$shared_global_home -export HOMESYND=${HOMERELO} -#export envir_getges=prod -$gfs_global_home/jobs/JGFS_ATMOS_CYCLONE_TRACKER - -if [ $? 
-ne 0 ]; then -# ecflow_client --abort - exit -fi - -#%include -#%manual -######################################################################## -###################################################################### -#PURPOSE: Executes the job JGFS_ATMOS_CYCLONE_TRACKER -###################################################################### -############################################################### -## Function been tested: creates GFS TC forecast track -## -## Calling sequence: JGFS_ATMOS_CYCLONE_TRACKER, global_extrkr.sh -## -## Initial condition: provide hours (cyc=?) -## -## Usage: bsub < test_jgfs_cyclone_tracker -## -## Data_In: COMINgfs=/com/gfs/prod/gfs.${PDY} -## COMINgdas=/com/gfs/prod/gdas.${PDY} -## -## Data_Out: /ptmpp2/Qingfu.Liu/com2/gfs/dev2/gfs.$PDY -## -## Result verification: compare with the operational results -################################################################ -###################################################################### -# Job specific troubleshooting instructions: -# see generic troubleshoot manual page -# -###################################################################### - -# include manual page below -#%end diff --git a/driver/gfs/test_jgfs_cyclone_tracker_cray.sh b/driver/gfs/test_jgfs_cyclone_tracker_cray.sh deleted file mode 100755 index 595512503e..0000000000 --- a/driver/gfs/test_jgfs_cyclone_tracker_cray.sh +++ /dev/null @@ -1,167 +0,0 @@ -#!/bin/bash -#BSUB -J t1534 -#BSUB -W 0:30 -#BSUB -extsched 'CRAYLINUX[]' -R '1*{order[slots] select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24] cu[type=cabinet]}' -#BSUB -o /gpfs/hps/emc/global/noscrub/Qingfu.Liu/gfs.v14.1.0/driver/test_jgfs_cyclone_tracker_06.o%J -#BSUB -e /gpfs/hps/emc/global/noscrub/Qingfu.Liu/gfs.v14.1.0/driver/test_jgfs_cyclone_tracker_06.o%J -###BSUB -o t574.stdout.%J -###BSUB -e t574.stderr.%J -#BSUB -q "dev" -#BSUB -P "GFS-T2O" -#BSUB -M 200 -###BSUB -M "60" - - -module use /gpfs/hps/nco/ops/nwprod/modulefiles -module load prod_util -module load prod_envir/1.1.0 -module unload grib_util -module load grib_util/1.0.3 -##module load crtm-intel/2.2.3 -module load PrgEnv-intel craype cray-mpich ESMF-intel-haswell/3_1_0rp5 -module list - -#export MKL_CBWR=AVX -#ulimit -s unlimited -#ulimit -a - -set -x - -export OMP_NUM_THREADS=24 - - export MP_LABELIO=yes - export MP_MPILIB=mpich2 - export MP_EUILIB=us - export MP_TASK_AFFINITY=cpu:24 - export MP_USE_BULK_XFER=yes - export MP_STDOUTMODE=unordered - export MPICH_ALLTOALL_THROTTLE=0 - export MP_COREFILE_FORMAT=core.txt - export OMP_STACKSIZE=3G - export MP_COMPILER=intel - -#export envir=prod -export envir=para -export cyc=06 -export job=test_jgfs_cyclone_tracker_${cyc} -export RUN_ENVIR=test -#export NWROOT=/nwprod2 -#export NWROOT=/global/save/Qingfu.Liu -export NWROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu - -#export DATAROOT=/tmpnwprd_p2 -#export DATAROOT=/ptmpp2/Qingfu.Liu -export DATAROOT=/gpfs/hps/ptmp/Qingfu.Liu - -#export COMROOT=/com2 -#export COMROOT=/ptmpp2/Qingfu.Liu/com2 -#export COMROOT=/gpfs/hps/ptmp/Qingfu.Liu/com -#export COMROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com -#export COMDATEROOT=/com -#export COMROOT=/gpfs/hps/ptmp/emc.glopara/com2 -#export COMDATEROOT=/com2 -export COMROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com -export COMDATEROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com - -#export DCOMROOT=/dcom - -#export COMROOTp1=/gpfs/gp1/nco/ops/com -#export COMROOTp1=/gpfs/tp2/nco/ops/com -#export COMROOTp1=/gpfs/gp2/nco/ops/com -export 
COMROOTp1=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com -export KEEPDATA=YES -export CLEAN=NO -export cycle=t${cyc}z - -#which setpdy.sh -#setpdy.sh -#. PDY - -export archsyndir=${COMINsyn:-$(compath.py gfs/prod/syndat)} -export WGRIB2=/gpfs/hps/nco/ops/nwprod/grib_util.v1.0.3/exec/wgrib2 -export GRB2INDEX=/gpfs/hps/nco/ops/nwprod/grib_util.v1.0.3/exec/grb2index -export GRBINDEX2=/gpfs/hps/nco/ops/nwprod/grib_util.v1.0.3/exec/grb2index - -#export PDY=20150723 -export PDY=20140814 - -#export COMINgfs=/com/gfs/prod/gfs.${PDY} -#export COMINgdas=/com/gfs/prod/gdas.${PDY} -#export COMINgfs=/gpfs/gp2/nco/ops/com/gfs/prod/gfs.${PDY} -#export COMINgdas=/gpfs/gp2/nco/ops/com/gfs/prod/gdas.${PDY} -export COMINgfs=$COMROOT/gfs/$envir/gfs.${PDY} -export COMINgdas=$COMROOT/gfs/$envir/gdas.${PDY} -export ARCHSYND=${COMINsyn:-$(compath.py gfs/prod/syndat)} -export HOMENHC=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/guidance/storm-data/ncep -#export GETGES_COM=/gpfs/gp2/nco/ops/com -#export GESROOT=/gpfs/gp2/nco/ops/com -#export GESROOT=/gpfs/hps/ptmp/Qingfu.Liu/com -#export GETGES_COM=/gpfs/hps/ptmp/Qingfu.Liu/com -export GESROOT=$COMROOT -export GETGES_COM=$COMROOT - -# versions file for tracker $tracker.ver -VERSION_FILE=${NWROOT}/versions/tropcy_qc_reloc.ver -if [ -f $VERSION_FILE ]; then - . $VERSION_FILE -else - ecflow_client --abort - exit -fi - -export shared_global_home=$NWROOT/global_shared.v14.1.0 -export gfs_global_home=$NWROOT/gfs.v14.1.0 -export gdas_global_home=$NWROOT/gdas.v14.1.0 - -export files_override=F -export PROCESS_TROPCY=NO -export copy_back=NO -export SENDCOM=NO -export APRNRELOC="time aprun -b -j1 -n7 -N1 -d24 -cc depth " -export APRNGETTX="time aprun -q -j1 -n1 -N1 -d1 -cc depth" -#export APRNRELOC="time aprun -b -j0 -n7 -N1 -d32 -cc depth" - -# CALL executable job script here -export HOMERELO=$shared_global_home -#export HOMERELO=${NWROOT}/tropcy_qc_reloc.${tropcy_qc_reloc_ver}_r62774_phase2 -export HOMESYND=${HOMERELO} -#export envir_getges=prod -$gfs_global_home/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC - -if [ $? -ne 0 ]; then -# ecflow_client --abort - exit -fi - -#%include -#%manual -###################################################################### -#PURPOSE: Executes the job JGLOBAL_ATMOS_TROPCY_QC_RELOC -###################################################################### -############################################################# -# Function been tested: TCvital quality control and archive, hurricane relocation -# -# Calling sequence: JGLOBAL_ATMOS_TROPCY_QC_RELOC, exglobal_atmos_tropcy_qc_reloc.sh, -# # syndat_qctropcy.sh, tropcy_relocate.sh,syndat_getjtbul.sh, -# # tropcy_relocate_extrkr.sh,parse-storm-type.pl -# -# Initial condition: provide hours (cyc=?) 
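# --- Editor's note (illustrative sketch, not part of the original driver) ---
# The drivers above repeat one guard pattern: source a versions file when it
# exists, otherwise abort through ecFlow, then stop if the called J-job exits
# non-zero.  A minimal stand-alone sketch of that pattern follows; NWROOT,
# the versions file, and the J-job path are placeholders (JGFS_ATMOS_CYCLONE_TRACKER
# is used only as an example), and ecflow_client is only meaningful when an
# ecFlow server connection is already configured.
VERSION_FILE="${NWROOT:-/tmp}/versions/tropcy_qc_reloc.ver"
if [ -f "${VERSION_FILE}" ]; then
  . "${VERSION_FILE}"                       # defines the *_ver variables the job expects
else
  ecflow_client --abort="missing ${VERSION_FILE}" || true
  exit 1
fi
"${gfs_global_home:?}/jobs/JGFS_ATMOS_CYCLONE_TRACKER"
rc=$?
if [ "${rc}" -ne 0 ]; then                  # propagate a failed J-job back to ecFlow
  ecflow_client --abort="tracker returned rc=${rc}" || true
  exit "${rc}"
fi
# --- End editor's note ---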
-# -# Usage: bsub < test_jgfs_tropcy_qc_reloc -# -# Data_In: COMINgfs=/com/gfs/prod/gfs.${PDY} -# COMINgdas=/com/gfs/prod/gdas.${PDY} -# -# Data_Out: /ptmpp2/Qingfu.Liu/com2/gfs/dev2/gfs.${PDY} -# -# Result verification: compare with the operational results -# (results might be slightly different due to 3hourly/hourly tracker) -############################################################## -###################################################################### -# Job specific troubleshooting instructions: -# see generic troubleshoot manual page -# -###################################################################### - -# include manual page below -#%end diff --git a/driver/gfs/test_jgfs_tropcy_qc_reloc.ecf b/driver/gfs/test_jgfs_tropcy_qc_reloc.ecf deleted file mode 100755 index 07c85f36b9..0000000000 --- a/driver/gfs/test_jgfs_tropcy_qc_reloc.ecf +++ /dev/null @@ -1,124 +0,0 @@ -#BSUB -J Relocation_TEST_06 -#BSUB -o /ptmpp2/Qingfu.Liu/com2/gfs_tropcy_qc_reloc_06.o%J -#BSUB -e /ptmpp2/Qingfu.Liu/com2/gfs_tropcy_qc_reloc_06.o%J -#BSUB -L /bin/sh -#BSUB -q debug -#BSUB -W 00:30 -#BSUB -cwd /ptmpp2/Qingfu.Liu/com2 -#BSUB -P GFS-T2O -##BSUB -R rusage[mem=5000] -##BSUB -R affinity[core] -#BSUB -n 7 -#BSUB -R span[ptile=1] -#BSUB -R affinity[cpu(32):distribute=balance] -#BSUB -a poe -#BSUB -x - -#%include -#%include -. /usrx/local/Modules/default/init/ksh -module use /nwprod2/modulefiles -module load grib_util/v1.0.1 -module load prod_util/v1.0.1 -module load ibmpe ics lsf - -set -x - -export OMP_NUM_THREADS=32 - - export MP_LABELIO=yes - export MP_MPILIB=mpich2 - export MP_EUILIB=us - export MP_TASK_AFFINITY=cpu:32 - export MP_USE_BULK_XFER=yes - export MP_STDOUTMODE=unordered - export MPICH_ALLTOALL_THROTTLE=0 - export MP_COREFILE_FORMAT=core.txt - export OMP_STACKSIZE=3G - export MP_COMPILER=intel - -export envir=dev2 -export cyc=06 -export job=gfs_tropcy_qc_reloc_${cyc} -export RUN_ENVIR=test -#export NWROOT=/nwprod2 -export NWROOT=/global/save/Qingfu.Liu - -#export DATAROOT=/tmpnwprd_p2 -export DATAROOT=/ptmpp2/Qingfu.Liu - -#export COMROOT=/com2 -export COMROOT=/ptmpp2/Qingfu.Liu/com2 -export COMDATEROOT=/com2 - -#export DCOMROOT=/dcom - -export shared_global_home=$NWROOT/shared_nco_20160129 -export gfs_global_home=$NWROOT/gfs_nco_20160129 -export gdas_global_home=$NWROOT/gdas_nco_20160129 - -export COMROOTp1=/com -export KEEPDATA=YES -export CLEAN=NO -export cycle=t00z - -#which setpdy.sh -setpdy.sh -. PDY - -export COMINgfs=/com/gfs/prod/gfs.${PDY} -export COMINgdas=/com/gfs/prod/gdas.${PDY} - -# versions file for tracker $tracker.ver -VERSION_FILE=${NWROOT}/versions/tropcy_qc_reloc.ver -if [ -f $VERSION_FILE ]; then - . $VERSION_FILE -else - ecflow_client --abort - exit -fi - -# CALL executable job script here -export HOMERELO=$shared_global_home -#export HOMERELO=${NWROOT}/tropcy_qc_reloc.${tropcy_qc_reloc_ver}_r62774_phase2 -export HOMESYND=${HOMERELO} -#export envir_getges=prod -$gfs_global_home/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC - -if [ $? 
-ne 0 ]; then -# ecflow_client --abort - exit -fi - -#%include -#%manual -###################################################################### -##PURPOSE: Executes the job JGLOBAL_ATMOS_TROPCY_QC_RELOC -####################################################################### -############################################################## -# Function been tested: TCvital quality control and archive, hurricane relocation -# -# Calling sequence: JGLOBAL_ATMOS_TROPCY_QC_RELOC, exglobal_atmos_tropcy_qc_reloc.sh, -# syndat_qctropcy.sh, tropcy_relocate.sh,syndat_getjtbul.sh, -# tropcy_relocate_extrkr.sh,parse-storm-type.pl -# -# Initial condition: provide hours (cyc=?) -# -# Usage: bsub < test_jgfs_tropcy_qc_reloc -# -# Data_In: COMINgfs=/com/gfs/prod/gfs.${PDY} -# COMINgdas=/com/gfs/prod/gdas.${PDY} -# -# Data_Out: /ptmpp2/Qingfu.Liu/com2/gfs/dev2/gfs.${PDY} -# -# Result verification: compare with the operational results -# (results might be slightly different due to 3hourly/hourly tracker) -############################################################### -###################################################################### -# Job specific troubleshooting instructions: -# see generic troubleshoot manual page -# -###################################################################### - -# include manual page below -#%end diff --git a/driver/gfs/test_jgfs_tropcy_qc_reloc_cray.sh b/driver/gfs/test_jgfs_tropcy_qc_reloc_cray.sh deleted file mode 100755 index 1c82464246..0000000000 --- a/driver/gfs/test_jgfs_tropcy_qc_reloc_cray.sh +++ /dev/null @@ -1,155 +0,0 @@ -#!/bin/bash -#BSUB -J t1534 -#BSUB -W 0:30 -#BSUB -extsched 'CRAYLINUX[]' -R '1*{order[slots] select[craylinux && !vnode]} + 168*{select[craylinux && vnode]span[ptile=24] cu[type=cabinet]}' -#BSUB -o /gpfs/hps/emc/global/noscrub/Qingfu.Liu/gfs.v14.1.0/driver/gfs_tropcy_qc_reloc_06.o%J -#BSUB -e /gpfs/hps/emc/global/noscrub/Qingfu.Liu/gfs.v14.1.0/driver/gfs_tropcy_qc_reloc_06.o%J -###BSUB -o t574.stdout.%J -###BSUB -e t574.stderr.%J -#BSUB -q "dev" -#BSUB -P "GFS-T2O" -#BSUB -M 200 -###BSUB -M "60" - - -module use /gpfs/hps/nco/ops/nwprod/modulefiles -module load prod_util -module load prod_envir/1.1.0 -module unload grib_util -module load grib_util/1.0.3 -##module load crtm-intel/2.2.3 -module load PrgEnv-intel craype cray-mpich ESMF-intel-haswell/3_1_0rp5 -module list - -#export MKL_CBWR=AVX -#ulimit -s unlimited -#ulimit -a - -set -x - -export OMP_NUM_THREADS=24 - - export MP_LABELIO=yes - export MP_MPILIB=mpich2 - export MP_EUILIB=us - export MP_TASK_AFFINITY=cpu:24 - export MP_USE_BULK_XFER=yes - export MP_STDOUTMODE=unordered - export MPICH_ALLTOALL_THROTTLE=0 - export MP_COREFILE_FORMAT=core.txt - export OMP_STACKSIZE=3G - export MP_COMPILER=intel - -export envir=para -export cyc=06 -export job=gfs_tropcy_qc_reloc_${cyc} -export RUN_ENVIR=test -#export NWROOT=/nwprod2 -#export NWROOT=/global/save/Qingfu.Liu -export NWROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu - -#export DATAROOT=/tmpnwprd_p2 -#export DATAROOT=/ptmpp2/Qingfu.Liu -export DATAROOT=/gpfs/hps/ptmp/Qingfu.Liu - -#export COMROOT=/com2 -#export COMROOT=/ptmpp2/Qingfu.Liu/com2 -#export COMROOT=/gpfs/hps/ptmp/Qingfu.Liu/com -#export COMDATEROOT=/com -export COMROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com -export COMDATEROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com - -#export DCOMROOT=/dcom - -#export COMROOTp1=/gpfs/gp1/nco/ops/com -#export COMROOTp1=/gpfs/tp1/nco/ops/com -export COMROOTp1=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com -export KEEPDATA=YES -export 
CLEAN=NO -export cycle=t${cyc}z - -#which setpdy.sh -#setpdy.sh -#. PDY - -export PDY=20140814 -##export PDY=20150723 - -#export COMINgfs=/com/gfs/prod/gfs.${PDY} -#export COMINgdas=/com/gfs/prod/gdas.${PDY} -#export COMINgfs=/gpfs/tp1/nco/ops/com/gfs/prod/gfs.${PDY} -#export COMINgdas=/gpfs/tp1/nco/ops/com/gfs/prod/gdas.${PDY} -export COMINgfs=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com/gfs/$envir/gfs.${PDY} -export COMINgdas=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com/gfs/$envir/gdas.${PDY} -export ARCHSYND=${COMINsyn:-$(compath.py gfs/prod/syndat)} -export HOMENHC=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/guidance/storm-data/ncep -#export GETGES_COM=/gpfs/tp1/nco/ops/com -export GETGES_COM=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com -export GESROOT=/gpfs/hps/emc/global/noscrub/Qingfu.Liu/com - -# versions file for tracker $tracker.ver -VERSION_FILE=${NWROOT}/versions/tropcy_qc_reloc.ver -if [ -f $VERSION_FILE ]; then - . $VERSION_FILE -else - ecflow_client --abort - exit -fi - -export shared_global_home=$NWROOT/global_shared.v14.1.0 -export gfs_global_home=$NWROOT/gfs.v14.1.0 -export gdas_global_home=$NWROOT/gdas.v14.1.0 - -export files_override=F -export PROCESS_TROPCY=NO -export copy_back=NO -export SENDCOM=NO -export APRNRELOC="time aprun -b -j1 -n7 -N1 -d24 -cc depth " -export APRNGETTX="time aprun -q -j1 -n1 -N1 -d1 -cc depth" -#export APRNRELOC="time aprun -b -j0 -n7 -N1 -d32 -cc depth" - -# CALL executable job script here -export HOMERELO=$shared_global_home -#export HOMERELO=${NWROOT}/tropcy_qc_reloc.${tropcy_qc_reloc_ver}_r62774_phase2 -export HOMESYND=${HOMERELO} -#export envir_getges=prod -$gfs_global_home/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC - - -if [ $? -ne 0 ]; then -# ecflow_client --abort - exit -fi - -#%include -#%manual -###################################################################### -#PURPOSE: Executes the job JGLOBAL_ATMOS_TROPCY_QC_RELOC -###################################################################### -############################################################# -# Function been tested: TCvital quality control and archive, hurricane relocation -# -# Calling sequence: JGLOBAL_ATMOS_TROPCY_QC_RELOC, exglobal_atmos_tropcy_qc_reloc.sh, -# # syndat_qctropcy.sh, tropcy_relocate.sh,syndat_getjtbul.sh, -# # tropcy_relocate_extrkr.sh,parse-storm-type.pl -# -# Initial condition: provide hours (cyc=?) 
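# --- Editor's note (illustrative sketch, not part of the original driver) ---
# These drivers build the date pieces (PDY, cyc, cycle) and the COM input
# directories from a single 10-digit date string plus a COM root.  A minimal
# sketch of that derivation; CDATE, COMROOT, and envir below are example
# values, not operational settings:
CDATE=${CDATE:-2014081406}                  # yyyymmddhh
PDY=$(echo "${CDATE}" | cut -c1-8)          # yyyymmdd
cyc=$(echo "${CDATE}" | cut -c9-10)         # hh
cycle="t${cyc}z"
envir=${envir:-para}
COMROOT=${COMROOT:-/tmp/${LOGNAME:-user}/com}
COMINgfs=${COMROOT}/gfs/${envir}/gfs.${PDY}
COMINgdas=${COMROOT}/gfs/${envir}/gdas.${PDY}
export PDY cyc cycle COMINgfs COMINgdas
# --- End editor's note ---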
-# -# Usage: bsub < test_jgfs_tropcy_qc_reloc -# -# Data_In: COMINgfs=/com/gfs/prod/gfs.${PDY} -# COMINgdas=/com/gfs/prod/gdas.${PDY} -# -# Data_Out: /ptmpp2/Qingfu.Liu/com2/gfs/dev2/gfs.${PDY} -# -# Result verification: compare with the operational results -# (results might be slightly different due to 3hourly/hourly tracker) -############################################################## -###################################################################### -# Job specific troubleshooting instructions: -# see generic troubleshoot manual page -# -###################################################################### - -# include manual page below -#%end diff --git a/driver/gfs/test_jgfs_vminmon.sh b/driver/gfs/test_jgfs_vminmon.sh deleted file mode 100755 index 1483b3352e..0000000000 --- a/driver/gfs/test_jgfs_vminmon.sh +++ /dev/null @@ -1,84 +0,0 @@ -#!/bin/ksh - -#BSUB -o gfs_vminmon.o%J -#BSUB -e gfs_vminmon.o%J -#BSUB -J gfs_vminmon -#BSUB -q dev_shared -#BSUB -n 1 -#BSUB -R affinity[core] -#BSUB -M 80 -#BSUB -W 00:05 -#BSUB -a poe -#BSUB -P GFS-T2O - -set -x - -export NET='gfs' -export RUN='gfs' -export PDATE=${PDATE:-2016030206} - -export PDY=$(echo $PDATE | cut -c1-8) -export cyc=$(echo $PDATE | cut -c9-10) -export job=gfs_vminmon.${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export Z=${Z:-gz} -me=$(hostname | cut -c1) -export DATAROOT=${DATAROOT:-/gpfs/${me}d2/emc/da/noscrub/Edward.Safford/test_data} -export COMROOT=${COMROOT:-/ptmpp1/$LOGNAME/com} - - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v15.0.0 - - -############################################################# -# Load modules -############################################################# -. /usrx/local/Modules/3.2.9/init/ksh -module use /nwprod2/modulefiles -#module load grib_util -module load prod_util -#module load util_shared - -module list - - -############################################################# -# WCOSS environment settings -############################################################# -export POE=YES - - -############################################################# -# Set user specific variables -############################################################# -export MINMON_SUFFIX=testminmon -export NWTEST=${NWTEST:-/gpfs/${me}d2/emc/da/noscrub/Edward.Safford} -export HOMEgfs=${HOMEgfs:-${NWTEST}/gfs.${gfs_ver}} -export JOBGLOBAL=${JOBGLOBAL:-${HOMEgfs}/jobs} -#export HOMEminmon=${HOMEminmon:-${NWTEST}/global_shared.${global_shared_ver}} - -export COM_IN=${COM_IN:-${DATAROOT}} -export M_TANKverf=${M_TANKverf:-${COMROOT}/${MINMON_SUFFIX}} - -jlogdir=${jlogdir:-/ptmpp1/${LOGNAME}/jlogs} -if [[ ! 
-d ${jlogdir} ]]; then - mkdir -p ${jlogdir} -fi - -export jlogfile=${jlogfile:-${jlogdir}/${MINMON_SUFFIX}.${NET}.${RUN}.jlogfile} -if [[ -e ${jlogfile} ]]; then - rm -f ${jlogfile} -fi - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGFS_VMINMON - -exit - diff --git a/driver/gfs/test_jgfs_vminmon_cray.sh b/driver/gfs/test_jgfs_vminmon_cray.sh deleted file mode 100755 index fd3c6f19a3..0000000000 --- a/driver/gfs/test_jgfs_vminmon_cray.sh +++ /dev/null @@ -1,68 +0,0 @@ -#!/bin/ksh - -#BSUB -o gfs_vminmon.o%J -#BSUB -e gfs_vminmon.o%J -#BSUB -J gfs_vminmon -#BSUB -q dev -#BSUB -M 80 -#BSUB -W 00:05 -#BSUB -P GFS-T2O -#BSUB -R "select[mem>80] rusage[mem=80]" - -set -x - -export PDATE=${PDATE:-2016030800} - -############################################################# -# Specify whether the run is production or development -############################################################# -export PDY=$(echo $PDATE | cut -c1-8) -export cyc=$(echo $PDATE | cut -c9-10) -export job=gfs_vminmon.${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export envir=para -export DATAROOT=${DATAROOT:-/gpfs/hps3/emc/da/noscrub/$LOGNAME/test_data} -export COMROOT=${COMROOT:-/gpfs/hps2/ptmp/$LOGNAME/com} - - -############################################################# -# Specify versions -############################################################# -export gfs_ver=v15.0.0 - - -############################################################# -# Load modules -############################################################# -. $MODULESHOME/init/ksh - -module load prod_util -module load pm5 - -module list - - -############################################################# -# WCOSS environment settings -############################################################# -export POE=YES - - -############################################################# -# Set user specific variables -############################################################# -export MINMON_SUFFIX=${MINMON_SUFFIX:-testminmon} -export NWTEST=${NWTEST:-/gpfs/hps3/emc/da/noscrub/${LOGNAME}} -export HOMEgfs=${HOMEgfs:-${NWTEST}/gfs.${gfs_ver}} -export JOBGLOBAL=${JOBGLOBAL:-${HOMEgfs}/jobs} -export COM_IN=${COM_IN:-${DATAROOT}} -export M_TANKverf=${M_TANKverf:-${COMROOT}/${MINMON_SUFFIX}} - -############################################################# -# Execute job -############################################################# -$JOBGLOBAL/JGFS_VMINMON - -exit - diff --git a/driver/product/change_gfs_downstream_date.sh b/driver/product/change_gfs_downstream_date.sh deleted file mode 100755 index c9d4f10e29..0000000000 --- a/driver/product/change_gfs_downstream_date.sh +++ /dev/null @@ -1,46 +0,0 @@ -set -x - -# export cyc=12 -# export cyc=18 -export fhr=012 -export dir=$( pwd ) -export PDY=$(date -u +%Y%m%d) -export PDY1=$(expr $PDY - 1) - -export olddate=20200712 -export newdate=20200922 - -export gdas=/gpfs/dell1/nco/ops/com/gfs/prod/gdas.${PDY}/${cyc} -export gdasgp=/gpfs/dell1/nco/ops/com/gfs/prod/gdas.${PDY}/${cyc}/gempak -export gdasmeta=/gpfs/dell1/nco/ops/com/gfs/prod/gdas.${PDY}/${cyc}/gempak/meta - -export gdastest=/gpfs/dell2/ptmp/Boi.Vuong/com/gfs/prod/gdas.${PDY}/${cyc} -export gdastestgp=/gpfs/dell2/ptmp/Boi.Vuong/com/gfs/prod/gdas.${PDY}/${cyc}/gempak -export gdastestmeta=/gpfs/dell2/ptmp/Boi.Vuong/com/gfs/prod/gdas.${PDY}/${cyc}/gempak/meta - -for cyc in 00 06 12 18 -# for cyc in 00 -do -sed -i "s/${olddate}/${newdate}/g" 
run_JGDAS_ATMOS_GEMPAK_dell.sh_${cyc} -sed -i "s/${olddate}/${newdate}/g" run_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_${cyc} -sed -i "s/${olddate}/${newdate}/g" run_JGFS_ATMOS_GEMPAK_dell.sh_${cyc} -sed -i "s/${olddate}/${newdate}/g" run_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_${cyc} -sed -i "s/${olddate}/${newdate}/g" run_JGFS_ATMOS_AWIPS_G2_dell.sh_${cyc} -sed -i "s/${olddate}/${newdate}/g" run_JGFS_ATMOS_FBWIND_dell.sh_${cyc} -sed -i "s/${olddate}/${newdate}/g" run_JGFS_ATMOS_GEMPAK_META_dell.sh_${cyc} -sed -i "s/${olddate}/${newdate}/g" run_JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_${cyc} -sed -i "s/${olddate}/${newdate}/g" run_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_${cyc} -sed -i "s/${olddate}/${newdate}/g" run_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_${cyc} - -sed -i s/envir=prod/envir=para/g run_JGDAS_ATMOS_GEMPAK_dell.sh_${cyc} -sed -i s/envir=prod/envir=para/g run_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_${cyc} -sed -i s/envir=prod/envir=para/g run_JGFS_ATMOS_GEMPAK_dell.sh_${cyc} -sed -i s/envir=prod/envir=para/g run_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_${cyc} -sed -i s/envir=prod/envir=para/g run_JGFS_ATMOS_AWIPS_G2_dell.sh_${cyc} -sed -i s/envir=prod/envir=para/g run_JGFS_ATMOS_FBWIND_dell.sh_${cyc} -sed -i s/envir=prod/envir=para/g run_JGFS_ATMOS_GEMPAK_META_dell.sh_${cyc} -sed -i s/envir=prod/envir=para/g run_JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_${cyc} -sed -i s/envir=prod/envir=para/g run_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_${cyc} -sed -i s/envir=prod/envir=para/g run_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_${cyc} - -done diff --git a/driver/product/change_gfs_downstream_envir.sh b/driver/product/change_gfs_downstream_envir.sh deleted file mode 100755 index e10e22fb1f..0000000000 --- a/driver/product/change_gfs_downstream_envir.sh +++ /dev/null @@ -1,35 +0,0 @@ -set -x - -# export cyc=12 -# export cyc=18 -export fhr=012 -export dir=$( pwd ) -export PDY=$(date -u +%Y%m%d) -export PDY1=$(expr $PDY - 1) - -export olddate=20200106 -export newdate=20200712 - -export gdas=/gpfs/dell1/nco/ops/com/gfs/prod/gdas.${PDY}/${cyc} -export gdasgp=/gpfs/dell1/nco/ops/com/gfs/prod/gdas.${PDY}/${cyc}/gempak -export gdasmeta=/gpfs/dell1/nco/ops/com/gfs/prod/gdas.${PDY}/${cyc}/gempak/meta - -export gdastest=/gpfs/dell2/ptmp/Boi.Vuong/com/gfs/prod/gdas.${PDY}/${cyc} -export gdastestgp=/gpfs/dell2/ptmp/Boi.Vuong/com/gfs/prod/gdas.${PDY}/${cyc}/gempak -export gdastestmeta=/gpfs/dell2/ptmp/Boi.Vuong/com/gfs/prod/gdas.${PDY}/${cyc}/gempak/meta - -# for cyc in 00 06 12 18 -for cyc in 00 -do - vi run_JGDAS_ATMOS_GEMPAK_dell.sh_${cyc} - vi run_JGDAS_ATMOS_GEMPAK_META_NCDC_dell.sh_${cyc} - vi run_JGFS_ATMOS_GEMPAK_dell.sh_${cyc} - vi run_JGFS_ATMOS_AWIPS_20KM_1P0DEG_dell.sh_${cyc} - vi run_JGFS_ATMOS_AWIPS_G2_dell.sh_${cyc} - vi run_JGFS_ATMOS_FBWIND_dell.sh_${cyc} - vi run_JGFS_ATMOS_GEMPAK_META_dell.sh_${cyc} - vi run_JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF_dell.sh_${cyc} - vi run_JGFS_ATMOS_PGRB2_SPEC_NPOESS_dell.sh_${cyc} - vi run_JGFS_ATMOS_GEMPAK_PGRB2_SPEC_dell.sh_${cyc} - -done diff --git a/driver/product/compile_gfsv160.sh b/driver/product/compile_gfsv160.sh deleted file mode 100755 index 0c80153378..0000000000 --- a/driver/product/compile_gfsv160.sh +++ /dev/null @@ -1,17 +0,0 @@ - - -set -x - -export version=v16.0.0 - -cd /gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git/gfs.$version/sorc - -build_gfs_fbwndgfs.sh -build_gfs_util.sh - -cp /gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git/trim_rh.sh /gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git/gfs.$version/ush - -cd 
/gpfs/dell2/emc/modeling/noscrub/Boi.Vuong/git/gfs.$version/util/sorc -compile_gfs_util_wcoss.sh - - diff --git a/driver/product/compile_gfsv160_hera.sh b/driver/product/compile_gfsv160_hera.sh deleted file mode 100755 index 2d555e9977..0000000000 --- a/driver/product/compile_gfsv160_hera.sh +++ /dev/null @@ -1,15 +0,0 @@ - - -set -x - -export version=v16.0.0 - -cd /scratch2/NCEPDEV/stmp3/Boi.Vuong/gfs.v16.0.0/sorc - -./build_gfs_fbwndgfs.sh -./build_gfs_util.sh - -cp /scratch2/NCEPDEV/stmp3/Boi.Vuong/trim_rh.sh /scratch2/NCEPDEV/stmp3/Boi.Vuong/gfs.$version/ush - -cd /scratch2/NCEPDEV/stmp3/Boi.Vuong/gfs.$version/util/sorc -sh compile_gfs_util_wcoss.sh diff --git a/driver/product/rmo_clean_gfs_output b/driver/product/rmo_clean_gfs_output deleted file mode 100755 index 2eecf595b8..0000000000 --- a/driver/product/rmo_clean_gfs_output +++ /dev/null @@ -1,23 +0,0 @@ - -set -x - -cd /gpfs/dell2/ptmp/Boi.Vuong/output -ls -l -sleep 3 -cd /gpfs/dell2/ptmp/Boi.Vuong -ls -l -sleep 3 - -rm -rf /gpfs/dell2/ptmp/Boi.Vuong/output -rm -rf /gpfs/dell2/ptmp/Boi.Vuong/com - -mkdir -m 775 -p /gpfs/dell2/ptmp/Boi.Vuong -mkdir -m 775 -p /gpfs/dell2/ptmp/Boi.Vuong/output -mkdir -m 775 -p /gpfs/dell2/ptmp/Boi.Vuong/com - -chmod -R 775 /gpfs/dell2/ptmp/Boi.Vuong/output /gpfs/dell2/ptmp/Boi.Vuong/com - -cd /gpfs/dell2/ptmp/Boi.Vuong/output -ls -exit - diff --git a/driver/product/run_JGFS_NCEPPOST b/driver/product/run_JGFS_NCEPPOST deleted file mode 100755 index b4a6baf9f9..0000000000 --- a/driver/product/run_JGFS_NCEPPOST +++ /dev/null @@ -1,136 +0,0 @@ -#!/bin/sh - -#BSUB -o gfs_post.o%J -#BSUB -e gfs_post.o%J -#BSUB -J gfs_post -#BSUB -extsched 'CRAYLINUX[]' -#BSUB -W 02:00 -#BSUB -q devhigh -#BSUB -P GFS-T2O -#BSUB -M 1000 -#BSUB -cwd /gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work/gfs.v14.1.0/driver - -set -x - -export NODES=3 -export ntasks=24 -export ptile=8 -export threads=1 - -# specify user's own post working directory for testing -export svndir=/gpfs/hps/emc/global/noscrub/emc.glopara/svn/gfs/work -export MP_LABELIO=yes - -export OMP_NUM_THREADS=$threads - - -############################################ -# Loading module -############################################ -. 
$MODULESHOME/init/ksh -module load PrgEnv-intel ESMF-intel-haswell/3_1_0rp5 cfp-intel-sandybridge iobuf craype-hugepages2M craype-haswell -#module load cfp-intel-sandybridge/1.1.0 -module use /gpfs/hps/nco/ops/nwprod/modulefiles -module load prod_envir/1.1.0 -module load prod_util/1.0.4 -module load grib_util/1.0.3 -##module load crtm-intel/2.2.4 -module list - -export hwrf_ver=v10.0.6 - -# specify PDY (the cycle start yyyymmdd) and cycle -export CDATE=2017052500 -export PDY=$(echo $CDATE | cut -c1-8) -export cyc=$(echo $CDATE | cut -c9-10) -export cycle=t${cyc}z - - -# specify the directory environment for executable, it's either para or prod -export envir=prod - -# set up running dir - -export job=gfs_post_${cyc} -export pid=${pid:-$$} -export jobid=${job}.${pid} - -export DATA=/gpfs/hps/stmp/$LOGNAME/test/$jobid -mkdir -p $DATA -cd $DATA -rm -f ${DATA}/* - -#################################### -# Specify RUN Name and model -#################################### -export NET=gfs -export RUN=gfs - -#################################### -# Determine Job Output Name on System -#################################### -#export pgmout="OUTPUT.${pid}" -#export pgmerr=errfile - -#################################### -# SENDSMS - Flag Events on SMS -# SENDCOM - Copy Files From TMPDIR to $COMOUT -# SENDDBN - Issue DBNet Client Calls -# RERUN - Rerun posts from beginning (default no) -# VERBOSE - Specify Verbose Output in global_postgp.sh -#################################### -export SAVEGES=NO -export SENDSMS=NO -export SENDCOM=YES -export SENDDBN=NO -export RERUN=NO -export VERBOSE=YES - -export HOMEglobal=${svndir}/global_shared.v14.1.0 -export HOMEgfs=${svndir}/gfs.v14.1.0 -############################################## -# Define COM directories -############################################## -##export COMIN=$COMROOThps/gfs/para/gfs.${PDY} -export COMIN=/gpfs/hps/ptmp/emc.glopara/com2/gfs/para/gfs.${PDY} -export COMOUT=/gpfs/hps/ptmp/$LOGNAME/com2/gfs/test/gfs.$PDY -mkdir -p $COMOUT - -############################################## -# Define GES directories -############################################## -gespath=$GESROOThps -export GESdir=$gespath/${RUN}.${PDY} - -#################################### -# Specify Forecast Hour Range -#################################### - -export allfhr="anl 00 01 06 12 60 120 180 240 252 384" -for post_times in $allfhr -do -export post_times - -date - -#export OUTTYP=4 -# need to set FIXglobal to global share superstructure if testing post in non -# super structure environement -export FIXglobal=$svndir/global_shared.v14.1.0/fix -export APRUN="aprun -j 1 -n${ntasks} -N${ptile} -d${threads} -cc depth" -export nemsioget=$svndir/global_shared.v14.1.0/exec/nemsio_get - -export KEEPDATA=YES -export REMOVE_DATA=NO -#export POSTGRB2TBL=$HOMEglobal/parm/params_grib2_tbl_new -$HOMEgfs/jobs/JGFS_NCEPPOST - -############################################################# - -date - -echo $? 
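# Editor's note (descriptive only): each pass of this loop posts a single
# forecast hour; post_times is exported so JGFS_NCEPPOST and the ex-script it
# calls know which hour to process, and the `echo $?` above merely reports
# the J-job's exit status for that hour rather than aborting on failure.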
- -done - - diff --git a/driver/product/run_JGFS_NCEPPOST.sh b/driver/product/run_JGFS_NCEPPOST.sh deleted file mode 100755 index e2169b1215..0000000000 --- a/driver/product/run_JGFS_NCEPPOST.sh +++ /dev/null @@ -1,115 +0,0 @@ -#!/bin/sh - -#BSUB -a poe -#BSUB -P GFS-T2O -#BSUB -eo gfspost1.dayfile.%J -#BSUB -oo gfspost1.dayfile.%J -#BSUB -J gfspost1 -#BSUB -network type=sn_all:mode=US -#BSUB -q "debug2" -#BSUB -n 24 -#BSUB -R span[ptile=8] -#BSUB -R affinity[core(3)] -#BSUB -x -#BSUB -W 00:15 - -############################################################# -# Function been tested: GFS master pgb file for a giving hour. -# -# Calling sequence: run_JGFS_NCEPPOST.sh -> JGFS_NCEPPOST -> exgfs_nceppost.sh -> global_nceppost.sh -> ncep_post -# -# Initial condition: CDATE=2016020900 (where /global/noscrub/emc.glopara/com/gfs/para/gfs.${PDY}${cyc} has data -# post_times="12" (Which hour of the master grib2 file to generate) -# GRIBVERSION=${GRIBVERSION:-'grib2'} (Grib2 data for the master pgb) -# -# -# Usage: bsub $POST_OUT + echo 'export ECF_HOST=${ECF_HOST}' >> $POST_OUT + echo 'export ECF_PORT=${ECF_PORT}' >> $POST_OUT + echo 'export ECF_PASS=${ECF_PASS}' >> $POST_OUT + echo 'export ECF_TRYNO=${ECF_TRYNO}' >> $POST_OUT + echo 'export ECF_RID=${ECF_RID}' >> $POST_OUT +fi + +# Define error handler +ERROR() { + set +ex + if [ "$1" -eq 0 ]; then + msg="Killed by signal (likely via qdel)" + else + msg="Killed by signal $1" + fi + ecflow_client --abort="$msg" + echo $msg + if [[ " ops.prod ops.para " =~ " $(whoami) " ]]; then + echo "# Trap Caught" >>$POST_OUT + fi + trap $1; exit $1 +} +# Trap all error and exit signals +trap 'ERROR $?' ERR EXIT + diff --git a/ecf/include/tail.h b/ecf/include/tail.h new file mode 100644 index 0000000000..c6686c5ca1 --- /dev/null +++ b/ecf/include/tail.h @@ -0,0 +1,3 @@ +timeout 300 ecflow_client --complete # Notify ecFlow of a normal end +trap 0 # Remove all traps +exit 0 # End the shell diff --git a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf new file mode 100755 index 0000000000..294d5c25d9 --- /dev/null +++ b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_diag.ecf @@ -0,0 +1,56 @@ +#PBS -S /bin/bash +#PBS -N enkf%RUN%_diag_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:06:00 +#PBS -l select=1:mpiprocs=48:ompthreads=1:ncpus=48:mem=24GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load cfp/${cfp_ver} +module load hdf5/${hdf5_ver} +module load netcdf/${netcdf_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z +export USE_CFP=YES + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGDAS_ENKF_DIAG + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf new file mode 100755 index 0000000000..b94e3a18e3 --- /dev/null +++ b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_select_obs.ecf @@ -0,0 +1,56 @@ +#PBS -S /bin/bash +#PBS -N enkf%RUN%_select_obs_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:10:00 +#PBS -l select=12:mpiprocs=40:ompthreads=3:ncpus=120 +#PBS -l place=vscatter:exclhost +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load python/${python_ver} +module load hdf5/${hdf5_ver} +module load netcdf/${netcdf_ver} +module load crtm/${crtm_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGDAS_ENKF_SELECT_OBS + +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf new file mode 100755 index 0000000000..6611afff52 --- /dev/null +++ b/ecf/scripts/enkfgdas/analysis/create/jenkfgdas_update.ecf @@ -0,0 +1,57 @@ +#PBS -S /bin/bash +#PBS -N enkf%RUN%_update_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:30:00 +#PBS -l select=35:mpiprocs=9:ompthreads=14:ncpus=126 +#PBS -l place=vscatter:exclhost +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load cfp/${cfp_ver} +module load python/${python_ver} +module load hdf5/${hdf5_ver} +module load netcdf/${netcdf_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z +export USE_CFP=YES + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGDAS_ENKF_UPDATE + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf b/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf new file mode 100755 index 0000000000..1ed2568d61 --- /dev/null +++ b/ecf/scripts/enkfgdas/analysis/recenter/ecen/jenkfgdas_ecen.ecf @@ -0,0 +1,59 @@ +#PBS -S /bin/bash +#PBS -N enkf%RUN%_ecen_%FHRGRP%_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:10:00 +#PBS -l select=3:mpiprocs=32:ompthreads=4:ncpus=128 +#PBS -l place=vscatter:exclhost +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load python/${python_ver} +module load hdf5/${hdf5_ver} +module load netcdf/${netcdf_ver} + +module list + +############################################################# +# environment settings +############################################################# +export FHRGRP=%FHRGRP% +export cyc=%CYC% +export cycle=t%CYC%z +export FHMIN_ECEN=$FHRGRP +export FHMAX_ECEN=$FHRGRP +export FHOUT_ECEN=$FHRGRP + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGDAS_ENKF_ECEN + +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf b/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf new file mode 100755 index 0000000000..39d4ec2e8d --- /dev/null +++ b/ecf/scripts/enkfgdas/analysis/recenter/jenkfgdas_sfc.ecf @@ -0,0 +1,57 @@ +#PBS -S /bin/bash +#PBS -N enkf%RUN%_sfc_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:06:00 +#PBS -l select=1:mpiprocs=80:ompthreads=1:ncpus=80:mem=80GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load python/${python_ver} +module load libjpeg/${libjpeg_ver} +module load hdf5/${hdf5_ver} +module load netcdf/${netcdf_ver} +module load grib_util/${grib_util_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGDAS_ENKF_SFC + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf b/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf new file mode 100755 index 0000000000..923d208350 --- /dev/null +++ b/ecf/scripts/enkfgdas/forecast/jenkfgdas_fcst.ecf @@ -0,0 +1,55 @@ +#PBS -S /bin/bash +#PBS -N enkf%RUN%_fcst_%ENSGRP%_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:40:00 +#PBS -l select=4:mpiprocs=128:ompthreads=1:ncpus=128 +#PBS -l place=vscatter:exclhost +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load hdf5/${hdf5_ver} +module load netcdf/${netcdf_ver} + +module list + +############################################################# +# environment settings +###############i############################################# +export ENSGRP=%ENSGRP% +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGDAS_ENKF_FCST + +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/enkfgdas/post/.gitignore b/ecf/scripts/enkfgdas/post/.gitignore new file mode 100644 index 0000000000..b82816a86e --- /dev/null +++ b/ecf/scripts/enkfgdas/post/.gitignore @@ -0,0 +1,2 @@ +# Ignore these +jenkfgdas_post_f*.ecf diff --git a/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf b/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf new file mode 100755 index 0000000000..6627b97c10 --- /dev/null +++ b/ecf/scripts/enkfgdas/post/jenkfgdas_post_master.ecf @@ -0,0 +1,57 @@ +#PBS -S /bin/bash +#PBS -N enkf%RUN%_post_f%FHOUT_EPOS%_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:15:00 +#PBS -l select=3:mpiprocs=32:ompthreads=4:ncpus=128 +#PBS -l place=vscatter:exclhost +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load hdf5/${hdf5_ver} +module load netcdf/${netcdf_ver} + +module list + +############################################################# +# environment settings +############################################################# +export FHMIN_EPOS=%FHOUT_EPOS% +export FHMAX_EPOS=%FHOUT_EPOS% +export FHOUT_EPOS=%FHOUT_EPOS% +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +$HOMEgfs/jobs/JGDAS_ENKF_POST + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis.ecf b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis.ecf new file mode 100755 index 0000000000..36b9272204 --- /dev/null +++ b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis.ecf @@ -0,0 +1,75 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_analysis_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:50:00 +#PBS -l select=52:mpiprocs=15:ompthreads=8:ncpus=120 +#PBS -l place=vscatter:exclhost +#PBS -l debug=true + +export model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load cfp/${cfp_ver} +module load python/${python_ver} +module load libjpeg/${libjpeg_ver} +module load hdf5/${hdf5_ver} +module load netcdf/${netcdf_ver} +module load grib_util/${grib_util_ver} +module load crtm/${crtm_ver} + +module list + +############################################################# +# environment settings +############################################################# +export FORT_BUFFERED=true +export OMP_NUM_THREADS_CY=128 +export NTHREADS=$OMP_NUM_THREADS_CY +export NTHREADS_GSI=$threads +export cyc=%CYC% +export cycle=t%CYC%z +export USE_CFP=YES +export PREINP='' + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGLOBAL_ATMOS_ANALYSIS +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual +###################################################################### +# Purpose: To execute the job that prepares initial condition for +# gdas. 
+###################################################################### + +###################################################################### +# Job specific troubleshooting instructions: +# see generic troubleshoot manual page +# +###################################################################### + +# include manual page below +%end diff --git a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf new file mode 100755 index 0000000000..41601c4de8 --- /dev/null +++ b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_calc.ecf @@ -0,0 +1,65 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_analysis_calc_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:10:00 +#PBS -l select=1:mpiprocs=128:ompthreads=1:ncpus=128 +#PBS -l place=vscatter:exclhost +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load python/${python_ver} +module load hdf5/${hdf5_ver} +module load netcdf/${netcdf_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual +###################################################################### +# Purpose: To execute the job that prepares initial condition for +# gdas. 
+###################################################################### + +###################################################################### +# Job specific troubleshooting instructions: +# see generic troubleshoot manual page +# +###################################################################### + +# include manual page below +%end diff --git a/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_diag.ecf b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_diag.ecf new file mode 100755 index 0000000000..f233fed21e --- /dev/null +++ b/ecf/scripts/gdas/atmos/analysis/jgdas_atmos_analysis_diag.ecf @@ -0,0 +1,66 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_analysis_diag_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:10:00 +#PBS -l select=1:mpiprocs=96:ompthreads=1:ncpus=96:mem=48GB +#PBS -l place=vscatter +#PBS -l debug=true + +export model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load cfp/${cfp_ver} +module load hdf5/${hdf5_ver} +module load netcdf/${netcdf_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z +export USE_CFP=YES + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGDAS_ATMOS_ANALYSIS_DIAG +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual +###################################################################### +# Purpose: To execute the job that prepares initial condition for +# gdas. 
+###################################################################### + +###################################################################### +# Job specific troubleshooting instructions: +# see generic troubleshoot manual page +# +###################################################################### + +# include manual page below +%end diff --git a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf new file mode 100755 index 0000000000..039ca56852 --- /dev/null +++ b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak.ecf @@ -0,0 +1,54 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_gempak_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:15:00 +#PBS -l select=1:ncpus=2:mpiprocs=2:mem=4GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load cfp/${cfp_ver} +module load gempak/${gempak_ver} + +module list + +export cyc=%CYC% +export cycle=t%CYC%z +export USE_CFP=YES + +############################################################ +# CALL executable job script here +############################################################ +export model=gdas + +${HOMEgfs}/jobs/JGDAS_ATMOS_GEMPAK + +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf new file mode 100755 index 0000000000..61f7f0a17f --- /dev/null +++ b/ecf/scripts/gdas/atmos/gempak/jgdas_atmos_gempak_meta_ncdc.ecf @@ -0,0 +1,49 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_gempak_meta_ncdc_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:30:00 +#PBS -l select=1:ncpus=1:mem=1GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export model=%model:gdas% +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load gempak/${gempak_ver} + +module list + +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gdas/atmos/obsproc/dump/jgdas_atmos_tropcy_qc_reloc.ecf b/ecf/scripts/gdas/atmos/obsproc/dump/jgdas_atmos_tropcy_qc_reloc.ecf new file mode 100755 index 0000000000..5322c363ac --- /dev/null +++ b/ecf/scripts/gdas/atmos/obsproc/dump/jgdas_atmos_tropcy_qc_reloc.ecf @@ -0,0 +1,52 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_tropcy_qc_reloc_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:05:00 +#PBS -l select=1:ncpus=1:mem=1GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load libjpeg/${libjpeg_ver} +module load grib_util/${grib_util_ver} +module load wgrib2/${wgrib2_ver} + +module list + +export PROCESS_TROPCY=YES +export DO_RELOCATE=YES +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC + +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf b/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf new file mode 100755 index 0000000000..e0ab513b33 --- /dev/null +++ b/ecf/scripts/gdas/atmos/obsproc/prep/jgdas_atmos_emcsfc_sfc_prep.ecf @@ -0,0 +1,50 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_emcsfc_sfc_prep_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:08:00 +#PBS -l select=1:ncpus=1:mem=2GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load libjpeg/${libjpeg_ver} +module load grib_util/${grib_util_ver} +module load wgrib2/${wgrib2_ver} + +module list + +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGLOBAL_ATMOS_EMCSFC_SFC_PREP + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gdas/atmos/post/.gitignore b/ecf/scripts/gdas/atmos/post/.gitignore new file mode 100644 index 0000000000..851760300f --- /dev/null +++ b/ecf/scripts/gdas/atmos/post/.gitignore @@ -0,0 +1,3 @@ +# Ignore these +jgdas_atmos_post_anl.ecf +jgdas_atmos_post_f*.ecf diff --git a/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_manager.ecf b/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_manager.ecf new file mode 100755 index 0000000000..55d7932aaf --- /dev/null +++ b/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_manager.ecf @@ -0,0 +1,44 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_post_manager_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=01:15:00 +#PBS -l select=1:ncpus=1:mem=1GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ + +module list + +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGLOBAL_ATMOS_POST_MANAGER + +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf b/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf new file mode 100755 index 0000000000..f88fdcdaf9 --- /dev/null +++ b/ecf/scripts/gdas/atmos/post/jgdas_atmos_post_master.ecf @@ -0,0 +1,66 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_post_%FHR%_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:12:00 +#PBS -l select=1:mpiprocs=126:ompthreads=1:ncpus=126 +#PBS -l place=vscatter:exclhost +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load cfp/${cfp_ver} +module load libjpeg/${libjpeg_ver} +module load hdf5/${hdf5_ver} +module load netcdf/${netcdf_ver} +module load grib_util/${grib_util_ver} +module load crtm/${crtm_ver} +module load g2tmpl/${g2tmpl_ver} +module load wgrib2/${wgrib2_ver} + +module list + +############################################################# +# environment settings +############################################################# +export FHR=%FHR% +export post_times=%HR% +export FHRGRP=%FHRGRP% +export FHRLST=%FHRLST% +export cyc=%CYC% +export cycle=t%CYC%z +export USE_CFP=YES +export g2tmpl_ver=v${g2tmpl_ver} + +############################################################ +# CALL executable job script here +############################################################ +$HOMEgfs/jobs/JGLOBAL_ATMOS_NCEPPOST + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gdas/atmos/post_processing/jgdas_atmos_chgres_forenkf.ecf b/ecf/scripts/gdas/atmos/post_processing/jgdas_atmos_chgres_forenkf.ecf new file mode 100755 index 0000000000..6993e4ae4f --- /dev/null +++ b/ecf/scripts/gdas/atmos/post_processing/jgdas_atmos_chgres_forenkf.ecf @@ -0,0 +1,57 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_enkf_chgres_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:10:00 +#PBS -l select=1:ncpus=3:mpiprocs=3:ompthreads=1:mem=200GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load cfp/${cfp_ver} +module load python/${python_ver} +module load hdf5/${hdf5_ver} +module load netcdf/${netcdf_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z +export USE_CFP=YES + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGDAS_ATMOS_CHGRES_FORENKF + +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf new file mode 100755 index 0000000000..5bf31996d9 --- /dev/null +++ b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfozn.ecf @@ -0,0 +1,55 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_verfozn_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:05:00 +#PBS -l select=1:ncpus=1:mem=1GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load libjpeg/${libjpeg_ver} +module load netcdf/${netcdf_ver} +module load grib_util/${grib_util_ver} +module load util_shared/${util_shared_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z +export VERBOSE=YES + +############################################################ +# CALL executable job script here +############################################################ +$HOMEgfs/jobs/JGDAS_ATMOS_VERFOZN + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf new file mode 100755 index 0000000000..b9ee264998 --- /dev/null +++ b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_verfrad.ecf @@ -0,0 +1,55 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_verfrad_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:20:00 +#PBS -l select=1:ncpus=1:mem=5GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load libjpeg/${libjpeg_ver} +module load netcdf/${netcdf_ver} +module load grib_util/${grib_util_ver} +module load util_shared/${util_shared_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z +export VERBOSE=YES + +############################################################ +# CALL executable job script here +############################################################ +$HOMEgfs/jobs/JGDAS_ATMOS_VERFRAD + +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf new file mode 100755 index 0000000000..71ef1155a9 --- /dev/null +++ b/ecf/scripts/gdas/atmos/verf/jgdas_atmos_vminmon.ecf @@ -0,0 +1,53 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_vminmon_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:05:00 +#PBS -l select=1:ncpus=1:mem=1GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load perl/${perl_ver} +module load util_shared/${util_shared_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z +export VERBOSE=YES + +############################################################ +# CALL executable job script here +############################################################ +$HOMEgfs/jobs/JGDAS_ATMOS_VMINMON + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gdas/jgdas_forecast.ecf b/ecf/scripts/gdas/jgdas_forecast.ecf new file mode 100755 index 0000000000..69c8e17801 --- /dev/null +++ b/ecf/scripts/gdas/jgdas_forecast.ecf @@ -0,0 +1,54 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_forecast_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=01:00:00 +#PBS -l select=27:mpiprocs=32:ompthreads=3:ncpus=96 +#PBS -l place=vscatter:exclhost +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load hdf5/${hdf5_ver} +module load netcdf/${netcdf_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGLOBAL_FORECAST + +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gdas/wave/init/jgdas_wave_init.ecf b/ecf/scripts/gdas/wave/init/jgdas_wave_init.ecf new file mode 100755 index 0000000000..569a7c0894 --- /dev/null +++ b/ecf/scripts/gdas/wave/init/jgdas_wave_init.ecf @@ -0,0 +1,64 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_wave_init_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:10:00 +#PBS -l select=1:mpiprocs=11:ompthreads=1:ncpus=11:mem=2GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +## Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-pals/${cray_pals_ver} +module load cfp/${cfp_ver} + +module list + +############################################################ +## environment settings +############################################################ +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGLOBAL_WAVE_INIT + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual +###################################################################### +# Purpose: +# +# +###################################################################### + +###################################################################### +# Job specific troubleshooting instructions: +# see generic troubleshoot manual page +# +###################################################################### + +# include manual page below +%end diff --git a/ecf/scripts/gdas/wave/post/jgdas_wave_postpnt.ecf b/ecf/scripts/gdas/wave/post/jgdas_wave_postpnt.ecf new file mode 100755 index 0000000000..20db8b240d --- /dev/null +++ b/ecf/scripts/gdas/wave/post/jgdas_wave_postpnt.ecf @@ -0,0 +1,65 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_wave_postpnt_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:12:00 +#PBS -l select=4:mpiprocs=50:ompthreads=1:ncpus=50:mem=10GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +## Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load cfp/${cfp_ver} + +module list + +############################################################ +## environment settings +############################################################ +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGLOBAL_WAVE_POST_PNT + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual +###################################################################### +# Purpose: +# +# +###################################################################### + +###################################################################### +# Job specific troubleshooting instructions: +# see generic troubleshoot manual page +# +###################################################################### + +# include manual page below +%end diff --git a/ecf/scripts/gdas/wave/post/jgdas_wave_postsbs.ecf b/ecf/scripts/gdas/wave/post/jgdas_wave_postsbs.ecf new file mode 100755 index 0000000000..29d5a467d3 --- /dev/null +++ b/ecf/scripts/gdas/wave/post/jgdas_wave_postsbs.ecf @@ -0,0 +1,68 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_wave_postsbs_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:20:00 +#PBS -l select=1:mpiprocs=8:ompthreads=1:ncpus=8:mem=10GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +## Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load cfp/${cfp_ver} +module load libjpeg/${libjpeg_ver} +module load grib_util/${grib_util_ver} +module load wgrib2/${wgrib2_ver} + +module list + +############################################################ +## environment settings +############################################################ +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGLOBAL_WAVE_POST_SBS + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual +###################################################################### +# Purpose: +# +# +###################################################################### + +###################################################################### +# Job specific troubleshooting instructions: +# see generic troubleshoot manual page +# +###################################################################### + +# include manual page below +%end diff --git a/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf b/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf new file mode 100755 index 0000000000..612a29eb71 --- /dev/null +++ b/ecf/scripts/gdas/wave/prep/jgdas_wave_prep.ecf @@ -0,0 +1,72 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_wave_prep_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:10:00 +#PBS -l select=1:mpiprocs=5:ompthreads=1:ncpus=5:mem=100GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +## Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load cfp/${cfp_ver} +module load cdo/${cdo_ver} +module load hdf5/${hdf5_ver} +module load netcdf/${netcdf_ver} +module load udunits/${udunits_ver} +module load gsl/${gsl_ver} +module load nco/${nco_ver} +module load wgrib2/${wgrib2_ver} + +module list + +############################################################ +## environment settings +############################################################ +export cyc=%CYC% +export cycle=t%CYC%z +export USE_CFP=YES +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGLOBAL_WAVE_PREP + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual +###################################################################### +# Purpose: +# +# +###################################################################### + +###################################################################### +# Job specific troubleshooting instructions: +# see generic troubleshoot manual page +# +###################################################################### + +# include manual page below +%end diff --git a/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis.ecf b/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis.ecf new file mode 100755 index 0000000000..a30eceae57 --- /dev/null +++ b/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis.ecf @@ -0,0 +1,77 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_analysis_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:40:00 +#PBS -l select=55:mpiprocs=15:ompthreads=8:ncpus=120 +#PBS -l place=vscatter:exclhost +#PBS -l debug=true + +export model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load cfp/${cfp_ver} +module load python/${python_ver} +module load libjpeg/${libjpeg_ver} +module load hdf5/${hdf5_ver} +module load netcdf/${netcdf_ver} +module load grib_util/${grib_util_ver} +module load crtm/${crtm_ver} + +module list + +############################################################# +# environment settings +############################################################# +export FORT_BUFFERED=true +export OMP_NUM_THREADS_CY=128 +export NTHREADS=$OMP_NUM_THREADS_CY +export NTHREADS_GSI=$threads + +export cyc=%CYC% +export cycle=t%CYC%z +export USE_CFP=YES +export PREINP='' + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGLOBAL_ATMOS_ANALYSIS + +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual +###################################################################### +# Purpose: To execute the job that prepares initial condition for +# gdas. 
+###################################################################### + +###################################################################### +# Job specific troubleshooting instructions: +# see generic troubleshoot manual page +# +###################################################################### + +# include manual page below +%end diff --git a/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf b/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf new file mode 100755 index 0000000000..41601c4de8 --- /dev/null +++ b/ecf/scripts/gfs/atmos/analysis/jgfs_atmos_analysis_calc.ecf @@ -0,0 +1,65 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_analysis_calc_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:10:00 +#PBS -l select=1:mpiprocs=128:ompthreads=1:ncpus=128 +#PBS -l place=vscatter:exclhost +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load python/${python_ver} +module load hdf5/${hdf5_ver} +module load netcdf/${netcdf_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual +###################################################################### +# Purpose: To execute the job that prepares initial condition for +# gdas. 
+###################################################################### + +###################################################################### +# Job specific troubleshooting instructions: +# see generic troubleshoot manual page +# +###################################################################### + +# include manual page below +%end diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf new file mode 100755 index 0000000000..c69a02fedf --- /dev/null +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak.ecf @@ -0,0 +1,56 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_gempak_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=03:00:00 +#PBS -l select=1:ncpus=28:mpiprocs=28:mem=2GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load cfp/${cfp_ver} +module load gempak/${gempak_ver} +module load wgrib2/${wgrib2_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGFS_ATMOS_GEMPAK + +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + + +%include +%manual + +%end diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf new file mode 100755 index 0000000000..69f736b705 --- /dev/null +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_meta.ecf @@ -0,0 +1,55 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_gempak_meta_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=03:00:00 +#PBS -l select=1:ncpus=23:mpiprocs=23:mem=2GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load cfp/${cfp_ver} +module load gempak/${gempak_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z +export USE_CFP=YES + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGFS_ATMOS_GEMPAK_META + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf new file mode 100755 index 0000000000..d72622b93a --- /dev/null +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_gempak_ncdc_upapgif.ecf @@ -0,0 +1,52 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_gempak_upapgif_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=02:00:00 +#PBS -l select=1:ncpus=1:mem=1GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load gempak/${gempak_ver} +module load imagemagick/${imagemagick_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF + +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf new file mode 100755 index 0000000000..8837e94948 --- /dev/null +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_npoess_pgrb2_0p5deg.ecf @@ -0,0 +1,51 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_npoess_pgrb2_0p5deg_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=02:00:00 +#PBS -l select=1:ncpus=1:mem=1GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load wgrib2/${wgrib2_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf new file mode 100755 index 0000000000..aa89d37bfc --- /dev/null +++ b/ecf/scripts/gfs/atmos/gempak/jgfs_atmos_pgrb2_spec_gempak.ecf @@ -0,0 +1,53 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_pgrb2_spec_gempak_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:30:00 +#PBS -l select=1:ncpus=1:mem=1GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load gempak/${gempak_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC + +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gfs/atmos/obsproc/dump/jgfs_atmos_tropcy_qc_reloc.ecf b/ecf/scripts/gfs/atmos/obsproc/dump/jgfs_atmos_tropcy_qc_reloc.ecf new file mode 100755 index 0000000000..5322c363ac --- /dev/null +++ b/ecf/scripts/gfs/atmos/obsproc/dump/jgfs_atmos_tropcy_qc_reloc.ecf @@ -0,0 +1,52 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_tropcy_qc_reloc_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:05:00 +#PBS -l select=1:ncpus=1:mem=1GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load libjpeg/${libjpeg_ver} +module load grib_util/${grib_util_ver} +module load wgrib2/${wgrib2_ver} + +module list + +export PROCESS_TROPCY=YES +export DO_RELOCATE=YES +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_emcsfc_sfc_prep.ecf b/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_emcsfc_sfc_prep.ecf new file mode 100755 index 0000000000..f0a1a3346f --- /dev/null +++ b/ecf/scripts/gfs/atmos/obsproc/prep/jgfs_atmos_emcsfc_sfc_prep.ecf @@ -0,0 +1,50 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_emcsfc_sfc_prep_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:07:00 +#PBS -l select=1:ncpus=1:mem=2GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load libjpeg/${libjpeg_ver} +module load grib_util/${grib_util_ver} +module load wgrib2/${wgrib2_ver} + +module list + +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGLOBAL_ATMOS_EMCSFC_SFC_PREP + +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gfs/atmos/post/.gitignore b/ecf/scripts/gfs/atmos/post/.gitignore new file mode 100644 index 0000000000..01d641c46d --- /dev/null +++ b/ecf/scripts/gfs/atmos/post/.gitignore @@ -0,0 +1,3 @@ +# Ignore these +jgfs_atmos_post_anl.ecf +jgfs_atmos_post_f*.ecf diff --git a/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_manager.ecf b/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_manager.ecf new file mode 100755 index 0000000000..fc22e941bc --- /dev/null +++ b/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_manager.ecf @@ -0,0 +1,44 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_post_manager_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=04:00:00 +#PBS -l select=1:ncpus=1:mem=1GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ + +module list + +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGLOBAL_ATMOS_POST_MANAGER + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf b/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf new file mode 100755 index 0000000000..ad717147fc --- /dev/null +++ b/ecf/scripts/gfs/atmos/post/jgfs_atmos_post_master.ecf @@ -0,0 +1,71 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_post_%FHR%_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:20:00 +#PBS -l select=1:mpiprocs=126:ompthreads=1:ncpus=126 +#PBS -l place=vscatter:exclhost +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +export FHRGRP=%FHRGRP% +export FHRLST=%FHRLST% +export FHR=%FHR% +export post_times=%HR% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load cfp/${cfp_ver} +module load libjpeg/${libjpeg_ver} +module load hdf5/${hdf5_ver} +module load netcdf/${netcdf_ver} +module load grib_util/${grib_util_ver} +module load crtm/${crtm_ver} +module load g2tmpl/${g2tmpl_ver} +module load wgrib2/${wgrib2_ver} + +module list + +############################################################# +# environment settings +############################################################# +export FHR=%FHR% +export post_times=%HR% +export FHRGRP=%FHRGRP% +export FHRLST=%FHRLST% +export cyc=%CYC% +export cycle=t%CYC%z +export USE_CFP=YES +export g2tmpl_ver=v${g2tmpl_ver} + +############################################################ +# CALL executable job script here +############################################################ +$HOMEgfs/jobs/JGLOBAL_ATMOS_NCEPPOST + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/.gitignore b/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/.gitignore new file mode 100644 index 0000000000..6eef43ca90 --- /dev/null +++ b/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/.gitignore @@ -0,0 +1,2 @@ +# Ignore these +jgfs_atmos_awips_f*.ecf diff --git a/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf b/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf new file mode 100755 index 0000000000..2333476033 --- /dev/null +++ b/ecf/scripts/gfs/atmos/post_processing/awips_20km_1p0/jgfs_atmos_awips_master.ecf @@ -0,0 +1,56 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_awips_%FCSTHR%_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:10:00 +#PBS -l select=1:ncpus=1:mem=3GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load libjpeg/${libjpeg_ver} +module load grib_util/${grib_util_ver} +module load wgrib2/${wgrib2_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z +export FHRGRP=%FHRGRP% FHRLST=%FHRLST% FCSTHR=%FCSTHR% TRDRUN=%TRDRUN% fcsthrs=%FCSTHR% +export job=jgfs_awips_f%FCSTHR%_%CYC% + +############################################################ +# CALL executable job script here +############################################################ +$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + + +%include +%manual + +%end diff --git a/ecf/scripts/gfs/atmos/post_processing/awips_g2/.gitignore b/ecf/scripts/gfs/atmos/post_processing/awips_g2/.gitignore new file mode 100644 index 0000000000..37e58b180a --- /dev/null +++ b/ecf/scripts/gfs/atmos/post_processing/awips_g2/.gitignore @@ -0,0 +1,2 @@ +# Ignore these +jgfs_atmos_awips_g2_f*.ecf diff --git a/ecf/scripts/gfs/atmos/post_processing/awips_g2/jgfs_atmos_awips_g2_master.ecf b/ecf/scripts/gfs/atmos/post_processing/awips_g2/jgfs_atmos_awips_g2_master.ecf new file mode 100755 index 0000000000..a175e34e5a --- /dev/null +++ b/ecf/scripts/gfs/atmos/post_processing/awips_g2/jgfs_atmos_awips_g2_master.ecf @@ -0,0 +1,61 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_awips_g2_%FCSTHR%_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:05:00 +#PBS -l select=1:ncpus=1:mem=3GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +export FHRGRP=%FHRGRP% +export FHRLST=%FHRLST% +export fcsthrs=%FCSTHR% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load libjpeg/${libjpeg_ver} +module load grib_util/${grib_util_ver} +module load wgrib2/${wgrib2_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z +trdrun=%TRDRUN% +export job="jgfs_awips_f${fcsthrs}_${cyc}" + +############################################################ +# CALL executable job script here +############################################################ +$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +############################################################### + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf b/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf new file mode 100755 index 0000000000..e2ddf7a1e5 --- /dev/null +++ b/ecf/scripts/gfs/atmos/post_processing/bufr_sounding/jgfs_atmos_postsnd.ecf @@ -0,0 +1,59 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_postsnd_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=02:00:00 +#PBS -l select=4:mpiprocs=10:ompthreads=8:ncpus=80 +#PBS -l place=vscatter:exclhost +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load cfp/${cfp_ver} +module load python/${python_ver} +module load gempak/${gempak_ver} +module load hdf5/${hdf5_ver} +module load netcdf/${netcdf_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z +export USE_CFP=YES +export MPICH_MPIIO_HINTS_DISPLAY=1 +export OMP_NUM_THREADS=1 +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGFS_ATMOS_POSTSND + +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf b/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf new file mode 100755 index 0000000000..a6fb412f71 --- /dev/null +++ b/ecf/scripts/gfs/atmos/post_processing/bulletins/jgfs_atmos_fbwind.ecf @@ -0,0 +1,53 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_fbwind_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:05:00 +#PBS -l select=1:ncpus=1:mem=4GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load libjpeg/${libjpeg_ver} +module load grib_util/${grib_util_ver} +module load wgrib2/${wgrib2_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGFS_ATMOS_FBWIND + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_blending.ecf b/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_blending.ecf new file mode 100755 index 0000000000..d0a6500c7b --- /dev/null +++ b/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_blending.ecf @@ -0,0 +1,53 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_wafs_blending_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:30:00 +#PBS -l select=1:mpiprocs=1:ompthreads=1:ncpus=1:mem=1GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load libjpeg/${libjpeg_ver} +module load grib_util/${grib_util_ver} +module load util_shared/${util_shared_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGFS_ATMOS_WAFS_BLENDING + +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_blending_0p25.ecf b/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_blending_0p25.ecf new file mode 100755 index 0000000000..83647d9c15 --- /dev/null +++ b/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_blending_0p25.ecf @@ -0,0 +1,54 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_wafs_blending_0p25_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:30:00 +#PBS -l select=1:mpiprocs=1:ompthreads=1:ncpus=1:mem=15GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load libjpeg/${libjpeg_ver} +module load grib_util/${grib_util_ver} +module load util_shared/${util_shared_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z +export ICAO2023=no + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGFS_ATMOS_WAFS_BLENDING_0P25 + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_grib2.ecf b/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_grib2.ecf new file mode 100755 index 0000000000..25d4fc37aa --- /dev/null +++ b/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_grib2.ecf @@ -0,0 +1,67 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_wafs_grib2_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:30:00 +#PBS -l select=1:mpiprocs=18:ompthreads=1:ncpus=18:mem=80GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-pals/${cray_pals_ver} +module load libjpeg/${libjpeg_ver} +module load grib_util/${grib_util_ver} +module load wgrib2/${wgrib2_ver} +module load cfp/${cfp_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z +export USE_CFP=YES +export ICAO2023=no + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGFS_ATMOS_WAFS_GRIB2 +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual +###################################################################### +# Purpose: To execute the job that generates the WAFS GRIB2 +# products.
+###################################################################### + +###################################################################### +# Job specific troubleshooting instructions: +# see generic troubleshoot manual page +# +###################################################################### + +# include manual page below +%end diff --git a/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_grib2_0p25.ecf b/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_grib2_0p25.ecf new file mode 100755 index 0000000000..9beac6f13a --- /dev/null +++ b/ecf/scripts/gfs/atmos/post_processing/grib2_wafs/jgfs_atmos_wafs_grib2_0p25.ecf @@ -0,0 +1,67 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_wafs_grib2_0p25_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:30:00 +#PBS -l select=1:mpiprocs=11:ompthreads=1:ncpus=11:mem=80GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-pals/${cray_pals_ver} +module load libjpeg/${libjpeg_ver} +module load grib_util/${grib_util_ver} +module load wgrib2/${wgrib2_ver} +module load cfp/${cfp_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z +export USE_CFP=YES +export ICAO2023=no + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGFS_ATMOS_WAFS_GRIB2_0P25 +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual +###################################################################### +# Purpose: To execute the job that generates the 0.25 degree +# WAFS GRIB2 products.
+###################################################################### + +###################################################################### +# Job specific troubleshooting instructions: +# see generic troubleshoot manual page +# +###################################################################### + +# include manual page below +%end diff --git a/ecf/scripts/gfs/atmos/post_processing/grib_wafs/.gitignore b/ecf/scripts/gfs/atmos/post_processing/grib_wafs/.gitignore new file mode 100644 index 0000000000..f4afcf92ad --- /dev/null +++ b/ecf/scripts/gfs/atmos/post_processing/grib_wafs/.gitignore @@ -0,0 +1,2 @@ +# Ignore these +jgfs_atmos_wafs_f*.ecf diff --git a/ecf/scripts/gfs/atmos/post_processing/grib_wafs/jgfs_atmos_wafs_master.ecf b/ecf/scripts/gfs/atmos/post_processing/grib_wafs/jgfs_atmos_wafs_master.ecf new file mode 100755 index 0000000000..e7fae7891a --- /dev/null +++ b/ecf/scripts/gfs/atmos/post_processing/grib_wafs/jgfs_atmos_wafs_master.ecf @@ -0,0 +1,65 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_wafs_%FCSTHR%_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:05:00 +#PBS -l select=1:mpiprocs=1:ompthreads=1:ncpus=1:mem=1GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +export fcsthrs=%FCSTHR% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load libjpeg/${libjpeg_ver} +module load grib_util/${grib_util_ver} +module load wgrib2/${wgrib2_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGFS_ATMOS_WAFS +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual +###################################################################### +# Purpose: To execute the job that generates the WAFS GRIB products +# for a single forecast hour.
+###################################################################### + +###################################################################### +# Job specific troubleshooting instructions: +# see generic troubleshoot manual page +# +###################################################################### + +# include manual page below +%end diff --git a/ecf/scripts/gfs/atmos/post_processing/jgfs_atmos_wafs_gcip.ecf b/ecf/scripts/gfs/atmos/post_processing/jgfs_atmos_wafs_gcip.ecf new file mode 100755 index 0000000000..00a87f3948 --- /dev/null +++ b/ecf/scripts/gfs/atmos/post_processing/jgfs_atmos_wafs_gcip.ecf @@ -0,0 +1,70 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_wafs_gcip_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:30:00 +#PBS -l select=1:mpiprocs=2:ompthreads=1:ncpus=2:mem=50GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load cfp/${cfp_ver} +module load libjpeg/${libjpeg_ver} +module load grib_util/${grib_util_ver} +module load bufr_dump/${bufr_dump_ver} +module load wgrib2/${wgrib2_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z +export USE_CFP=YES +export ICAO2023=no + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGFS_ATMOS_WAFS_GCIP + +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual +###################################################################### +# Purpose: To execute the job that generates the WAFS GCIP +# products.
+###################################################################### + +###################################################################### +# Job specific troubleshooting instructions: +# see generic troubleshoot manual page +# +###################################################################### + +# include manual page below +%end diff --git a/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf b/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf new file mode 100755 index 0000000000..1e19a0ea3e --- /dev/null +++ b/ecf/scripts/gfs/atmos/verf/jgfs_atmos_vminmon.ecf @@ -0,0 +1,53 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_atmos_vminmon_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:05:00 +#PBS -l select=1:ncpus=1:mem=1GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load perl/${perl_ver} +module load util_shared/${util_shared_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z +export VERBOSE=YES + +############################################################ +# CALL executable job script here +############################################################ +$HOMEgfs/jobs/JGFS_ATMOS_VMINMON + +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gfs/jgfs_forecast.ecf b/ecf/scripts/gfs/jgfs_forecast.ecf new file mode 100755 index 0000000000..370cd9342d --- /dev/null +++ b/ecf/scripts/gfs/jgfs_forecast.ecf @@ -0,0 +1,53 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_forecast_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=02:30:00 +#PBS -l select=112:mpiprocs=24:ompthreads=5:ncpus=120 +#PBS -l place=vscatter:exclhost +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +# Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load hdf5/${hdf5_ver} +module load netcdf/${netcdf_ver} + +module list + +############################################################# +# environment settings +############################################################# +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGLOBAL_FORECAST +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual + +%end diff --git a/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf b/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf new file mode 100755 index 0000000000..199f68adeb --- /dev/null +++ b/ecf/scripts/gfs/wave/gempak/jgfs_wave_gempak.ecf @@ -0,0 +1,63 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_wave_gempak_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=02:00:00 +#PBS -l select=1:ncpus=1:mem=1GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +## Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load gempak/${gempak_ver} + +module list + +############################################################ +## WCOSS_C environment settings +############################################################ +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGLOBAL_WAVE_GEMPAK + +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual +###################################################################### +# Purpose: +# +# +###################################################################### + +###################################################################### +# Job specific troubleshooting instructions: +# see generic troubleshoot manual page +# +###################################################################### + +# include manual page below +%end diff --git a/ecf/scripts/gfs/wave/init/jgfs_wave_init.ecf b/ecf/scripts/gfs/wave/init/jgfs_wave_init.ecf new file mode 100755 index 0000000000..569a7c0894 --- /dev/null +++ b/ecf/scripts/gfs/wave/init/jgfs_wave_init.ecf @@ -0,0 +1,64 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_wave_init_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:10:00 +#PBS -l select=1:mpiprocs=11:ompthreads=1:ncpus=11:mem=2GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +## Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-pals/${cray_pals_ver} +module load cfp/${cfp_ver} + +module list + +############################################################ +## environment settings +############################################################ +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGLOBAL_WAVE_INIT + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual +###################################################################### +# Purpose: +# +# +###################################################################### + +###################################################################### +# Job specific troubleshooting instructions: +# see generic troubleshoot manual page +# +###################################################################### + +# include manual page below +%end diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpnt.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpnt.ecf new file mode 100755 index 0000000000..2871a0f1a1 --- /dev/null +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpnt.ecf @@ -0,0 +1,65 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_wave_post_bndpnt_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=01:00:00 +#PBS -l select=3:ncpus=80:ompthreads=1 +#PBS -l place=vscatter:exclhost +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +## Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load cfp/${cfp_ver} + +module list + +############################################################ +## environment settings +############################################################ +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGLOBAL_WAVE_POST_BNDPNT + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual +###################################################################### +# Purpose: +# +# +###################################################################### + +###################################################################### +# Job specific troubleshooting instructions: +# see generic troubleshoot manual page +# +###################################################################### + +# include manual page below +%end diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpntbll.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpntbll.ecf new file mode 100755 index 0000000000..73015f869f --- /dev/null +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_post_bndpntbll.ecf @@ -0,0 +1,63 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_wave_post_bndpntbll_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=01:00:00 +#PBS -l select=4:ncpus=112:ompthreads=1 +#PBS -l place=vscatter:exclhost +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +## Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load intel/${intel_ver} +module load cray-pals/${cray_pals_ver} +module load cfp/${cfp_ver} + +module list + +############################################################ +## environment settings +############################################################ +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual +###################################################################### +# Purpose: +# +# +###################################################################### + +###################################################################### +# Job specific troubleshooting instructions: +# see generic troubleshoot manual page +# +###################################################################### + +# include manual page below +%end diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_postpnt.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_postpnt.ecf new file mode 100755 index 0000000000..39e58f01c3 --- /dev/null +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_postpnt.ecf @@ -0,0 +1,65 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_wave_postpnt_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=01:30:00 +#PBS -l select=4:ncpus=50:ompthreads=1 +#PBS -l place=vscatter:exclhost +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +## Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load cfp/${cfp_ver} + +module list + +############################################################ +## environment settings +############################################################ +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGLOBAL_WAVE_POST_PNT + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual +###################################################################### +# Purpose: +# +# +###################################################################### + +###################################################################### +# Job specific troubleshooting instructions: +# see generic troubleshoot manual page +# +###################################################################### + +# include manual page below +%end diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_postsbs.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_postsbs.ecf new file mode 100755 index 0000000000..a1f18fefd8 --- /dev/null +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_postsbs.ecf @@ -0,0 +1,68 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_wave_postsbs_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=03:00:00 +#PBS -l select=1:mpiprocs=8:ompthreads=1:ncpus=8:mem=10GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +## Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load cfp/${cfp_ver} +module load libjpeg/${libjpeg_ver} +module load grib_util/${grib_util_ver} +module load wgrib2/${wgrib2_ver} + +module list + +############################################################ +## environment settings +############################################################ +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGLOBAL_WAVE_POST_SBS + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual +###################################################################### +# Purpose: +# +# +###################################################################### + +###################################################################### +# Job specific troubleshooting instructions: +# see generic troubleshoot manual page +# +###################################################################### + +# include manual page below +%end diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf new file mode 100755 index 0000000000..5cbd08032b --- /dev/null +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_bulls.ecf @@ -0,0 +1,63 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_wave_prdgen_bulls_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:20:00 +#PBS -l select=1:ncpus=1:mem=1GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +## Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load util_shared/${util_shared_ver} + +module list + +############################################################ +## environment settings +############################################################ +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGLOBAL_WAVE_PRDGEN_BULLS + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual +###################################################################### +# Purpose: +# +# +###################################################################### + +###################################################################### +# Job specific troubleshooting instructions: +# see generic troubleshoot manual page +# +###################################################################### + +# include manual page below +%end diff --git a/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf new file mode 100755 index 0000000000..616f360d28 --- /dev/null +++ b/ecf/scripts/gfs/wave/post/jgfs_wave_prdgen_gridded.ecf @@ -0,0 +1,65 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_wave_prdgen_gridded_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=02:00:00 +#PBS -l select=1:ncpus=1:mem=1GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +## Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load libjpeg/${libjpeg_ver} +module load grib_util/${grib_util_ver} +module load wgrib2/${wgrib2_ver} + +module list + +############################################################ +## environment settings +############################################################ +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED + +if [ $? 
-ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual +###################################################################### +# Purpose: +# +# +###################################################################### + +###################################################################### +# Job specific troubleshooting instructions: +# see generic troubleshoot manual page +# +###################################################################### + +# include manual page below +%end diff --git a/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf b/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf new file mode 100755 index 0000000000..97700528ab --- /dev/null +++ b/ecf/scripts/gfs/wave/prep/jgfs_wave_prep.ecf @@ -0,0 +1,71 @@ +#PBS -S /bin/bash +#PBS -N %RUN%_wave_prep_%CYC% +#PBS -j oe +#PBS -q %QUEUE% +#PBS -A %PROJ%-%PROJENVIR% +#PBS -l walltime=00:10:00 +#PBS -l select=1:mpiprocs=65:ompthreads=1:ncpus=65:mem=150GB +#PBS -l place=vscatter +#PBS -l debug=true + +model=gfs +%include +%include + +set -x + +export NET=%NET:gfs% +export RUN=%RUN% +export CDUMP=%RUN% + +############################################################ +## Load modules +############################################################ +module load PrgEnv-intel/${PrgEnv_intel_ver} +module load craype/${craype_ver} +module load intel/${intel_ver} +module load cray-mpich/${cray_mpich_ver} +module load cray-pals/${cray_pals_ver} +module load cfp/${cfp_ver} +module load cdo/${cdo_ver} +module load hdf5/${hdf5_ver} +module load netcdf/${netcdf_ver} +module load udunits/${udunits_ver} +module load gsl/${gsl_ver} +module load nco/${nco_ver} +module load wgrib2/${wgrib2_ver} + +module list + +############################################################ +## environment settings +############################################################ +export cyc=%CYC% +export cycle=t%CYC%z + +############################################################ +# CALL executable job script here +############################################################ +${HOMEgfs}/jobs/JGLOBAL_WAVE_PREP +if [ $? -ne 0 ]; then + ecflow_client --msg="***JOB ${ECF_NAME} ERROR RUNNING J-SCRIPT ***" + ecflow_client --abort + exit +fi + +%include +%manual +###################################################################### +# Purpose: +# +# +###################################################################### + +###################################################################### +# Job specific troubleshooting instructions: +# see generic troubleshoot manual page +# +###################################################################### + +# include manual page below +%end diff --git a/ecf/setup_ecf_links.sh b/ecf/setup_ecf_links.sh new file mode 100755 index 0000000000..b0dce50cc5 --- /dev/null +++ b/ecf/setup_ecf_links.sh @@ -0,0 +1,59 @@ +#!/bin/bash + +set -eu + +ECF_DIR=$(pwd) + +# Function that loops over forecast hours and +# creates link between the master and target +function link_master_to_fhr(){ + tmpl=$1 # Name of the master template + fhrs=$2 # Array of forecast hours + for fhr in ${fhrs[@]}; do + fhrchar=$(printf %03d $fhr) + master=${tmpl}_master.ecf + target=${tmpl}_f${fhrchar}.ecf + rm -f $target + ln -sf $master $target + done +} + +# EnKF GDAS post files +cd $ECF_DIR/scripts/enkfgdas/post +echo "Linking enkfgdas/post ..." 
+fhrs=($(seq 3 9)) +link_master_to_fhr "jenkfgdas_post" "$fhrs" + +# GDAS post files +cd $ECF_DIR/scripts/gdas/atmos/post +echo "Linking gdas/atmos/post ..." +rm -f jgdas_atmos_post_anl.ecf +ln -sf jgdas_atmos_post_master.ecf jgdas_atmos_post_anl.ecf +fhrs=($(seq 0 9)) +link_master_to_fhr "jgdas_atmos_post" "$fhrs" + +# GFS post files +cd $ECF_DIR/scripts/gfs/atmos/post +echo "Linking gfs/atmos/post ..." +rm -f jgfs_atmos_post_anl.ecf +ln -sf jgfs_atmos_post_master.ecf jgfs_atmos_post_anl.ecf +fhrs=($(seq 0 1 120) $(seq 123 3 384)) +link_master_to_fhr "jgfs_atmos_post" "$fhrs" + +# GFS awips 20km 1p0 files +cd $ECF_DIR/scripts/gfs/atmos/post_processing/awips_20km_1p0 +echo "Linking gfs/atmos/post_processing/awips_20km_1p0 ..." +fhrs=($(seq 0 3 84) $(seq 90 6 240)) +link_master_to_fhr "jgfs_atmos_awips" "$fhrs" + +# GFS awips g2 files +cd $ECF_DIR/scripts/gfs/atmos/post_processing/awips_g2 +echo "Linking gfs/atmos/post_processing/awips_g2 ..." +fhrs=($(seq 0 3 84) $(seq 90 6 240)) +link_master_to_fhr "jgfs_atmos_awips_g2" "$fhrs" + +# GFS atmos wafs files +cd $ECF_DIR/scripts/gfs/atmos/post_processing/grib_wafs +echo "Linking gfs/atmos/post_processing/grib_wafs ..." +fhrs=($(seq 0 6 120)) +link_master_to_fhr "jgfs_atmos_wafs" "$fhrs" diff --git a/ecflow/ecf/include/envir-p1-old.h b/ecflow/ecf/include/envir-p1-old.h deleted file mode 100644 index ece7cccbfb..0000000000 --- a/ecflow/ecf/include/envir-p1-old.h +++ /dev/null @@ -1,57 +0,0 @@ -# envir-p1.h -export job=${job:-$LSB_JOBNAME} #Can't use $job in filenames! -export jobid=${jobid:-$job.$LSB_JOBID} - -export RUN_ENVIR=${RUN_ENVIR:-nco} -export envir=%ENVIR% -export SENDDBN=${SENDDBN:-%SENDDBN:YES%} -export SENDDBN_NTC=${SENDDBN_NTC:-%SENDDBN_NTC:YES%} - -module load prod_envir prod_util - -case $envir in - prod) - export jlogfile=${jlogfile:-${COMROOT}/logs/jlogfiles/jlogfile.${jobid}} - export DATAROOT=${DATAROOT:-/tmpnwprd1} - if [ "$SENDDBN" == "YES" ]; then - export DBNROOT=/iodprod/dbnet_siphon # previously set in .bash_profile - else - export DBNROOT=${UTILROOT}/fakedbn - fi - ;; - eval) - export envir=para - export jlogfile=${jlogfile:-${COMROOT}/logs/${envir}/jlogfile} - export DATAROOT=${DATAROOT:-/tmpnwprd2} - if [ "$SENDDBN" == "YES" ]; then - export DBNROOT=${UTILROOT}/para_dbn - SENDDBN_NTC=NO - else - export DBNROOT=${UTILROOT}/fakedbn - fi - ;; - para|test) - export jlogfile=${jlogfile:-${COMROOT}/logs/${envir}/jlogfile} - export DATAROOT=${DATAROOT:-/tmpnwprd2} - export DBNROOT=${UTILROOT}/fakedbn - ;; - *) - ecflow_client --abort="ENVIR must be prod, para, eval, or test [envir.h]" - exit - ;; -esac - -export NWROOT=/nw${envir} -export COMROOT=/com -export PCOMROOT=/pcom/${envir} -export SENDECF=${SENDECF:-YES} -export SENDCOM=${SENDCOM:-YES} -export KEEPDATA=${KEEPDATA:-%KEEPDATA:NO%} - -# The following two lines are deprecated (only in the "old" header) -export DCOMROOT=/dcom/us007003 # previously set to /dcom in .bash_profile -export UTILROOT=/nwprod/util - -if [ -n "%PDY:%" ]; then export PDY=${PDY:-%PDY:%}; fi -if [ -n "%MAILTO:%" ]; then export MAILTO=${MAILTO:-%MAILTO:%}; fi -if [ -n "%DBNLOG:%" ]; then export DBNLOG=${DBNLOG:-%DBNLOG:%}; fi diff --git a/ecflow/ecf/include/envir-p3.h b/ecflow/ecf/include/envir-p3.h deleted file mode 100644 index a57a706bed..0000000000 --- a/ecflow/ecf/include/envir-p3.h +++ /dev/null @@ -1,60 +0,0 @@ -# envir-p3.h -export job=${job:-$LSB_JOBNAME} #Can't use $job in filenames! 
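A quick illustration of what the new setup_ecf_links.sh produces: link_master_to_fhr points one per-forecast-hour ecf name at a shared master template, so editing the master updates every hour's script. A minimal stand-alone sketch of that idiom, assuming an example template name and hour range (not additions to the actual script):

#! /usr/bin/env bash
# Sketch only: the linking idiom from setup_ecf_links.sh in isolation.
set -eu
tmpl="jenkfgdas_post"                   # example template base name
for fhr in $(seq 3 9); do               # example forecast hours
  fhrchar=$(printf %03d "${fhr}")
  # e.g. jenkfgdas_post_f003.ecf -> jenkfgdas_post_master.ecf
  ln -sf "${tmpl}_master.ecf" "${tmpl}_f${fhrchar}.ecf"
done
ls -l "${tmpl}"_f*.ecf                  # every link resolves to the single master script

This is the same mechanism the script applies above to the gdas/gfs post, awips, and wafs file families.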
-export jobid=${jobid:-$job.$LSB_JOBID} - -export RUN_ENVIR=${RUN_ENVIR:-nco} -export envir=%ENVIR% -export SENDDBN=${SENDDBN:-%SENDDBN:YES%} -export SENDDBN_NTC=${SENDDBN_NTC:-%SENDDBN_NTC:YES%} -FILESYSTEMROOT=/gpfs/%FILESYSTEM:dell1% - -module load prod_envir/%prod_envir_ver% prod_util/%prod_util_ver% EnvVars/%EnvVars_ver% - -if [ -n "%PARATEST:%" ]; then export PARATEST=${PARATEST:-%PARATEST:%}; fi - -case $envir in - prod) - export DATAROOT=${DATAROOT:-${FILESYSTEMROOT}/nco/ops/tmpnwprd} - if [ "$SENDDBN" == "YES" ]; then - export DBNROOT=/iodprod_dell/dbnet_siphon - else - export DBNROOT=${UTILROOT}/fakedbn - fi - ;; - eval) - export envir=para - export DATAROOT=${DATAROOT:-${FILESYSTEMROOT}/nco/ops/tmpnwprd} - if [ "$SENDDBN" == "YES" ]; then - if [ "$PARATEST" == "YES" ]; then - export DBNROOT=${UTILROOT}/fakedbn - else - export DBNROOT=${UTILROOT}/para_dbn - fi - SENDDBN_NTC=NO - else - export DBNROOT=${UTILROOT}/fakedbn - fi - ;; - para|test) - export DATAROOT=${DATAROOT:-${FILESYSTEMROOT}/nco/ops/tmpnwprd} - export DBNROOT=${UTILROOT}/fakedbn - ;; - *) - ecflow_client --abort="ENVIR must be prod, para, eval, or test [envir.h]" - exit - ;; -esac - -export COMROOT=${FILESYSTEMROOT}/nco/ops/com -export GESROOT=${FILESYSTEMROOT}/nco/ops/nwges -export COREROOT=${FILESYSTEMROOT}/ptmp/production.core/$jobid -export NWROOT=/gpfs/dell1/nco/ops/nw${envir} -export SENDECF=${SENDECF:-YES} -export SENDCOM=${SENDCOM:-YES} -export KEEPDATA=${KEEPDATA:-%KEEPDATA:NO%} -export TMPDIR=${TMPDIR:-${DATAROOT:?}} - -if [ -n "%PDY:%" ]; then export PDY=${PDY:-%PDY:%}; fi -if [ -n "%COMPATH:%" ]; then export COMPATH=${COMPATH:-%COMPATH:%}; fi -if [ -n "%MAILTO:%" ]; then export MAILTO=${MAILTO:-%MAILTO:%}; fi -if [ -n "%DBNLOG:%" ]; then export DBNLOG=${DBNLOG:-%DBNLOG:%}; fi diff --git a/ecflow/ecf/include/head.h b/ecflow/ecf/include/head.h deleted file mode 100644 index 2c49be2b5b..0000000000 --- a/ecflow/ecf/include/head.h +++ /dev/null @@ -1,56 +0,0 @@ -set -xe # print commands as they are executed and enable signal trapping - -export PS4='+ $SECONDS + ' - -# Variables needed for communication with ecFlow -export ECF_NAME=%ECF_NAME% -#export ECF_HOST=%ECF_HOST% -export ECF_HOST=%ECF_LOGHOST% -export ECF_PORT=%ECF_PORT% -export ECF_PASS=%ECF_PASS% -export ECF_TRYNO=%ECF_TRYNO% -export ECF_RID=$LSB_JOBID - -# Tell ecFlow we have started -# POST_OUT variable enables LSF post_exec to communicate with ecFlow -if [ -d /opt/modules ]; then - # WCOSS TO4 (Cray XC40) - . /opt/modules/default/init/sh - module load ecflow - POST_OUT=/gpfs/hps/tmpfs/ecflow/ecflow_post_in.$LSB_BATCH_JID -else - # WCOSS Phase 3 (Dell PowerEdge) - . /usrx/local/prod/lmod/lmod/init/sh - . /gpfs/dell1/nco/ops/nwprod/versions/ecflow_p3.ver - module load ips/$ips_ver - module load EnvVars/$EnvVars_ver - module load ecflow/$ecflow_ver - POST_OUT=/var/lsf/ecflow_post_in.$USER.$LSB_BATCH_JID -fi -ecflow_client --init=${ECF_RID} - -cat > $POST_OUT <>$POST_OUT - trap $1; exit $1 -} -# Trap all error and exit signals -trap 'ERROR $?' ERR EXIT - diff --git a/ecflow/ecf/include/model_ver.h b/ecflow/ecf/include/model_ver.h deleted file mode 100644 index 7db8c034a7..0000000000 --- a/ecflow/ecf/include/model_ver.h +++ /dev/null @@ -1,2 +0,0 @@ -. 
${NWROOT:?}/versions/${model:?}.ver -eval export HOME${model}=${NWROOT}/${model}.\${${model}_ver:?} diff --git a/ecflow/ecf/include/tail.h b/ecflow/ecf/include/tail.h deleted file mode 100644 index 1cdbe951a0..0000000000 --- a/ecflow/ecf/include/tail.h +++ /dev/null @@ -1,3 +0,0 @@ -ecflow_client --complete # Notify ecFlow of a normal end -trap 0 # Remove all traps -exit 0 # End the shell diff --git a/ecflow/ecf/versions/gfs.ver b/ecflow/ecf/versions/gfs.ver deleted file mode 100644 index 810f0d4aed..0000000000 --- a/ecflow/ecf/versions/gfs.ver +++ /dev/null @@ -1,30 +0,0 @@ -export gfs_ver="v16.0.0" -export EnvVars_ver="1.0.3" -export lsf_ver="10.1" -export emc_utils_ver="1.0.0" -export ips_ver="18.0.1.163" -export NetCDF_ver="4.5.0" -export HDF5_serial_ver="1.10.1" -export hdf5_parallel_ver="1.10.6" -export netcdf_parallel_ver="4.7.4" -export impi_ver="18.0.1" -export bufr_ver="11.3.0" -export crtm_ver="2.3.0" -export grib_util_ver="1.1.0" -export g2tmpl_ver="1.6.0" -export ESMF_ver="8.0.1bs08" -export CFP_ver="2.0.2" -export NCO_ver="4.7.0" -export pm5_ver="1.0" -export gempak_ver="7.3.3" -export bufr_dumplist_ver="2.0.0" -export dumpjb_ver="5.1.0" -export NCL_ver="6.4.0" -export python_ver="3.6.3" -export prod_envir_ver="1.1.0" -export util_shared_ver="1.3.0" -export prod_util_ver="1.1.4" -export metplus_ver="2.1" -export w3emc_para_ver="2.4.0" -export cdo_ver="1.9.8" -export PATH=.:$PATH diff --git a/env/CONTAINER.env b/env/CONTAINER.env new file mode 100755 index 0000000000..4f85ae56de --- /dev/null +++ b/env/CONTAINER.env @@ -0,0 +1,38 @@ +#! /usr/bin/env bash + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input argument to set runtime environment variables!" + echo "argument can be any one of the following:" + echo "atmanlrun atmensanlrun aeroanlrun landanlrun" + echo "anal sfcanl fcst post vrfy metp" + echo "eobs eupd ecen efcs epos" + echo "postsnd awips gempak" + exit 1 + +fi + +step=$1 + +export npe_node_max=40 +export launcher="mpirun" +export mpmd_opt="--multi-prog" + +# Configure MPI environment +export MPI_BUFS_PER_PROC=2048 +export MPI_BUFS_PER_HOST=2048 +export MPI_GROUP_MAX=256 +export MPI_MEMMAP_OFF=1 +export MP_STDOUTMODE="ORDERED" +export KMP_AFFINITY=scatter +export OMP_STACKSIZE=2048000 +export NTHSTACK=1024000000 + +ulimit -s unlimited +ulimit -a + + +if [ "${step}" = "ocnanalrun" ]; then + export NTHREADS_OCNANAL=1 + export APRUN_OCNANAL="${launcher} -n 2" +fi diff --git a/env/HERA.env b/env/HERA.env index 32f0bf0d0a..9900e360c7 100755 --- a/env/HERA.env +++ b/env/HERA.env @@ -1,10 +1,11 @@ -#!/bin/ksh -x +#! /usr/bin/env bash -if [ $# -ne 1 ]; then +if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
echo "argument can be any one of the following:" - echo "anal fcst post vrfy metp" + echo "atmanlrun atmensanlrun aeroanlrun landanlrun" + echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" exit 1 @@ -16,6 +17,7 @@ step=$1 export npe_node_max=40 #JKHexport launcher="srun -l --export=ALL" export launcher="srun -l --epilog=/apps/local/bin/report-mem --export=ALL" +export mpmd_opt="--multi-prog --output=${step}.%J.%t.out" # Configure MPI environment #export I_MPI_ADJUST_ALLREDUCE=5 @@ -31,211 +33,262 @@ export NTHSTACK=1024000000 ulimit -s unlimited ulimit -a -export job=${PBS_JOBNAME:-$step} -export jobid=${job}.${PBS_JOBID:-$$} +if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then - -if [ $step = "prep" -o $step = "prepbufr" ]; then - - nth_max=$(($npe_node_max / $npe_node_prep)) + nth_max=$((npe_node_max / npe_node_prep)) export POE="NO" export BACK="NO" export sys_tp="HERA" + export launcher_PREP="srun" + +elif [[ "${step}" = "preplandobs" ]]; then + + export APRUN_CALCFIMS="${launcher} -n 1" -elif [ $step = "waveinit" -o $step = "waveprep" -o $step = "wavepostsbs" -o $step = "wavepostbndpnt" -o $step = "wavepostbndpntbll" -o $step = "wavepostpnt" ]; then +elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostbndpntbll" ]] || [[ "${step}" = "wavepostpnt" ]]; then - export mpmd="--multi-prog" export CFP_MP="YES" - if [ $step = "waveprep" ]; then export MP_PULSE=0 ; fi + if [[ "${step}" = "waveprep" ]]; then export MP_PULSE=0 ; fi export wavempexec=${launcher} - export wave_mpmd=${mpmd} + export wave_mpmd=${mpmd_opt} + +elif [[ "${step}" = "atmanlrun" ]]; then + + nth_max=$((npe_node_max / npe_node_atmanlrun)) + + export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}} + [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} + export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + nth_max=$((npe_node_max / npe_node_atmensanlrun)) + + export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} + [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} + export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun}" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + + nth_max=$((npe_node_max / npe_node_aeroanlrun)) + + export NTHREADS_AEROANL=${nth_aeroanlrun:-${nth_max}} + [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max} + export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun}" + +elif [[ "${step}" = "landanlrun" ]]; then + + nth_max=$((npe_node_max / npe_node_landanlrun)) + + export NTHREADS_LANDANL=${nth_landanlrun:-${nth_max}} + [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max} + export APRUN_LANDANL="${launcher} -n ${npe_landanlrun}" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + + export APRUNCFP="${launcher} -n \$ncmd --multi-prog" + + nth_max=$((npe_node_max / npe_node_ocnanalbmat)) + + export NTHREADS_OCNANAL=${nth_ocnanalbmat:-${nth_max}} + [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} + export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalbmat}" + +elif [[ "${step}" = "ocnanalrun" ]]; then + + export APRUNCFP="${launcher} -n \$ncmd --multi-prog" + + nth_max=$((npe_node_max / npe_node_ocnanalrun)) + + export NTHREADS_OCNANAL=${nth_ocnanalrun:-${nth_max}} + [[ ${NTHREADS_OCNANAL} -gt 
${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} + export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalrun}" + +elif [[ "${step}" = "ocnanalchkpt" ]]; then -elif [ $step = "anal" ]; then + export APRUNCFP="${launcher} -n \$ncmd --multi-prog" + + nth_max=$((npe_node_max / npe_node_ocnanalchkpt)) + + export NTHREADS_OCNANAL=${nth_ocnanalchkpt:-${nth_max}} + [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} + export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalchkpt}" + +elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then export MKL_NUM_THREADS=4 export MKL_CBWR=AUTO export CFP_MP=${CFP_MP:-"YES"} export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="$launcher -n \$ncmd --multi-prog" + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - nth_max=$(($npe_node_max / $npe_node_anal)) + nth_max=$((npe_node_max / npe_node_anal)) - export NTHREADS_GSI=${nth_anal:-$nth_max} - [[ $NTHREADS_GSI -gt $nth_max ]] && export NTHREADS_GSI=$nth_max - export APRUN_GSI="$launcher" + export NTHREADS_GSI=${nth_anal:-${nth_max}} + [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max} + export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_anal}}" export NTHREADS_CALCINC=${nth_calcinc:-1} - [[ $NTHREADS_CALCINC -gt $nth_max ]] && export NTHREADS_CALCINC=$nth_max - export APRUN_CALCINC="$launcher" + [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max} + export APRUN_CALCINC="${launcher} \$ncmd" export NTHREADS_CYCLE=${nth_cycle:-12} - [[ $NTHREADS_CYCLE -gt $npe_node_max ]] && export NTHREADS_CYCLE=$npe_node_max + [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} npe_cycle=${ntiles:-6} - export APRUN_CYCLE="$launcher -n $npe_cycle" + export APRUN_CYCLE="${launcher} -n ${npe_cycle}" export NTHREADS_GAUSFCANL=1 npe_gausfcanl=${npe_gausfcanl:-1} - export APRUN_GAUSFCANL="$launcher -n $npe_gausfcanl" - - export NTHREADS_CHGRES=${nth_echgres:-1} - [[ $NTHREADS_CHGRES -gt $npe_node_max ]] && export NTHREADS_CHGRES=$npe_node_max - export APRUN_CHGRES="" - -elif [ $step = "gldas" ]; then - - nth_max=$(($npe_node_max / $npe_node_gldas)) - - export NTHREADS_GLDAS=${nth_gldas:-$nth_max} - [[ $NTHREADS_GLDAS -gt $nth_max ]] && export NTHREADS_GLDAS=$nth_max - export APRUN_GLDAS="$launcher -n $npe_gldas" + export APRUN_GAUSFCANL="${launcher} -n ${npe_gausfcanl}" - export NTHREADS_GAUSSIAN=${nth_gaussian:-1} - [[ $NTHREADS_GAUSSIAN -gt $nth_max ]] && export NTHREADS_GAUSSIAN=$nth_max - export APRUN_GAUSSIAN="$launcher -n $npe_gaussian" +elif [[ "${step}" = "sfcanl" ]]; then -# Must run data processing with exactly the number of tasks as time -# periods being processed. 
+ nth_max=$((npe_node_max / npe_node_sfcanl)) - npe_gldas_data_proc=$(($gldas_spinup_hours + 12)) - export APRUN_GLDAS_DATA_PROC="$launcher -n $npe_gldas_data_proc --multi-prog" + export NTHREADS_CYCLE=${nth_sfcanl:-14} + [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} + npe_sfcanl=${ntiles:-6} + export APRUN_CYCLE="${launcher} -n ${npe_sfcanl}" -elif [ $step = "eobs" ]; then +elif [[ "${step}" = "eobs" ]]; then export MKL_NUM_THREADS=4 export MKL_CBWR=AUTO - nth_max=$(($npe_node_max / $npe_node_eobs)) + nth_max=$((npe_node_max / npe_node_eobs)) - export NTHREADS_GSI=${nth_eobs:-$nth_max} - [[ $NTHREADS_GSI -gt $nth_max ]] && export NTHREADS_GSI=$nth_max - export APRUN_GSI="$launcher" + export NTHREADS_GSI=${nth_eobs:-${nth_max}} + [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max} + export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_eobs}}" export CFP_MP=${CFP_MP:-"YES"} export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="$launcher -n \$ncmd --multi-prog" + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" -elif [ $step = "eupd" ]; then +elif [[ "${step}" = "eupd" ]]; then - nth_max=$(($npe_node_max / $npe_node_eupd)) + nth_max=$((npe_node_max / npe_node_eupd)) - export NTHREADS_ENKF=${nth_eupd:-$nth_max} - [[ $NTHREADS_ENKF -gt $nth_max ]] && export NTHREADS_ENKF=$nth_max - export APRUN_ENKF="$launcher" + export NTHREADS_ENKF=${nth_eupd:-${nth_max}} + [[ ${NTHREADS_ENKF} -gt ${nth_max} ]] && export NTHREADS_ENKF=${nth_max} + export APRUN_ENKF="${launcher} -n ${npe_enkf:-${npe_eupd}}" export CFP_MP=${CFP_MP:-"YES"} export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="$launcher -n \$ncmd --multi-prog" + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" -elif [ $step = "fcst" ]; then +elif [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then - #PEs and PEs/node can differ for GFS and GDAS forecasts if threading differs - if [[ $CDUMP == "gfs" ]]; then - npe_fcst=$npe_fcst_gfs - npe_node_fcst=$npe_node_fcst_gfs - nth_fv3=$nth_fv3_gfs + if [[ "${CDUMP}" =~ "gfs" ]]; then + nprocs="npe_${step}_gfs" + ppn="npe_node_${step}_gfs" || ppn="npe_node_${step}" + else + nprocs="npe_${step}" + ppn="npe_node_${step}" fi + (( nnodes = (${!nprocs}+${!ppn}-1)/${!ppn} )) + (( ntasks = nnodes*${!ppn} )) + # With ESMF threading, the model wants to use the full node + export APRUN_UFS="${launcher} -n ${ntasks}" + unset nprocs ppn nnodes ntasks - nth_max=$(($npe_node_max / $npe_node_fcst)) +elif [[ "${step}" = "post" ]]; then - export NTHREADS_FV3=${nth_fv3:-$nth_max} - [[ $NTHREADS_FV3 -gt $nth_max ]] && export NTHREADS_FV3=$nth_max - export cores_per_node=$npe_node_max - export APRUN_FV3="$launcher -n $npe_fcst" + nth_max=$((npe_node_max / npe_node_post)) - export NTHREADS_REGRID_NEMSIO=${nth_regrid_nemsio:-1} - [[ $NTHREADS_REGRID_NEMSIO -gt $nth_max ]] && export NTHREADS_REGRID_NEMSIO=$nth_max - export APRUN_REGRID_NEMSIO="$launcher" + export NTHREADS_NP=${nth_np:-1} + [[ ${NTHREADS_NP} -gt ${nth_max} ]] && export NTHREADS_NP=${nth_max} + export APRUN_NP="${launcher} -n ${npe_post}" - export NTHREADS_REMAP=${nth_remap:-2} - [[ $NTHREADS_REMAP -gt $nth_max ]] && export NTHREADS_REMAP=$nth_max - export APRUN_REMAP="$launcher" - export I_MPI_DAPL_UD="enable" + export NTHREADS_DWN=${nth_dwn:-1} + [[ ${NTHREADS_DWN} -gt ${nth_max} ]] && export NTHREADS_DWN=${nth_max} + export APRUN_DWN="${launcher} -n ${npe_dwn}" -elif [ $step = "efcs" ]; then +elif [[ "${step}" = "ecen" ]]; then - nth_max=$(($npe_node_max / $npe_node_efcs)) + 
nth_max=$((npe_node_max / npe_node_ecen)) - export NTHREADS_FV3=${nth_efcs:-$nth_max} - [[ $NTHREADS_FV3 -gt $nth_max ]] && export NTHREADS_FV3=$nth_max - export cores_per_node=$npe_node_max - export APRUN_FV3="$launcher -n $npe_efcs" + export NTHREADS_ECEN=${nth_ecen:-${nth_max}} + [[ ${NTHREADS_ECEN} -gt ${nth_max} ]] && export NTHREADS_ECEN=${nth_max} + export APRUN_ECEN="${launcher} -n ${npe_ecen}" - export NTHREADS_REGRID_NEMSIO=${nth_regrid_nemsio:-1} - [[ $NTHREADS_REGRID_NEMSIO -gt $nth_max ]] && export NTHREADS_REGRID_NEMSIO=$nth_max - export APRUN_REGRID_NEMSIO="$launcher $LEVS" + export NTHREADS_CHGRES=${nth_chgres:-12} + [[ ${NTHREADS_CHGRES} -gt ${npe_node_max} ]] && export NTHREADS_CHGRES=${npe_node_max} + export APRUN_CHGRES="time" -elif [ $step = "post" ]; then + export NTHREADS_CALCINC=${nth_calcinc:-1} + [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max} + export APRUN_CALCINC="${launcher} -n ${npe_ecen}" - nth_max=$(($npe_node_max / $npe_node_post)) +elif [[ "${step}" = "esfc" ]]; then - export NTHREADS_NP=${nth_np:-1} - [[ $NTHREADS_NP -gt $nth_max ]] && export NTHREADS_NP=$nth_max - export APRUN_NP="$launcher" + nth_max=$((npe_node_max / npe_node_esfc)) - export NTHREADS_DWN=${nth_dwn:-1} - [[ $NTHREADS_DWN -gt $nth_max ]] && export NTHREADS_DWN=$nth_max - export APRUN_DWN="$launcher" + export NTHREADS_ESFC=${nth_esfc:-${nth_max}} + [[ ${NTHREADS_ESFC} -gt ${nth_max} ]] && export NTHREADS_ESFC=${nth_max} + export APRUN_ESFC="${launcher} -n ${npe_esfc}" -elif [ $step = "ecen" ]; then + export NTHREADS_CYCLE=${nth_cycle:-14} + [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} + export APRUN_CYCLE="${launcher} -n ${npe_esfc}" - nth_max=$(($npe_node_max / $npe_node_ecen)) +elif [[ "${step}" = "epos" ]]; then - export NTHREADS_ECEN=${nth_ecen:-$nth_max} - [[ $NTHREADS_ECEN -gt $nth_max ]] && export NTHREADS_ECEN=$nth_max - export APRUN_ECEN="$launcher" + nth_max=$((npe_node_max / npe_node_epos)) - export NTHREADS_CHGRES=${nth_chgres:-12} - [[ $NTHREADS_CHGRES -gt $npe_node_max ]] && export NTHREADS_CHGRES=$npe_node_max - export APRUN_CHGRES="time" + export NTHREADS_EPOS=${nth_epos:-${nth_max}} + [[ ${NTHREADS_EPOS} -gt ${nth_max} ]] && export NTHREADS_EPOS=${nth_max} + export APRUN_EPOS="${launcher} -n ${npe_epos}" - export NTHREADS_CALCINC=${nth_calcinc:-1} - [[ $NTHREADS_CALCINC -gt $nth_max ]] && export NTHREADS_CALCINC=$nth_max - export APRUN_CALCINC="$launcher" +elif [[ "${step}" = "postsnd" ]]; then -elif [ $step = "esfc" ]; then + export CFP_MP="YES" - nth_max=$(($npe_node_max / $npe_node_esfc)) + nth_max=$((npe_node_max / npe_node_postsnd)) - export NTHREADS_ESFC=${nth_esfc:-$nth_max} - [[ $NTHREADS_ESFC -gt $nth_max ]] && export NTHREADS_ESFC=$nth_max - export APRUN_ESFC="$launcher -n $npe_esfc" + export NTHREADS_POSTSND=${nth_postsnd:-1} + [[ ${NTHREADS_POSTSND} -gt ${nth_max} ]] && export NTHREADS_POSTSND=${nth_max} + export APRUN_POSTSND="${launcher} -n ${npe_postsnd}" - export NTHREADS_CYCLE=${nth_cycle:-14} - [[ $NTHREADS_CYCLE -gt $npe_node_max ]] && export NTHREADS_CYCLE=$npe_node_max - export APRUN_CYCLE="$launcher -n $npe_esfc" + export NTHREADS_POSTSNDCFP=${nth_postsndcfp:-1} + [[ ${NTHREADS_POSTSNDCFP} -gt ${nth_max} ]] && export NTHREADS_POSTSNDCFP=${nth_max} + export APRUN_POSTSNDCFP="${launcher} -n ${npe_postsndcfp} ${mpmd_opt}" -elif [ $step = "epos" ]; then +elif [[ "${step}" = "awips" ]]; then - nth_max=$(($npe_node_max / $npe_node_epos)) + nth_max=$((npe_node_max / npe_node_awips)) - 
export NTHREADS_EPOS=${nth_epos:-$nth_max} - [[ $NTHREADS_EPOS -gt $nth_max ]] && export NTHREADS_EPOS=$nth_max - export APRUN_EPOS="$launcher" + export NTHREADS_AWIPS=${nth_awips:-2} + [[ ${NTHREADS_AWIPS} -gt ${nth_max} ]] && export NTHREADS_AWIPS=${nth_max} + export APRUN_AWIPSCFP="${launcher} -n ${npe_awips} ${mpmd_opt}" -elif [ $step = "init" ]; then +elif [[ "${step}" = "gempak" ]]; then - export APRUN="$launcher" + export CFP_MP="YES" -elif [ $step = "postsnd" ]; then + if [[ ${CDUMP} == "gfs" ]]; then + npe_gempak=${npe_gempak_gfs} + npe_node_gempak=${npe_node_gempak_gfs} + fi - nth_max=$(($npe_node_max / $npe_node_postsnd)) + nth_max=$((npe_node_max / npe_node_gempak)) - export NTHREADS_POSTSND=${nth_postsnd:-1} - [[ $NTHREADS_POSTSND -gt $nth_max ]] && export NTHREADS_POSTSND=$nth_max - export APRUN_POSTSND="$launcher" + export NTHREADS_GEMPAK=${nth_gempak:-1} + [[ ${NTHREADS_GEMPAK} -gt ${nth_max} ]] && export NTHREADS_GEMPAK=${nth_max} + export APRUN="${launcher} -n ${npe_gempak} ${mpmd_opt}" - export NTHREADS_POSTSNDCFP=${nth_postsndcfp:-1} - [[ $NTHREADS_POSTSNDCFP -gt $nth_max ]] && export NTHREADS_POSTSNDCFP=$nth_max - export APRUN_POSTSNDCFP="$launcher" -elif [ $step = "awips" ]; then +elif [[ "${step}" = "fit2obs" ]]; then - echo "WARNING: $step is not enabled on $machine!" + nth_max=$((npe_node_max / npe_node_fit2obs)) -elif [ $step = "gempak" ]; then + export NTHREADS_FIT2OBS=${nth_fit2obs:-1} + [[ ${NTHREADS_FIT2OBS} -gt ${nth_max} ]] && export NTHREADS_FIT2OBS=${nth_max} + export MPIRUN="${launcher} -n ${npe_fit2obs}" - echo "WARNING: $step is not enabled on $machine!" fi diff --git a/env/JET.env b/env/JET.env index d32c4dd7b4..1295054731 100755 --- a/env/JET.env +++ b/env/JET.env @@ -1,10 +1,11 @@ -#!/bin/ksh -x +#! /usr/bin/env bash -if [ $# -ne 1 ]; then +if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" 
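The HERA.env changes above repeat one resource idiom for nearly every step: the available threads per task default to cores-per-node divided by tasks-per-node, the configured thread count is clamped to that maximum, and the launcher line is then built from the step's task count. A hedged, stand-alone sketch with made-up numbers (in the real env files these values are exported by the workflow configuration before the script is sourced):

#! /usr/bin/env bash
# Illustrative values only; not taken from the diff.
npe_node_max=40          # cores (PEs) per node
npe_node_atmanlrun=10    # tasks placed on each node for this step
nth_atmanlrun=8          # threads requested by the configuration

nth_max=$((npe_node_max / npe_node_atmanlrun))   # 40 / 10 = 4 threads fit per task
NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}}     # requested value, else the computed maximum
if [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]]; then # clamp an over-subscription: 8 -> 4
  NTHREADS_ATMANL=${nth_max}
fi
echo "APRUN_ATMANL='srun -l -n \${npe_atmanlrun}' with OMP threads = ${NTHREADS_ATMANL}"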
echo "argument can be any one of the following:" - echo "anal fcst post vrfy metp" + echo "atmanlrun atmensanlrun aeroanlrun landanlrun" + echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" exit 1 @@ -13,15 +14,17 @@ fi step=$1 -if [[ "$PARTITION_BATCH" = "xjet" ]]; then +if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_node_max=16 +elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then export npe_node_max=24 -elif [[ "$PARTITION_BATCH" = "vjet" || "$PARTITION_BATCH" = "sjet" ]]; then +elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then export npe_node_max=16 -elif [[ "$PARTITION_BATCH" = "kjet" ]]; then +elif [[ "${PARTITION_BATCH}" = "kjet" ]]; then export npe_node_max=40 fi -#JKHexport launcher="srun -l --export=ALL" export launcher="srun -l --epilog=/apps/local/bin/report-mem --export=ALL" +export mpmd_opt="--multi-prog --output=${step}.%J.%t.out" # Configure MPI environment export OMP_STACKSIZE=2048000 @@ -30,213 +33,238 @@ export NTHSTACK=1024000000 ulimit -s unlimited ulimit -a -export job=${PBS_JOBNAME:-$step} -export jobid=${job}.${PBS_JOBID:-$$} - -if [ $step = "prep" -o $step = "prepbufr" ]; then +if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then - nth_max=$(($npe_node_max / $npe_node_prep)) + nth_max=$((npe_node_max / npe_node_prep)) export POE="NO" export BACK="NO" export sys_tp="JET" + export launcher_PREP="srun" + +elif [[ "${step}" = "preplandobs" ]]; then -elif [ $step = "waveinit" -o $step = "waveprep" -o $step = "wavepostsbs" -o $step = "wavepostbndpnt" -o $step = "wavepostbndpntbll" -o $step = "wavepostpnt" ]; then + export APRUN_CALCFIMS="${launcher} -n 1" + +elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostbndpntbll" ]] || [[ "${step}" = "wavepostpnt" ]]; then - export mpmd="--multi-prog" export CFP_MP="YES" - if [ $step = "waveprep" ]; then export MP_PULSE=0 ; fi + if [[ "${step}" = "waveprep" ]]; then export MP_PULSE=0 ; fi export wavempexec=${launcher} - export wave_mpmd=${mpmd} + export wave_mpmd=${mpmd_opt} -elif [ $step = "anal" ]; then +elif [[ "${step}" = "atmanlrun" ]]; then - export MKL_NUM_THREADS=4 - export MKL_CBWR=AUTO + nth_max=$((npe_node_max / npe_node_atmanlrun)) - export CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="$launcher -n \$ncmd --multi-prog" + export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}} + [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} + export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" - nth_max=$(($npe_node_max / $npe_node_anal)) +elif [[ "${step}" = "atmensanlrun" ]]; then - export NTHREADS_GSI=${nth_anal:-$nth_max} - [[ $NTHREADS_GSI -gt $nth_max ]] && export NTHREADS_GSI=$nth_max - export APRUN_GSI="$launcher" + nth_max=$((npe_node_max / npe_node_atmensanlrun)) - export NTHREADS_CALCINC=${nth_calcinc:-1} - [[ $NTHREADS_CALCINC -gt $nth_max ]] && export NTHREADS_CALCINC=$nth_max - export APRUN_CALCINC="$launcher" + export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} + [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} + export APRUN_ATMENSANL="${launcher} ${npe_atmensanlrun}" - export NTHREADS_CYCLE=${nth_cycle:-12} - [[ $NTHREADS_CYCLE -gt $npe_node_max ]] && export NTHREADS_CYCLE=$npe_node_max - npe_cycle=${ntiles:-6} - export APRUN_CYCLE="$launcher -n $npe_cycle" +elif [[ "${step}" = "aeroanlrun" ]]; then + export APRUNCFP="${launcher} -n \$ncmd 
${mpmd_opt}" - export NTHREADS_GAUSFCANL=1 - npe_gausfcanl=${npe_gausfcanl:-1} - export APRUN_GAUSFCANL="$launcher -n $npe_gausfcanl" + nth_max=$((npe_node_max / npe_node_aeroanlrun)) - export NTHREADS_CHGRES=${nth_echgres:-1} - [[ $NTHREADS_CHGRES -gt $npe_node_max ]] && export NTHREADS_CHGRES=$npe_node_max - export APRUN_CHGRES="" + export NTHREADS_AEROANL=${nth_aeroanlrun:-${nth_max}} + [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max} + export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun}" -elif [ $step = "gldas" ]; then +elif [[ "${step}" = "landanlrun" ]]; then - nth_max=$(($npe_node_max / $npe_node_gldas)) + nth_max=$((npe_node_max / npe_node_landanlrun)) - export NTHREADS_GLDAS=${nth_gldas:-$nth_max} - [[ $NTHREADS_GLDAS -gt $nth_max ]] && export NTHREADS_GLDAS=$nth_max - export APRUN_GLDAS="$launcher -n $npe_gldas" + export NTHREADS_LANDANL=${nth_landanlrun:-${nth_max}} + [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max} + export APRUN_LANDANL="${launcher} -n ${npe_landanlrun}" - export NTHREADS_GAUSSIAN=${nth_gaussian:-1} - [[ $NTHREADS_GAUSSIAN -gt $nth_max ]] && export NTHREADS_GAUSSIAN=$nth_max - export APRUN_GAUSSIAN="$launcher -n $npe_gaussian" +elif [[ "${step}" = "ocnanalbmat" ]]; then -# Must run data processing with exactly the number of tasks as time -# periods being processed. + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - npe_gldas_data_proc=$(($gldas_spinup_hours + 12)) - export APRUN_GLDAS_DATA_PROC="$launcher -n $npe_gldas_data_proc --multi-prog" + nth_max=$((npe_node_max / npe_node_ocnanalbmat)) -elif [ $step = "eobs" ]; then + export NTHREADS_OCNANAL=${nth_ocnanalbmat:-${nth_max}} + [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} + export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalbmat}" - export MKL_NUM_THREADS=4 - export MKL_CBWR=AUTO +elif [[ "${step}" = "ocnanalrun" ]]; then - nth_max=$(($npe_node_max / $npe_node_eobs)) + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - export NTHREADS_GSI=${nth_eobs:-$nth_max} - [[ $NTHREADS_GSI -gt $nth_max ]] && export NTHREADS_GSI=$nth_max - export APRUN_GSI="$launcher" + nth_max=$((npe_node_max / npe_node_ocnanalrun)) + + export NTHREADS_OCNANAL=${nth_ocnanalrun:-${nth_max}} + [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} + export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalrun}" + +elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then + + export MKL_NUM_THREADS=4 + export MKL_CBWR=AUTO export CFP_MP=${CFP_MP:-"YES"} export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="$launcher -n \$ncmd --multi-prog" + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" -elif [ $step = "eupd" ]; then + nth_max=$((npe_node_max / npe_node_anal)) - nth_max=$(($npe_node_max / $npe_node_eupd)) + export NTHREADS_GSI=${nth_anal:-${nth_max}} + [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max} + export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_anal}}" - export NTHREADS_ENKF=${nth_eupd:-$nth_max} - [[ $NTHREADS_ENKF -gt $nth_max ]] && export NTHREADS_ENKF=$nth_max - export APRUN_ENKF="$launcher" + export NTHREADS_CALCINC=${nth_calcinc:-1} + [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max} + export APRUN_CALCINC="${launcher} \$ncmd" - export CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="$launcher -n \$ncmd --multi-prog" + export NTHREADS_CYCLE=${nth_cycle:-12} + [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export 
NTHREADS_CYCLE=${npe_node_max} + npe_cycle=${ntiles:-6} + export APRUN_CYCLE="${launcher} -n ${npe_cycle}" -elif [ $step = "fcst" ]; then + export NTHREADS_GAUSFCANL=1 + npe_gausfcanl=${npe_gausfcanl:-1} + export APRUN_GAUSFCANL="${launcher} -n ${npe_gausfcanl}" - #PEs and PEs/node can differ for GFS and GDAS forecasts if threading differs - if [[ $CDUMP == "gfs" ]]; then - npe_fcst=$npe_fcst_gfs - npe_node_fcst=$npe_node_fcst_gfs - nth_fv3=$nth_fv3_gfs - fi +elif [[ "${step}" = "sfcanl" ]]; then + nth_max=$((npe_node_max / npe_node_sfcanl)) - nth_max=$(($npe_node_max / $npe_node_fcst)) + export NTHREADS_CYCLE=${nth_sfcanl:-14} + [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} + npe_sfcanl=${ntiles:-6} + export APRUN_CYCLE="${launcher} -n ${npe_sfcanl}" - export NTHREADS_FV3=${nth_fv3:-$nth_max} - [[ $NTHREADS_FV3 -gt $nth_max ]] && export NTHREADS_FV3=$nth_max - export cores_per_node=$npe_node_max - if [[ $CDUMP == "gfs" ]]; then - npe_fcst=$npe_fcst_gfs - fi - export APRUN_FV3="$launcher -n $npe_fcst" +elif [[ "${step}" = "eobs" ]]; then - export NTHREADS_REGRID_NEMSIO=${nth_regrid_nemsio:-1} - [[ $NTHREADS_REGRID_NEMSIO -gt $nth_max ]] && export NTHREADS_REGRID_NEMSIO=$nth_max - export APRUN_REGRID_NEMSIO="$launcher" + export MKL_NUM_THREADS=4 + export MKL_CBWR=AUTO - export NTHREADS_REMAP=${nth_remap:-2} - [[ $NTHREADS_REMAP -gt $nth_max ]] && export NTHREADS_REMAP=$nth_max - export APRUN_REMAP="$launcher" - export I_MPI_DAPL_UD="enable" + nth_max=$((npe_node_max / npe_node_eobs)) -elif [ $step = "efcs" ]; then + export NTHREADS_GSI=${nth_eobs:-${nth_max}} + [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max} + export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_eobs}}" - nth_max=$(($npe_node_max / $npe_node_efcs)) + export CFP_MP=${CFP_MP:-"YES"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - export NTHREADS_FV3=${nth_efcs:-$nth_max} - [[ $NTHREADS_FV3 -gt $nth_max ]] && export NTHREADS_FV3=$nth_max - export cores_per_node=$npe_node_max - export APRUN_FV3="$launcher -n $npe_efcs" +elif [[ "${step}" = "eupd" ]]; then - export NTHREADS_REGRID_NEMSIO=${nth_regrid_nemsio:-1} - [[ $NTHREADS_REGRID_NEMSIO -gt $nth_max ]] && export NTHREADS_REGRID_NEMSIO=$nth_max - export APRUN_REGRID_NEMSIO="$launcher $LEVS" + nth_max=$((npe_node_max / npe_node_eupd)) -elif [ $step = "post" ]; then + export NTHREADS_ENKF=${nth_eupd:-${nth_max}} + [[ ${NTHREADS_ENKF} -gt ${nth_max} ]] && export NTHREADS_ENKF=${nth_max} + export APRUN_ENKF="${launcher} -n ${npe_enkf:-${npe_eupd}}" - nth_max=$(($npe_node_max / $npe_node_post)) + export CFP_MP=${CFP_MP:-"YES"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + +elif [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then + + if [[ "${CDUMP}" =~ "gfs" ]]; then + nprocs="npe_${step}_gfs" + ppn="npe_node_${step}_gfs" || ppn="npe_node_${step}" + else + nprocs="npe_${step}" + ppn="npe_node_${step}" + fi + (( nnodes = (${!nprocs}+${!ppn}-1)/${!ppn} )) + (( ntasks = nnodes*${!ppn} )) + # With ESMF threading, the model wants to use the full node + export APRUN_UFS="${launcher} -n ${ntasks}" + unset nprocs ppn nnodes ntasks + +elif [[ "${step}" = "post" ]]; then + + nth_max=$((npe_node_max / npe_node_post)) export NTHREADS_NP=${nth_np:-1} - [[ $NTHREADS_NP -gt $nth_max ]] && export NTHREADS_NP=$nth_max - export APRUN_NP="$launcher --epilog=/apps/local/bin/report-mem" ## JKH + [[ ${NTHREADS_NP} -gt ${nth_max} ]] && export 
NTHREADS_NP=${nth_max} + export APRUN_NP="${launcher} -n ${npe_post}" export NTHREADS_DWN=${nth_dwn:-1} - [[ $NTHREADS_DWN -gt $nth_max ]] && export NTHREADS_DWN=$nth_max - export APRUN_DWN="$launcher --epilog=/apps/local/bin/report-mem" ## JKH + [[ ${NTHREADS_DWN} -gt ${nth_max} ]] && export NTHREADS_DWN=${nth_max} + export APRUN_DWN="${launcher} -n ${npe_dwn}" -elif [ $step = "ecen" ]; then +elif [[ "${step}" = "ecen" ]]; then - nth_max=$(($npe_node_max / $npe_node_ecen)) + nth_max=$((npe_node_max / npe_node_ecen)) - export NTHREADS_ECEN=${nth_ecen:-$nth_max} - [[ $NTHREADS_ECEN -gt $nth_max ]] && export NTHREADS_ECEN=$nth_max - export APRUN_ECEN="$launcher" + export NTHREADS_ECEN=${nth_ecen:-${nth_max}} + [[ ${NTHREADS_ECEN} -gt ${nth_max} ]] && export NTHREADS_ECEN=${nth_max} + export APRUN_ECEN="${launcher} -n ${npe_ecen}" export NTHREADS_CHGRES=${nth_chgres:-12} - [[ $NTHREADS_CHGRES -gt $npe_node_max ]] && export NTHREADS_CHGRES=$npe_node_max + [[ ${NTHREADS_CHGRES} -gt ${npe_node_max} ]] && export NTHREADS_CHGRES=${npe_node_max} export APRUN_CHGRES="time" export NTHREADS_CALCINC=${nth_calcinc:-1} - [[ $NTHREADS_CALCINC -gt $nth_max ]] && export NTHREADS_CALCINC=$nth_max - export APRUN_CALCINC="$launcher" + [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max} + export APRUN_CALCINC="${launcher} -n ${npe_ecen}" -elif [ $step = "esfc" ]; then +elif [[ "${step}" = "esfc" ]]; then - nth_max=$(($npe_node_max / $npe_node_esfc)) + nth_max=$((npe_node_max / npe_node_esfc)) - export NTHREADS_ESFC=${nth_esfc:-$nth_max} - [[ $NTHREADS_ESFC -gt $nth_max ]] && export NTHREADS_ESFC=$nth_max - export APRUN_ESFC="$launcher -n $npe_esfc" + export NTHREADS_ESFC=${nth_esfc:-${nth_max}} + [[ ${NTHREADS_ESFC} -gt ${nth_max} ]] && export NTHREADS_ESFC=${nth_max} + export APRUN_ESFC="${launcher} -n ${npe_esfc}" export NTHREADS_CYCLE=${nth_cycle:-14} - [[ $NTHREADS_CYCLE -gt $npe_node_max ]] && export NTHREADS_CYCLE=$npe_node_max - export APRUN_CYCLE="$launcher -n $npe_esfc" + [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} + export APRUN_CYCLE="${launcher} -n ${npe_esfc}" -elif [ $step = "epos" ]; then +elif [[ "${step}" = "epos" ]]; then - nth_max=$(($npe_node_max / $npe_node_epos)) + nth_max=$((npe_node_max / npe_node_epos)) - export NTHREADS_EPOS=${nth_epos:-$nth_max} - [[ $NTHREADS_EPOS -gt $nth_max ]] && export NTHREADS_EPOS=$nth_max - export APRUN_EPOS="$launcher" + export NTHREADS_EPOS=${nth_epos:-${nth_max}} + [[ ${NTHREADS_EPOS} -gt ${nth_max} ]] && export NTHREADS_EPOS=${nth_max} + export APRUN_EPOS="${launcher} -n ${npe_epos}" -elif [ $step = "init" ]; then +elif [[ "${step}" = "init" ]]; then - export APRUN="$launcher" + export APRUN="${launcher} -n ${npe_init}" -elif [ $step = "postsnd" ]; then +elif [[ "${step}" = "postsnd" ]]; then - nth_max=$(($npe_node_max / $npe_node_postsnd)) + export CFP_MP="YES" + + nth_max=$((npe_node_max / npe_node_postsnd)) export NTHREADS_POSTSND=${nth_postsnd:-1} - [[ $NTHREADS_POSTSND -gt $nth_max ]] && export NTHREADS_POSTSND=$nth_max - export APRUN_POSTSND="$launcher" + [[ ${NTHREADS_POSTSND} -gt ${nth_max} ]] && export NTHREADS_POSTSND=${nth_max} + export APRUN_POSTSND="${launcher} -n ${npe_postsnd}" export NTHREADS_POSTSNDCFP=${nth_postsndcfp:-1} - [[ $NTHREADS_POSTSNDCFP -gt $nth_max ]] && export NTHREADS_POSTSNDCFP=$nth_max - export APRUN_POSTSNDCFP="$launcher" + [[ ${NTHREADS_POSTSNDCFP} -gt ${nth_max} ]] && export NTHREADS_POSTSNDCFP=${nth_max} + export APRUN_POSTSNDCFP="${launcher} -n 
${npe_postsndcfp} ${mpmd_opt}" + +elif [[ "${step}" = "awips" ]]; then + + echo "WARNING: ${step} is not enabled on ${machine}!" + +elif [[ "${step}" = "gempak" ]]; then + + echo "WARNING: ${step} is not enabled on ${machine}!" -elif [ $step = "awips" ]; then +elif [[ "${step}" = "fit2obs" ]]; then - echo "WARNING: $step is not enabled on $machine!" + nth_max=$((npe_node_max / npe_node_fit2obs)) -elif [ $step = "gempak" ]; then + export NTHREADS_FIT2OBS=${nth_fit2obs:-1} + [[ ${NTHREADS_FIT2OBS} -gt ${nth_max} ]] && export NTHREADS_FIT2OBS=${nth_max} + export MPIRUN="${launcher} -n ${npe_fit2obs}" - echo "WARNING: $step is not enabled on $machine!" fi diff --git a/env/ORION.env b/env/ORION.env index be89113c14..321ce9d3c5 100755 --- a/env/ORION.env +++ b/env/ORION.env @@ -1,10 +1,11 @@ -#!/bin/ksh -x +#! /usr/bin/env bash -if [ $# -ne 1 ]; then +if [[ $# -ne 1 ]]; then echo "Must specify an input argument to set runtime environment variables!" echo "argument can be any one of the following:" - echo "anal fcst post vrfy metp" + echo "atmanlrun atmensanlrun aeroanlrun landanlrun" + echo "anal sfcanl fcst post vrfy metp" echo "eobs eupd ecen efcs epos" echo "postsnd awips gempak" exit 1 @@ -15,6 +16,7 @@ step=$1 export npe_node_max=40 export launcher="srun -l --export=ALL" +export mpmd_opt="--multi-prog --output=${step}.%J.%t.out" # Configure MPI environment export MPI_BUFS_PER_PROC=2048 @@ -30,206 +32,261 @@ export NTHSTACK=1024000000 ulimit -s unlimited ulimit -a -export job=${PBS_JOBNAME:-$step} -export jobid=${job}.${PBS_JOBID:-$$} +if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then -if [ $step = "prep" -o $step = "prepbufr" ]; then - - nth_max=$(($npe_node_max / $npe_node_prep)) + nth_max=$((npe_node_max / npe_node_prep)) export POE="NO" export BACK=${BACK:-"YES"} export sys_tp="ORION" + export launcher_PREP="srun" + +elif [[ "${step}" = "preplandobs" ]]; then -elif [ $step = "waveinit" -o $step = "waveprep" -o $step = "wavepostsbs" -o $step = "wavepostbndpnt" -o $step = "wavepostpnt" ]; then + export APRUN_CALCFIMS="${launcher} -n 1" + +elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || \ + [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostpnt" ]] || [[ "${step}" == "wavepostbndpntbll" ]]; then - export mpmd="--multi-prog" export CFP_MP="YES" - if [ $step = "waveprep" ]; then export MP_PULSE=0 ; fi + if [[ "${step}" = "waveprep" ]]; then export MP_PULSE=0 ; fi export wavempexec=${launcher} - export wave_mpmd=${mpmd} + export wave_mpmd=${mpmd_opt} -elif [ $step = "anal" ]; then +elif [[ "${step}" = "atmanlrun" ]]; then - export MKL_NUM_THREADS=4 - export MKL_CBWR=AUTO + nth_max=$((npe_node_max / npe_node_atmanlrun)) - export CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="$launcher -n \$ncmd --multi-prog" + export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}} + [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} + export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" - nth_max=$(($npe_node_max / $npe_node_anal)) +elif [[ "${step}" = "atmensanlrun" ]]; then - export NTHREADS_GSI=${nth_anal:-$nth_max} - [[ $NTHREADS_GSI -gt $nth_max ]] && export NTHREADS_GSI=$nth_max - export APRUN_GSI="$launcher" + nth_max=$((npe_node_max / npe_node_atmensanlrun)) - export NTHREADS_CALCINC=${nth_calcinc:-1} - [[ $NTHREADS_CALCINC -gt $nth_max ]] && export NTHREADS_CALCINC=$nth_max - export APRUN_CALCINC="$launcher" + export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} 
+ [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} + export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun}" - export NTHREADS_CYCLE=${nth_cycle:-12} - [[ $NTHREADS_CYCLE -gt $npe_node_max ]] && export NTHREADS_CYCLE=$npe_node_max - npe_cycle=${ntiles:-6} - export APRUN_CYCLE="$launcher -n $npe_cycle" +elif [[ "${step}" = "aeroanlrun" ]]; then - export NTHREADS_GAUSFCANL=1 - npe_gausfcanl=${npe_gausfcanl:-1} + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + + nth_max=$((npe_node_max / npe_node_aeroanlrun)) - export APRUN_GAUSFCANL="$launcher -n $npe_gausfcanl" + export NTHREADS_AEROANL=${nth_aeroanlrun:-${nth_max}} + [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max} + export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun}" -elif [ $step = "gldas" ]; then +elif [[ "${step}" = "landanlrun" ]]; then - nth_max=$(($npe_node_max / $npe_node_gldas)) + nth_max=$((npe_node_max / npe_node_landanlrun)) - export NTHREADS_GLDAS=${nth_gldas:-$nth_max} - [[ $NTHREADS_GLDAS -gt $nth_max ]] && export NTHREADS_GLDAS=$nth_max - export APRUN_GLDAS="$launcher -n $npe_gldas" + export NTHREADS_LANDANL=${nth_landanlrun:-${nth_max}} + [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max} + export APRUN_LANDANL="${launcher} -n ${npe_landanlrun}" - export NTHREADS_GAUSSIAN=${nth_gaussian:-1} - [[ $NTHREADS_GAUSSIAN -gt $nth_max ]] && export NTHREADS_GAUSSIAN=$nth_max - export APRUN_GAUSSIAN="$launcher -n $npe_gaussian" +elif [[ "${step}" = "ocnanalbmat" ]]; then -# Must run data processing with exactly the number of tasks as time -# periods being processed. + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - npe_gldas_data_proc=$(($gldas_spinup_hours + 12)) - export APRUN_GLDAS_DATA_PROC="$launcher -n $npe_gldas_data_proc --multi-prog" + nth_max=$((npe_node_max / npe_node_ocnanalbmat)) -elif [ $step = "eobs" ]; then + export NTHREADS_OCNANAL=${nth_ocnanalbmat:-${nth_max}} + [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} + export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalbmat}" + +elif [[ "${step}" = "ocnanalrun" ]]; then + + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + + nth_max=$((npe_node_max / npe_node_ocnanalrun)) + + export NTHREADS_OCNANAL=${nth_ocnanalrun:-${nth_max}} + [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} + export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalrun}" + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + + nth_max=$((npe_node_max / npe_node_ocnanalchkpt)) + + export NTHREADS_OCNANAL=${nth_ocnanalchkpt:-${nth_max}} + [[ ${NTHREADS_OCNANAL} -gt ${nth_max} ]] && export NTHREADS_OCNANAL=${nth_max} + export APRUN_OCNANAL="${launcher} -n ${npe_ocnanalchkpt}" + +elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then export MKL_NUM_THREADS=4 export MKL_CBWR=AUTO export CFP_MP=${CFP_MP:-"YES"} export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="$launcher -n \$ncmd --multi-prog" + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" - nth_max=$(($npe_node_max / $npe_node_eobs)) + nth_max=$((npe_node_max / npe_node_anal)) - export NTHREADS_GSI=${nth_eobs:-$nth_max} - [[ $NTHREADS_GSI -gt $nth_max ]] && export NTHREADS_GSI=$nth_max - export APRUN_GSI="$launcher" + export NTHREADS_GSI=${nth_anal:-${nth_max}} + [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max} + export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_anal}}" -elif [ $step = "eupd" ]; 
then + export NTHREADS_CALCINC=${nth_calcinc:-1} + [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max} + export APRUN_CALCINC="${launcher} \$ncmd" - export CFP_MP=${CFP_MP:-"YES"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="$launcher -n \$ncmd --multi-prog" + export NTHREADS_CYCLE=${nth_cycle:-12} + [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} + npe_cycle=${ntiles:-6} + export APRUN_CYCLE="${launcher} -n ${npe_cycle}" - nth_max=$(($npe_node_max / $npe_node_eupd)) + export NTHREADS_GAUSFCANL=1 + npe_gausfcanl=${npe_gausfcanl:-1} + export APRUN_GAUSFCANL="${launcher} -n ${npe_gausfcanl}" - export NTHREADS_ENKF=${nth_eupd:-$nth_max} - [[ $NTHREADS_ENKF -gt $nth_max ]] && export NTHREADS_ENKF=$nth_max - export APRUN_ENKF="$launcher" +elif [[ "${step}" = "sfcanl" ]]; then + nth_max=$((npe_node_max / npe_node_sfcanl)) -elif [ $step = "fcst" ]; then + export NTHREADS_CYCLE=${nth_sfcanl:-14} + [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} + npe_sfcanl=${ntiles:-6} + export APRUN_CYCLE="${launcher} -n ${npe_sfcanl}" - #PEs and PEs/node can differ for GFS and GDAS forecasts if threading differs - if [[ $CDUMP == "gfs" ]]; then - npe_fcst=$npe_fcst_gfs - npe_node_fcst=$npe_node_fcst_gfs - nth_fv3=$nth_fv3_gfs - fi +elif [[ "${step}" = "eobs" ]]; then - nth_max=$(($npe_node_max / $npe_node_fcst)) + export MKL_NUM_THREADS=4 + export MKL_CBWR=AUTO - export NTHREADS_FV3=${nth_fv3:-$nth_max} - [[ $NTHREADS_FV3 -gt $nth_max ]] && export NTHREADS_FV3=$nth_max - export cores_per_node=$npe_node_max - export APRUN_FV3="$launcher -n $npe_fcst" + export CFP_MP=${CFP_MP:-"YES"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + + nth_max=$((npe_node_max / npe_node_eobs)) + + export NTHREADS_GSI=${nth_eobs:-${nth_max}} + [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max} + export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_eobs}}" - export NTHREADS_REGRID_NEMSIO=${nth_regrid_nemsio:-1} - [[ $NTHREADS_REGRID_NEMSIO -gt $nth_max ]] && export NTHREADS_REGRID_NEMSIO=$nth_max - export APRUN_REGRID_NEMSIO="$launcher" +elif [[ "${step}" = "eupd" ]]; then - export NTHREADS_REMAP=${nth_remap:-2} - [[ $NTHREADS_REMAP -gt $nth_max ]] && export NTHREADS_REMAP=$nth_max - export APRUN_REMAP="$launcher" - export I_MPI_DAPL_UD="enable" + export CFP_MP=${CFP_MP:-"YES"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" -elif [ $step = "efcs" ]; then + nth_max=$((npe_node_max / npe_node_eupd)) - nth_max=$(($npe_node_max / $npe_node_efcs)) + export NTHREADS_ENKF=${nth_eupd:-${nth_max}} + [[ ${NTHREADS_ENKF} -gt ${nth_max} ]] && export NTHREADS_ENKF=${nth_max} + export APRUN_ENKF="${launcher} -n ${npe_enkf:-${npe_eupd}}" - export NTHREADS_FV3=${nth_efcs:-$nth_max} - [[ $NTHREADS_FV3 -gt $nth_max ]] && export NTHREADS_FV3=$nth_max - export cores_per_node=$npe_node_max - export APRUN_FV3="$launcher -n $npe_efcs" +elif [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then - export NTHREADS_REGRID_NEMSIO=${nth_regrid_nemsio:-1} - [[ $NTHREADS_REGRID_NEMSIO -gt $nth_max ]] && export NTHREADS_REGRID_NEMSIO=$nth_max - export APRUN_REGRID_NEMSIO="$launcher $LEVS" + export OMP_STACKSIZE=512M + if [[ "${CDUMP}" =~ "gfs" ]]; then + nprocs="npe_${step}_gfs" + ppn="npe_node_${step}_gfs" || ppn="npe_node_${step}" + else + nprocs="npe_${step}" + ppn="npe_node_${step}" + fi + (( nnodes = (${!nprocs}+${!ppn}-1)/${!ppn} )) + (( ntasks = 
nnodes*${!ppn} )) + # With ESMF threading, the model wants to use the full node + export APRUN_UFS="${launcher} -n ${ntasks}" + unset nprocs ppn nnodes ntasks -elif [ $step = "post" ]; then +elif [[ "${step}" = "post" ]]; then - nth_max=$(($npe_node_max / $npe_node_post)) + nth_max=$((npe_node_max / npe_node_post)) export NTHREADS_NP=${nth_np:-1} - [[ $NTHREADS_NP -gt $nth_max ]] && export NTHREADS_NP=$nth_max - export APRUN_NP="$launcher" + [[ ${NTHREADS_NP} -gt ${nth_max} ]] && export NTHREADS_NP=${nth_max} + export APRUN_NP="${launcher} -n ${npe_post}" export NTHREADS_DWN=${nth_dwn:-1} - [[ $NTHREADS_DWN -gt $nth_max ]] && export NTHREADS_DWN=$nth_max - export APRUN_DWN="$launcher" + [[ ${NTHREADS_DWN} -gt ${nth_max} ]] && export NTHREADS_DWN=${nth_max} + export APRUN_DWN="${launcher} -n ${npe_dwn}" -elif [ $step = "ecen" ]; then +elif [[ "${step}" = "ecen" ]]; then - nth_max=$(($npe_node_max / $npe_node_ecen)) + nth_max=$((npe_node_max / npe_node_ecen)) - export NTHREADS_ECEN=${nth_ecen:-$nth_max} - [[ $NTHREADS_ECEN -gt $nth_max ]] && export NTHREADS_ECEN=$nth_max - export APRUN_ECEN="$launcher" + export NTHREADS_ECEN=${nth_ecen:-${nth_max}} + [[ ${NTHREADS_ECEN} -gt ${nth_max} ]] && export NTHREADS_ECEN=${nth_max} + export APRUN_ECEN="${launcher} -n ${npe_ecen}" export NTHREADS_CHGRES=${nth_chgres:-12} - [[ $NTHREADS_CHGRES -gt $npe_node_max ]] && export NTHREADS_CHGRES=$npe_node_max + [[ ${NTHREADS_CHGRES} -gt ${npe_node_max} ]] && export NTHREADS_CHGRES=${npe_node_max} export APRUN_CHGRES="time" export NTHREADS_CALCINC=${nth_calcinc:-1} - [[ $NTHREADS_CALCINC -gt $nth_max ]] && export NTHREADS_CALCINC=$nth_max - export APRUN_CALCINC="$launcher" + [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max} + export APRUN_CALCINC="${launcher} -n ${npe_ecen}" -elif [ $step = "esfc" ]; then +elif [[ "${step}" = "esfc" ]]; then - nth_max=$(($npe_node_max / $npe_node_esfc)) + nth_max=$((npe_node_max / npe_node_esfc)) - export NTHREADS_ESFC=${nth_esfc:-$nth_max} - [[ $NTHREADS_ESFC -gt $nth_max ]] && export NTHREADS_ESFC=$nth_max - export APRUN_ESFC="$launcher -n $npe_esfc" + export NTHREADS_ESFC=${nth_esfc:-${nth_max}} + [[ ${NTHREADS_ESFC} -gt ${nth_max} ]] && export NTHREADS_ESFC=${nth_max} + export APRUN_ESFC="${launcher} -n ${npe_esfc}" export NTHREADS_CYCLE=${nth_cycle:-14} - [[ $NTHREADS_CYCLE -gt $npe_node_max ]] && export NTHREADS_CYCLE=$npe_node_max - export APRUN_CYCLE="$launcher -n $npe_esfc" + [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} + export APRUN_CYCLE="${launcher} -n ${npe_esfc}" -elif [ $step = "epos" ]; then +elif [[ "${step}" = "epos" ]]; then - nth_max=$(($npe_node_max / $npe_node_epos)) + nth_max=$((npe_node_max / npe_node_epos)) - export NTHREADS_EPOS=${nth_epos:-$nth_max} - [[ $NTHREADS_EPOS -gt $nth_max ]] && export NTHREADS_EPOS=$nth_max - export APRUN_EPOS="$launcher" + export NTHREADS_EPOS=${nth_epos:-${nth_max}} + [[ ${NTHREADS_EPOS} -gt ${nth_max} ]] && export NTHREADS_EPOS=${nth_max} + export APRUN_EPOS="${launcher} -n ${npe_epos}" -elif [ $step = "init" ]; then +elif [[ "${step}" = "postsnd" ]]; then - export APRUN="$launcher" - -elif [ $step = "postsnd" ]; then + export CFP_MP="YES" - nth_max=$(($npe_node_max / $npe_node_postsnd)) + nth_max=$((npe_node_max / npe_node_postsnd)) export NTHREADS_POSTSND=${nth_postsnd:-1} - [[ $NTHREADS_POSTSND -gt $nth_max ]] && export NTHREADS_POSTSND=$nth_max - export APRUN_POSTSND="$launcher" + [[ ${NTHREADS_POSTSND} -gt ${nth_max} ]] && export 
NTHREADS_POSTSND=${nth_max} + export APRUN_POSTSND="${launcher} -n ${npe_postsnd}" export NTHREADS_POSTSNDCFP=${nth_postsndcfp:-1} - [[ $NTHREADS_POSTSNDCFP -gt $nth_max ]] && export NTHREADS_POSTSNDCFP=$nth_max - export APRUN_POSTSNDCFP="$launcher" + [[ ${NTHREADS_POSTSNDCFP} -gt ${nth_max} ]] && export NTHREADS_POSTSNDCFP=${nth_max} + export APRUN_POSTSNDCFP="${launcher} -n ${npe_postsndcfp} ${mpmd_opt}" + +elif [[ "${step}" = "awips" ]]; then + + nth_max=$((npe_node_max / npe_node_awips)) + + export NTHREADS_AWIPS=${nth_awips:-2} + [[ ${NTHREADS_AWIPS} -gt ${nth_max} ]] && export NTHREADS_AWIPS=${nth_max} + export APRUN_AWIPSCFP="${launcher} -n ${npe_awips} ${mpmd_opt}" + +elif [[ "${step}" = "gempak" ]]; then + + export CFP_MP="YES" + + if [[ ${CDUMP} == "gfs" ]]; then + npe_gempak=${npe_gempak_gfs} + npe_node_gempak=${npe_node_gempak_gfs} + fi + + nth_max=$((npe_node_max / npe_node_gempak)) + + export NTHREADS_GEMPAK=${nth_gempak:-1} + [[ ${NTHREADS_GEMPAK} -gt ${nth_max} ]] && export NTHREADS_GEMPAK=${nth_max} + export APRUN="${launcher} -n ${npe_gempak} ${mpmd_opt}" -elif [ $step = "awips" ]; then +elif [[ "${step}" = "fit2obs" ]]; then - echo "WARNING: $step is not enabled on $machine!" + nth_max=$((npe_node_max / npe_node_fit2obs)) -elif [ $step = "gempak" ]; then + export NTHREADS_FIT2OBS=${nth_fit2obs:-1} + [[ ${NTHREADS_FIT2OBS} -gt ${nth_max} ]] && export NTHREADS_FIT2OBS=${nth_max} + export MPIRUN="${launcher} -n ${npe_fit2obs}" - echo "WARNING: $step is not enabled on $machine!" fi diff --git a/env/S4.env b/env/S4.env new file mode 100755 index 0000000000..da6f124232 --- /dev/null +++ b/env/S4.env @@ -0,0 +1,250 @@ +#! /usr/bin/env bash + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input argument to set runtime environment variables!" 
+ echo "argument can be any one of the following:" + echo "atmanlrun atmensanlrun aeroanlrun landanlrun" + echo "anal sfcanl fcst post vrfy metp" + echo "eobs eupd ecen efcs epos" + echo "postsnd awips gempak" + exit 1 + +fi + +step=$1 +PARTITION_BATCH=${PARTITION_BATCH:-"s4"} + +if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 +elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 +fi +export launcher="srun -l --export=ALL" +export mpmd_opt="--multi-prog --output=${step}.%J.%t.out" + +# Configure MPI environment +export OMP_STACKSIZE=2048000 +export NTHSTACK=1024000000 + +ulimit -s unlimited +ulimit -a + +if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then + + nth_max=$((npe_node_max / npe_node_prep)) + + export POE="NO" + export BACK="NO" + export sys_tp="S4" + export launcher_PREP="srun" + +elif [[ "${step}" = "preplandobs" ]]; then + + export APRUN_CALCFIMS="${launcher} -n 1" + +elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostbndpntbll" ]] || [[ "${step}" = "wavepostpnt" ]]; then + + export CFP_MP="YES" + if [[ "${step}" = "waveprep" ]]; then export MP_PULSE=0 ; fi + export wavempexec=${launcher} + export wave_mpmd=${mpmd_opt} + +elif [[ "${step}" = "atmanlrun" ]]; then + + nth_max=$((npe_node_max / npe_node_atmanlrun)) + + export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}} + [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} + export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + nth_max=$((npe_node_max / npe_node_atmensanlrun)) + + export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} + [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} + export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun}" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + + nth_max=$((npe_node_max / npe_node_aeroanlrun)) + + export NTHREADS_AEROANL=${nth_aeroanlrun:-${nth_max}} + [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max} + export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun}" + +elif [[ "${step}" = "landanlrun" ]]; then + + nth_max=$((npe_node_max / npe_node_landanlrun)) + + export NTHREADS_LANDANL=${nth_landanlrun:-${nth_max}} + [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max} + export APRUN_LANDANL="${launcher} -n ${npe_landanlrun}" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + echo "WARNING: ${step} is not enabled on S4!" + +elif [[ "${step}" = "ocnanalrun" ]]; then + echo "WARNING: ${step} is not enabled on S4!" 
+ +elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then + + export MKL_NUM_THREADS=4 + export MKL_CBWR=AUTO + + export CFP_MP=${CFP_MP:-"YES"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + + nth_max=$((npe_node_max / npe_node_anal)) + + export NTHREADS_GSI=${nth_anal:-${nth_max}} + [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max} + export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_anal}}" + + export NTHREADS_CALCINC=${nth_calcinc:-1} + [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max} + export APRUN_CALCINC="${launcher} \$ncmd" + + export NTHREADS_CYCLE=${nth_cycle:-12} + [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} + npe_cycle=${ntiles:-6} + export APRUN_CYCLE="${launcher} -n ${npe_cycle}" + + + export NTHREADS_GAUSFCANL=1 + npe_gausfcanl=${npe_gausfcanl:-1} + export APRUN_GAUSFCANL="${launcher} -n ${npe_gausfcanl}" + +elif [[ "${step}" = "sfcanl" ]]; then + nth_max=$((npe_node_max / npe_node_sfcanl)) + + export NTHREADS_CYCLE=${nth_sfcanl:-14} + [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} + npe_sfcanl=${ntiles:-6} + export APRUN_CYCLE="${launcher} -n ${npe_sfcanl}" + +elif [[ "${step}" = "eobs" ]]; then + + export MKL_NUM_THREADS=4 + export MKL_CBWR=AUTO + + nth_max=$((npe_node_max / npe_node_eobs)) + + export NTHREADS_GSI=${nth_eobs:-${nth_max}} + [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max} + export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_eobs}}" + + export CFP_MP=${CFP_MP:-"YES"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + +elif [[ "${step}" = "eupd" ]]; then + + nth_max=$((npe_node_max / npe_node_eupd)) + + export NTHREADS_ENKF=${nth_eupd:-${nth_max}} + [[ ${NTHREADS_ENKF} -gt ${nth_max} ]] && export NTHREADS_ENKF=${nth_max} + export APRUN_ENKF="${launcher} -n ${npe_enkf:-${npe_eupd}}" + + export CFP_MP=${CFP_MP:-"YES"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="${launcher} -n \$ncmd ${mpmd_opt}" + +elif [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then + + if [[ "${CDUMP}" =~ "gfs" ]]; then + nprocs="npe_${step}_gfs" + ppn="npe_node_${step}_gfs" || ppn="npe_node_${step}" + else + nprocs="npe_${step}" + ppn="npe_node_${step}" + fi + (( nnodes = (${!nprocs}+${!ppn}-1)/${!ppn} )) + (( ntasks = nnodes*${!ppn} )) + # With ESMF threading, the model wants to use the full node + export APRUN_UFS="${launcher} -n ${ntasks}" + unset nprocs ppn nnodes ntasks + +elif [[ "${step}" = "post" ]]; then + + nth_max=$((npe_node_max / npe_node_post)) + + export NTHREADS_NP=${nth_np:-1} + [[ ${NTHREADS_NP} -gt ${nth_max} ]] && export NTHREADS_NP=${nth_max} + export APRUN_NP="${launcher} -n ${npe_post}" + + export NTHREADS_DWN=${nth_dwn:-1} + [[ ${NTHREADS_DWN} -gt ${nth_max} ]] && export NTHREADS_DWN=${nth_max} + export APRUN_DWN="${launcher} -n ${npe_dwn}" + +elif [[ "${step}" = "ecen" ]]; then + + nth_max=$((npe_node_max / npe_node_ecen)) + + export NTHREADS_ECEN=${nth_ecen:-${nth_max}} + [[ ${NTHREADS_ECEN} -gt ${nth_max} ]] && export NTHREADS_ECEN=${nth_max} + export APRUN_ECEN="${launcher} -n ${npe_ecen}" + + export NTHREADS_CHGRES=${nth_chgres:-12} + [[ ${NTHREADS_CHGRES} -gt ${npe_node_max} ]] && export NTHREADS_CHGRES=${npe_node_max} + export APRUN_CHGRES="time" + + export NTHREADS_CALCINC=${nth_calcinc:-1} + [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max} + export 
APRUN_CALCINC="${launcher} -n ${npe_ecen}" + +elif [[ "${step}" = "esfc" ]]; then + + nth_max=$((npe_node_max / npe_node_esfc)) + + export NTHREADS_ESFC=${nth_esfc:-${nth_max}} + [[ ${NTHREADS_ESFC} -gt ${nth_max} ]] && export NTHREADS_ESFC=${nth_max} + export APRUN_ESFC="${launcher} -n ${npe_esfc}" + + export NTHREADS_CYCLE=${nth_cycle:-14} + [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} + export APRUN_CYCLE="${launcher} -n ${npe_esfc}" + +elif [[ "${step}" = "epos" ]]; then + + nth_max=$((npe_node_max / npe_node_epos)) + + export NTHREADS_EPOS=${nth_epos:-${nth_max}} + [[ ${NTHREADS_EPOS} -gt ${nth_max} ]] && export NTHREADS_EPOS=${nth_max} + export APRUN_EPOS="${launcher} -n ${npe_epos}" + +elif [[ "${step}" = "postsnd" ]]; then + + export CFP_MP="YES" + + nth_max=$((npe_node_max / npe_node_postsnd)) + + export NTHREADS_POSTSND=${nth_postsnd:-1} + [[ ${NTHREADS_POSTSND} -gt ${nth_max} ]] && export NTHREADS_POSTSND=${nth_max} + export APRUN_POSTSND="${launcher} -n ${npe_postsnd}" + + export NTHREADS_POSTSNDCFP=${nth_postsndcfp:-1} + [[ ${NTHREADS_POSTSNDCFP} -gt ${nth_max} ]] && export NTHREADS_POSTSNDCFP=${nth_max} + export APRUN_POSTSNDCFP="${launcher} -n ${npe_postsndcfp} ${mpmd_opt}" + +elif [[ "${step}" = "awips" ]]; then + + echo "WARNING: ${step} is not enabled on S4!" + +elif [[ "${step}" = "gempak" ]]; then + + echo "WARNING: ${step} is not enabled on S4!" + +elif [[ "${step}" = "fit2obs" ]]; then + + nth_max=$((npe_node_max / npe_node_fit2obs)) + + export NTHREADS_FIT2OBS=${nth_fit2obs:-1} + [[ ${NTHREADS_FIT2OBS} -gt ${nth_max} ]] && export NTHREADS_FIT2OBS=${nth_max} + export MPIRUN="${launcher} -n ${npe_fit2obs}" + +fi diff --git a/env/WCOSS2.env b/env/WCOSS2.env new file mode 100755 index 0000000000..354de5cf44 --- /dev/null +++ b/env/WCOSS2.env @@ -0,0 +1,287 @@ +#! /usr/bin/env bash + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input argument to set runtime environment variables!" 
+ echo "argument can be any one of the following:" + echo "atmanlrun atmensanlrun aeroanlrun landanlrun" + echo "anal sfcanl fcst post vrfy metp" + echo "eobs eupd ecen esfc efcs epos" + echo "postsnd awips gempak" + exit 1 + +fi + +step=$1 + +# WCOSS2 information +export launcher="mpiexec -l" +export mpmd_opt="--cpu-bind verbose,core cfp" + +export npe_node_max=128 + +if [[ "${step}" = "prep" ]] || [[ "${step}" = "prepbufr" ]]; then + + nth_max=$((npe_node_max / npe_node_prep)) + + export POE=${POE:-"YES"} + export BACK=${BACK:-"off"} + export sys_tp="wcoss2" + export launcher_PREP="mpiexec" + +elif [[ "${step}" = "preplandobs" ]]; then + + export APRUN_CALCFIMS="${launcher} -n 1" + +elif [[ "${step}" = "waveinit" ]] || [[ "${step}" = "waveprep" ]] || [[ "${step}" = "wavepostsbs" ]] || [[ "${step}" = "wavepostbndpnt" ]] || [[ "${step}" = "wavepostbndpntbll" ]] || [[ "${step}" = "wavepostpnt" ]]; then + + if [[ "${step}" = "waveprep" ]] && [[ "${CDUMP}" = "gfs" ]]; then export NTASKS=${NTASKS_gfs} ; fi + export wavempexec="${launcher} -np" + export wave_mpmd=${mpmd_opt} + +elif [[ "${step}" = "atmanlrun" ]]; then + + nth_max=$((npe_node_max / npe_node_atmanlrun)) + + export NTHREADS_ATMANL=${nth_atmanlrun:-${nth_max}} + [[ ${NTHREADS_ATMANL} -gt ${nth_max} ]] && export NTHREADS_ATMANL=${nth_max} + export APRUN_ATMANL="${launcher} -n ${npe_atmanlrun}" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + nth_max=$((npe_node_max / npe_node_atmensanlrun)) + + export NTHREADS_ATMENSANL=${nth_atmensanlrun:-${nth_max}} + [[ ${NTHREADS_ATMENSANL} -gt ${nth_max} ]] && export NTHREADS_ATMENSANL=${nth_max} + export APRUN_ATMENSANL="${launcher} -n ${npe_atmensanlrun}" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + export APRUNCFP="${launcher} -np \$ncmd ${mpmd_opt}" + + nth_max=$((npe_node_max / npe_node_aeroanlrun)) + + export NTHREADS_AEROANL=${nth_aeroanlrun:-${nth_max}} + [[ ${NTHREADS_AEROANL} -gt ${nth_max} ]] && export NTHREADS_AEROANL=${nth_max} + export APRUN_AEROANL="${launcher} -n ${npe_aeroanlrun}" + +elif [[ "${step}" = "landanlrun" ]]; then + + nth_max=$((npe_node_max / npe_node_landanlrun)) + + export NTHREADS_LANDANL=${nth_landanlrun:-${nth_max}} + [[ ${NTHREADS_LANDANL} -gt ${nth_max} ]] && export NTHREADS_LANDANL=${nth_max} + export APRUN_LANDANL="${launcher} -n ${npe_landanlrun}" + +elif [[ "${step}" = "anal" ]] || [[ "${step}" = "analcalc" ]]; then + + export OMP_PLACES=cores + export OMP_STACKSIZE=1G + export FI_OFI_RXM_SAR_LIMIT=3145728 + + if [[ "${step}" = "analcalc" ]]; then + export MPICH_MPIIO_HINTS="*:romio_cb_write=disable" + fi + + nth_max=$((npe_node_max / npe_node_anal)) + + export NTHREADS_GSI=${nth_anal:-${nth_max}} + [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max} + export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_anal}} -ppn ${npe_node_anal} --cpu-bind depth --depth ${NTHREADS_GSI}" + + export NTHREADS_CALCINC=${nth_calcinc:-1} + [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max} + export APRUN_CALCINC="${launcher} \$ncmd" + + export NTHREADS_CYCLE=${nth_cycle:-14} + [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} + npe_cycle=${ntiles:-6} + export APRUN_CYCLE="${launcher} -n ${npe_cycle} -ppn ${npe_node_cycle} --cpu-bind depth --depth ${NTHREADS_CYCLE}" + + export NTHREADS_GAUSFCANL=1 + npe_gausfcanl=${npe_gausfcanl:-1} + export APRUN_GAUSFCANL="${launcher} -n ${npe_gausfcanl}" + + export NTHREADS_CHGRES=${nth_echgres:-14} + [[ ${NTHREADS_CHGRES} -gt ${npe_node_max} ]] && export 
NTHREADS_CHGRES=${npe_node_max} + export APRUN_CHGRES="" + + export CFP_MP=${CFP_MP:-"NO"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="${launcher} -np \$ncmd ${mpmd_opt}" + +elif [[ "${step}" = "sfcanl" ]]; then + + nth_max=$((npe_node_max / npe_node_sfcanl)) + + export NTHREADS_CYCLE=${nth_sfcanl:-14} + [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} + npe_sfcanl=${ntiles:-6} + export APRUN_CYCLE="${launcher} -n ${npe_sfcanl}" + +elif [[ "${step}" = "eobs" ]]; then + + export OMP_PLACES=cores + export OMP_STACKSIZE=1G + export FI_OFI_RXM_SAR_LIMIT=3145728 + + nth_max=$((npe_node_max / npe_node_eobs)) + + export NTHREADS_GSI=${nth_eobs:-${nth_max}} + [[ ${NTHREADS_GSI} -gt ${nth_max} ]] && export NTHREADS_GSI=${nth_max} + export APRUN_GSI="${launcher} -n ${npe_gsi:-${npe_eobs}} -ppn ${npe_node_eobs} --cpu-bind depth --depth ${NTHREADS_GSI}" + + export CFP_MP=${CFP_MP:-"NO"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="${launcher} -np \$ncmd ${mpmd_opt}" + +elif [[ "${step}" = "eupd" ]]; then + + export OMP_PLACES=cores + export OMP_STACKSIZE=2G + export MPICH_COLL_OPT_OFF=1 + export FI_OFI_RXM_SAR_LIMIT=3145728 + + nth_max=$((npe_node_max / npe_node_eupd)) + + export NTHREADS_ENKF=${nth_eupd:-${nth_max}} + [[ ${NTHREADS_ENKF} -gt ${nth_max} ]] && export NTHREADS_ENKF=${nth_max} + export APRUN_ENKF="${launcher} -n ${npe_enkf:-${npe_eupd}} -ppn ${npe_node_eupd} --cpu-bind depth --depth ${NTHREADS_ENKF}" + + export CFP_MP=${CFP_MP:-"NO"} + export USE_CFP=${USE_CFP:-"YES"} + export APRUNCFP="${launcher} -np \$ncmd ${mpmd_opt}" + +elif [[ "${step}" = "fcst" ]] || [[ "${step}" = "efcs" ]]; then + + if [[ "${CDUMP}" =~ "gfs" ]]; then + nprocs="npe_${step}_gfs" + ppn="npe_node_${step}_gfs" || ppn="npe_node_${step}" + else + nprocs="npe_${step}" + ppn="npe_node_${step}" + fi + (( nnodes = (${!nprocs}+${!ppn}-1)/${!ppn} )) + (( ntasks = nnodes*${!ppn} )) + # With ESMF threading, the model wants to use the full node + export APRUN_UFS="${launcher} -n ${ntasks} -ppn ${!ppn} --cpu-bind depth --depth 1" + unset nprocs ppn nnodes ntasks + + # TODO: Why are fcst and efcs so different on WCOSS2? 
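The fcst/efcs branch sizes the job with bash indirect expansion (${!nprocs} reads the variable whose name is stored in nprocs) and an integer ceiling division that rounds the node count up to whole nodes. A short illustration with made-up numbers; the variable names mirror the block above, but the values are examples only.

    # suppose the config set npe_fcst_gfs=300 and npe_node_fcst_gfs=24 for CDUMP=gfs
    npe_fcst_gfs=300
    npe_node_fcst_gfs=24
    step="fcst"
    nprocs="npe_${step}_gfs"                                # name of the variable holding the task count
    ppn="npe_node_${step}_gfs"                              # name of the variable holding tasks per node
    (( nnodes = (${!nprocs} + ${!ppn} - 1) / ${!ppn} ))     # ceil(300 / 24) = 13 nodes
    (( ntasks = nnodes * ${!ppn} ))                         # 13 * 24 = 312 tasks handed to the launcher
    echo "${nnodes} nodes, ${ntasks} tasks"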
+ # TODO: Compare these with the ufs-weather-model regression test job card at: + # https://github.com/ufs-community/ufs-weather-model/blob/develop/tests/fv3_conf/fv3_qsub.IN_wcoss2 + export FI_OFI_RXM_RX_SIZE=40000 + export FI_OFI_RXM_TX_SIZE=40000 + if [[ "${step}" = "fcst" ]]; then + export OMP_PLACES=cores + export OMP_STACKSIZE=2048M + elif [[ "${step}" = "efcs" ]]; then + export MPICH_MPIIO_HINTS="*:romio_cb_write=disable" + export FI_OFI_RXM_SAR_LIMIT=3145728 + fi + +elif [[ "${step}" = "post" ]]; then + + nth_max=$((npe_node_max / npe_node_post)) + + export NTHREADS_NP=${nth_np:-1} + [[ ${NTHREADS_NP} -gt ${nth_max} ]] && export NTHREADS_NP=${nth_max} + export APRUN_NP="${launcher} -n ${npe_np:-${npe_post}} -ppn ${npe_node_post} --cpu-bind depth --depth ${NTHREADS_NP}" + + export NTHREADS_DWN=${nth_dwn:-1} + [[ ${NTHREADS_DWN} -gt ${nth_max} ]] && export NTHREADS_DWN=${nth_max} + export APRUN_DWN="${launcher} -np ${npe_dwn} ${mpmd_opt}" + +elif [[ "${step}" = "ecen" ]]; then + + nth_max=$((npe_node_max / npe_node_ecen)) + + export NTHREADS_ECEN=${nth_ecen:-${nth_max}} + [[ ${NTHREADS_ECEN} -gt ${nth_max} ]] && export NTHREADS_ECEN=${nth_max} + export APRUN_ECEN="${launcher} -n ${npe_ecen} -ppn ${npe_node_ecen} --cpu-bind depth --depth ${NTHREADS_ECEN}" + + export NTHREADS_CHGRES=${nth_chgres:-14} + [[ ${NTHREADS_CHGRES} -gt ${npe_node_max} ]] && export NTHREADS_CHGRES=${npe_node_max} + export APRUN_CHGRES="time" + + export NTHREADS_CALCINC=${nth_calcinc:-1} + [[ ${NTHREADS_CALCINC} -gt ${nth_max} ]] && export NTHREADS_CALCINC=${nth_max} + export APRUN_CALCINC="${launcher} -n ${npe_ecen}" + + export NTHREADS_CYCLE=${nth_cycle:-14} + [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} + export APRUN_CYCLE="${launcher} -n ${npe_ecen} -ppn ${npe_node_cycle} --cpu-bind depth --depth ${NTHREADS_CYCLE}" + +elif [[ "${step}" = "esfc" ]]; then + + nth_max=$((npe_node_max / npe_node_esfc)) + + export NTHREADS_ESFC=${nth_esfc:-${nth_max}} + [[ ${NTHREADS_ESFC} -gt ${nth_max} ]] && export NTHREADS_ESFC=${nth_max} + export APRUN_ESFC="${launcher} -n ${npe_esfc} -ppn ${npe_node_esfc} --cpu-bind depth --depth ${NTHREADS_ESFC}" + + export NTHREADS_CYCLE=${nth_cycle:-14} + [[ ${NTHREADS_CYCLE} -gt ${npe_node_max} ]] && export NTHREADS_CYCLE=${npe_node_max} + export APRUN_CYCLE="${launcher} -n ${npe_esfc} -ppn ${npe_node_cycle} --cpu-bind depth --depth ${NTHREADS_CYCLE}" + +elif [[ "${step}" = "epos" ]]; then + + nth_max=$((npe_node_max / npe_node_epos)) + + export NTHREADS_EPOS=${nth_epos:-${nth_max}} + [[ ${NTHREADS_EPOS} -gt ${nth_max} ]] && export NTHREADS_EPOS=${nth_max} + export APRUN_EPOS="${launcher} -n ${npe_epos} -ppn ${npe_node_epos} --cpu-bind depth --depth ${NTHREADS_EPOS}" + +elif [[ "${step}" = "postsnd" ]]; then + + export MPICH_MPIIO_HINTS_DISPLAY=1 + export OMP_NUM_THREADS=1 + + nth_max=$((npe_node_max / npe_node_postsnd)) + + export NTHREADS_POSTSND=${nth_postsnd:-1} + [[ ${NTHREADS_POSTSND} -gt ${nth_max} ]] && export NTHREADS_POSTSND=${nth_max} + export APRUN_POSTSND="${launcher} -n ${npe_postsnd} --depth=${NTHREADS_POSTSND} --cpu-bind depth" + + export NTHREADS_POSTSNDCFP=${nth_postsndcfp:-1} + [[ ${NTHREADS_POSTSNDCFP} -gt ${nth_max} ]] && export NTHREADS_POSTSNDCFP=${nth_max} + export APRUN_POSTSNDCFP="${launcher} -np ${npe_postsndcfp} ${mpmd_opt}" + +elif [[ "${step}" = "awips" ]]; then + + nth_max=$((npe_node_max / npe_node_awips)) + + export NTHREADS_AWIPS=${nth_awips:-2} + [[ ${NTHREADS_AWIPS} -gt ${nth_max} ]] && export 
NTHREADS_AWIPS=${nth_max} + export APRUN_AWIPSCFP="${launcher} -np ${npe_awips} ${mpmd_opt}" + +elif [[ "${step}" = "gempak" ]]; then + + if [[ ${CDUMP} == "gfs" ]]; then + npe_gempak=${npe_gempak_gfs} + npe_node_gempak=${npe_node_gempak_gfs} + fi + + nth_max=$((npe_node_max / npe_node_gempak)) + + export NTHREADS_GEMPAK=${nth_gempak:-1} + [[ ${NTHREADS_GEMPAK} -gt ${nth_max} ]] && export NTHREADS_GEMPAK=${nth_max} + export APRUN_GEMPAKCFP="${launcher} -np ${npe_gempak} ${mpmd_opt}" + +elif [[ "${step}" = "fit2obs" ]]; then + + nth_max=$((npe_node_max / npe_node_fit2obs)) + + export NTHREADS_FIT2OBS=${nth_fit2obs:-1} + [[ ${NTHREADS_FIT2OBS} -gt ${nth_max} ]] && export NTHREADS_FIT2OBS=${nth_max} + export MPIRUN="${launcher} -np ${npe_fit2obs}" + +elif [[ "${step}" = "waveawipsbulls" ]]; then + + unset PERL5LIB + +elif [[ "${step}" = "wafsgrib2" ]] || [[ "${step}" = "wafsgrib20p25" ]]; then + + export USE_CFP=${USE_CFP:-"YES"} + +fi diff --git a/env/WCOSS_C.env b/env/WCOSS_C.env deleted file mode 100755 index ea715a0ba6..0000000000 --- a/env/WCOSS_C.env +++ /dev/null @@ -1,245 +0,0 @@ -#!/bin/ksh -x - -if [ $# -ne 1 ]; then - - echo "Must specify an input argument to set runtime environment variables!" - echo "argument can be any one of the following:" - echo "anal fcst post vrfy metp" - echo "eobs eupd ecen efcs epos" - echo "postsnd awips gempak" - exit 1 - -fi - -step=$1 - -# Cray information -export launcher="aprun" -export mpmd="cfp" -export npe_node_max=24 - -# Configure MPI environment -export MPI_BUFS_PER_PROC=2048 -export MPI_BUFS_PER_HOST=2048 -export MPI_GROUP_MAX=256 -export MPI_MEMMAP_OFF=1 -export MP_STDOUTMODE="ORDERED" -export NTHSTACK=1024000000 -export OMP_STACKSIZE="2048M" -export KMP_AFFINITY="disabled" -export job=${LSB_JOBNAME:-$step} -export jobid=${job}.${LSB_JOBID:-$$} - -if [ $step = "prep" -o $step = "prepbufr" ]; then - - nth_max=$(($npe_node_max / $npe_node_prep)) - - export POE=${POE:-"YES"} - export sys_tp="Cray-XC40" - -elif [ $step = "waveinit" -o $step = "waveprep" -o $step = "wavepostsbs" -o $step = "wavepostbndpnt" -o $step = "wavepostbndpntbll" -o $step = "wavepostpnt" ]; then - if [ $step = "waveprep" ]; then export MP_PULSE=0 ; fi - export wavempexec=${launcher} - export wave_mpmd=${mpmd} - -elif [ $step = "anal" ]; then - - nth_max=$(($npe_node_max / $npe_node_anal)) - - export NTHREADS_GSI=${nth_anal:-$nth_max} - [[ $NTHREADS_GSI -gt $nth_max ]] && export NTHREADS_GSI=$nth_max - export APRUN_GSI="$launcher -j 1 -n ${npe_gsi:-$npe_anal} -N $npe_node_anal -d $NTHREADS_GSI -cc depth" - - export NTHREADS_CALCINC=${nth_calcinc:-1} - [[ $NTHREADS_CALCINC -gt $nth_max ]] && export NTHREADS_CALCINC=$nth_max - export APRUN_CALCINC="$launcher -j 1 -n \$ncmd -N 1 -d $NTHREADS_CALCINC -cc depth" - - if [ ${USE_CFP:-"NO"} = "YES" ]; then - export APRUNCFP="$launcher -q -j 1 -n \$ncmd -N \$ncmd_max -d 1 $mpmd" - fi - - export NTHREADS_CYCLE=${nth_cycle:-12} - [[ $NTHREADS_CYCLE -gt $npe_node_max ]] && export NTHREADS_CYCLE=$npe_node_max - npe_node_cycle=$(($npe_node_max / $NTHREADS_CYCLE)) - npe_cycle=${ntiles:-6} - [[ $npe_node_cycle -gt $npe_cycle ]] && npe_node_cycle=$npe_cycle - export APRUN_CYCLE="$launcher -j 1 -n $npe_cycle -N $npe_node_cycle -d $NTHREADS_CYCLE -cc depth" - - export NTHREADS_GAUSFCANL=1 - npe_node_gausfcanl=$(($npe_node_max / $NTHREADS_GAUSFCANL)) - npe_gausfcanl=${npe_gausfcanl:-1} - [[ $npe_node_gausfcanl -gt $npe_gausfcanl ]] && npe_node_gausfcanl=$npe_gausfcanl - export APRUN_GAUSFCANL="$launcher -j 1 -n $npe_gausfcanl -N 
$npe_node_gausfcanl -d $NTHREADS_GAUSFCANL -cc depth" - - export NTHREADS_CHGRES=${nth_echgres:-1} - [[ $NTHREADS_CHGRES -gt $npe_node_max ]] && export NTHREADS_CHGRES=$npe_node_max - export APRUN_CHGRES="$launcher -j 1 -n 1 -N 1 -d $NTHREADS_CHGRES -cc depth" - -elif [ $step = "gldas" ]; then - - nth_max=$(($npe_node_max / $npe_node_gldas)) - - export NTHREADS_GLDAS=${nth_gldas:-$nth_max} - [[ $NTHREADS_GLDAS -gt $nth_max ]] && export NTHREADS_GLDAS=$nth_max - export APRUN_GLDAS="$launcher -j 1 -n $npe_gldas -N $npe_node_gldas -d $NTHREADS_GLDAS -cc depth" - - export NTHREADS_GAUSSIAN=${nth_gaussian:-1} - [[ $NTHREADS_GAUSSIAN -gt $nth_max ]] && export NTHREADS_GAUSSIAN=$nth_max - export APRUN_GAUSSIAN="$launcher -j 1 -n $npe_gaussian -N $npe_node_gaussian -d $NTHREADS_GAUSSIAN -cc depth" - - export APRUN_GLDAS_DATA_PROC="$launcher -j 1 -n $npe_gldas -N $npe_node_gldas -d 1 $mpmd" - -elif [ $step = "eobs" ]; then - - nth_max=$(($npe_node_max / $npe_node_eobs)) - - export NTHREADS_GSI=${nth_anal:-$nth_max} - [[ $NTHREADS_GSI -gt $nth_max ]] && export NTHREADS_GSI=$nth_max - export APRUN_GSI="$launcher -j 1 -n ${npe_gsi:-$npe_eobs} -N $npe_node_eobs -d $NTHREADS_GSI -cc depth" - - if [ ${USE_CFP:-"NO"} = "YES" ]; then - export APRUNCFP="$launcher -q -j 1 -n \$ncmd -N \$ncmd_max -d 1 $mpmd" - fi - -elif [ $step = "eupd" ]; then - - nth_max=$(($npe_node_max / $npe_node_eupd)) - - export NTHREADS_ENKF=${nth_enkf:-$nth_max} - [[ $NTHREADS_ENKF -gt $nth_max ]] && export NTHREADS_ENKF=$nth_max - export APRUN_ENKF="$launcher -j 1 -n ${npe_enkf:-$npe_eupd} -N $npe_node_eupd -d $NTHREADS_ENKF -cc depth" - - if [ ${USE_CFP:-"NO"} = "YES" ]; then - export APRUNCFP="$launcher -q -j 1 -n \$ncmd -N \$ncmd_max -d 1 $mpmd" - fi - -elif [ $step = "fcst" ]; then - - #PEs and PEs/node can differ for GFS and GDAS forecasts if threading differs - if [[ $CDUMP == "gfs" ]]; then - npe_fcst=$npe_fcst_gfs - npe_node_fcst=$npe_node_fcst_gfs - nth_fv3=$nth_fv3_gfs - fi - - nth_max=$(($npe_node_max / $npe_node_fcst)) - - export NTHREADS_FV3=${nth_fv3:-$nth_max} - [[ $NTHREADS_FV3 -gt $nth_max ]] && export NTHREADS_FV3=$nth_max - export cores_per_node=$npe_node_max - #export APRUN_FV3="$launcher -j 1 -n ${npe_fv3:-$npe_fcst} -N $npe_node_fcst -d $NTHREADS_FV3 -cc depth" - export APRUN_FV3="$launcher -j 1 -n ${npe_fcst} -N $npe_node_fcst -d $NTHREADS_FV3 -cc depth" - - export NTHREADS_REGRID_NEMSIO=${nth_regrid_nemsio:-1} - [[ $NTHREADS_REGRID_NEMSIO -gt $nth_max ]] && export NTHREADS_REGRID_NEMSIO=$nth_max - export APRUN_REGRID_NEMSIO="$launcher -j 1 -n $LEVS -N $npe_node_fcst -d $NTHREADS_REGRID_NEMSIO -cc depth" - - export NTHREADS_REMAP=${nth_remap:-2} - [[ $NTHREADS_REMAP -gt $nth_max ]] && export NTHREADS_REMAP=$nth_max - export APRUN_REMAP="$launcher -j 1 -n ${npe_remap:-$npe_fcst} -N $npe_node_fcst -d $NTHREADS_REMAP -cc depth" - -elif [ $step = "efcs" ]; then - - nth_max=$(($npe_node_max / $npe_node_efcs)) - - export NTHREADS_FV3=${nth_fv3:-$nth_max} - [[ $NTHREADS_FV3 -gt $nth_max ]] && export NTHREADS_FV3=$nth_max - export cores_per_node=$npe_node_max - export APRUN_FV3="$launcher -j 1 -n ${npe_fv3:-$npe_efcs} -N $npe_node_efcs -d $NTHREADS_FV3 -cc depth" - - export NTHREADS_REGRID_NEMSIO=${nth_regrid_nemsio:-1} - [[ $NTHREADS_REGRID_NEMSIO -gt $nth_max ]] && export NTHREADS_REGRID_NEMSIO=$nth_max - export APRUN_REGRID_NEMSIO="$launcher -j 1 -n $LEVS -N $npe_node_efcs -d $NTHREADS_REGRID_NEMSIO -cc depth" - -elif [ $step = "post" ]; then - - nth_max=$(($npe_node_max / $npe_node_post)) - - export 
NTHREADS_NP=${nth_np:-1} - [[ $NTHREADS_NP -gt $nth_max ]] && export NTHREADS_NP=$nth_max - export APRUN_NP="$launcher -j 1 -n ${npe_np:-$npe_post} -N $npe_node_post -d $NTHREADS_NP -cc depth" - - export NTHREADS_DWN=${nth_dwn:-1} - [[ $NTHREADS_DWN -gt $nth_max ]] && export NTHREADS_DWN=$nth_max - export APRUN_DWN="$launcher -j 1 -n $npe_dwn -N $npe_node_dwn -d $NTHREADS_DWN $mpmd" - -elif [ $step = "ecen" ]; then - - nth_max=$(($npe_node_max / $npe_node_ecen)) - - export NTHREADS_ECEN=${nth_ecen:-$nth_max} - [[ $NTHREADS_ECEN -gt $nth_max ]] && export NTHREADS_ECEN=$nth_max - export APRUN_ECEN="$launcher -j 1 -n $npe_ecen -N $npe_node_ecen -d $NTHREADS_ECEN -cc depth" - - export NTHREADS_CHGRES=${nth_chgres:-12} - [[ $NTHREADS_CHGRES -gt $npe_node_max ]] && export NTHREADS_CHGRES=$npe_node_max - export APRUN_CHGRES="$launcher -j 1 -n 1 -N 1 -d $NTHREADS_CHGRES -cc depth" - - export NTHREADS_CALCINC=${nth_calcinc:-1} - [[ $NTHREADS_CALCINC -gt $nth_max ]] && export NTHREADS_CALCINC=$nth_max - export APRUN_CALCINC="$launcher -j 1 -n $npe_ecen -N $npe_node_ecen -d $NTHREADS_CALCINC -cc depth" - -elif [ $step = "esfc" ]; then - - nth_max=$(($npe_node_max / $npe_node_esfc)) - - export NTHREADS_ESFC=${nth_esfc:-$nth_max} - [[ $NTHREADS_ESFC -gt $nth_max ]] && export NTHREADS_ESFC=$nth_max - npe_node_esfc=$(($npe_node_max / $NTHREADS_ESFC)) - export APRUN_ESFC="$launcher -j 1 -n $npe_esfc -N $npe_node_esfc -d $NTHREADS_ESFC -cc depth" - - export NTHREADS_CYCLE=${nth_cycle:-12} - [[ $NTHREADS_CYCLE -gt $npe_node_max ]] && export NTHREADS_CYCLE=$npe_node_max - npe_node_cycle=$(($npe_node_max / $NTHREADS_CYCLE)) - export APRUN_CYCLE="$launcher -j 1 -n $npe_esfc -N $npe_node_cycle -d $NTHREADS_CYCLE -cc depth" - -elif [ $step = "epos" ]; then - - nth_max=$(($npe_node_max / $npe_node_epos)) - - export NTHREADS_EPOS=${nth_epos:-$nth_max} - [[ $NTHREADS_EPOS -gt $nth_max ]] && export NTHREADS_EPOS=$nth_max - export APRUN_EPOS="$launcher -j 1 -n $npe_epos -N $npe_node_epos -d $NTHREADS_EPOS -cc depth" - -elif [ $step = "init" ]; then - - export APRUN="$launcher" - -elif [ $step = "vrfy" ]; then - - export IOBUF_PARAMS="*:size=32M:count=4:verbose" - export APRUNTRACK="$launcher -j1 -n1 -N1 -d1 -cc depth" - -elif [ $step = "metp" ]; then - - export IOBUF_PARAMS="*:size=32M:count=4:verbose" - export APRUNTRACK="$launcher -j1 -n1 -N1 -d1 -cc depth" - -elif [ $step = "postsnd" ]; then - export IOBUF_PARAMS="sigf*:size=128M:count=20:prefetch=0:verbose,gfs_collectiv*:size=128M:count=2:prefetch=0:verbose,*.snd:size=128M:count=3:prefetch=0:verbose,*.sfc:size=32M:count=3:prefetch=0:verbose,bufr.*:size=8M:count=20:prefetch=0:verbose" - - nth_max=$(($npe_node_max / $npe_node_postsnd)) - - export NTHREADS_POSTSND=${nth_postsnd:-1} - [[ $NTHREADS_POSTSND -gt $nth_max ]] && export NTHREADS_POSTSND=$nth_max - export APRUN_POSTSND="$launcher -j 1 -n $npe_postsnd -N $npe_node_postsnd -d $NTHREADS_POSTSND -cc depth" - - export NTHREADS_POSTSNDCFP=${nth_postsndcfp:-1} - [[ $NTHREADS_POSTSNDCFP -gt $nth_max ]] && export NTHREADS_POSTSNDCFP=$nth_max - export APRUN_POSTSNDCFP="$launcher -j 1 -n $npe_postsndcfp -N $npe_node_postsndcfp -d $NTHREADS_POSTSNDCFP $mpmd" - -elif [ $step = "awips" ]; then - nth_max=$(($npe_node_max / $npe_node_awips)) - - export NTHREADS_AWIPS=${nth_awips:-2} - [[ $NTHREADS_AWIPS -gt $nth_max ]] && export NTHREADS_AWIPS=$nth_max - export APRUN_AWIPSCFP="$launcher -j 1 -n $npe_awips -N $npe_node_awips -d $NTHREADS_AWIPS -cc depth $mpmd" - -elif [ $step = "gempak" ]; then - nth_max=$(($npe_node_max / 
$npe_node_gempak)) - - export NTHREADS_GEMPAK=${nth_gempak:-3} - [[ $NTHREADS_GEMPAK -gt $nth_max ]] && export NTHREADS_GEMPAK=$nth_max - export APRUN_GEMPAKCFP="$launcher -j 1 -n $npe_gempak -N $npe_node_gempak -d $NTHREADS_GEMPAK $mpmd" - -fi diff --git a/env/WCOSS_DELL_P3.env b/env/WCOSS_DELL_P3.env deleted file mode 100755 index 680549c4b2..0000000000 --- a/env/WCOSS_DELL_P3.env +++ /dev/null @@ -1,244 +0,0 @@ -#!/bin/ksh -x - -if [ $# -ne 1 ]; then - - echo "Must specify an input argument to set runtime environment variables!" - echo "argument can be any one of the following:" - echo "anal fcst post vrfy metp" - echo "eobs eupd ecen esfc efcs epos" - echo "postsnd awips gempak" - exit 1 - -fi - -step=$1 - -# WCOSS_DELL_P3 information -export launcher="mpirun -l -n" -export mpmd="cfp" - -export npe_node_max=28 -if [ "$QUEUE" = "dev2" -o "$QUEUE" = "devonprod2" -o "$QUEUE" = "devmax2" ]; then # WCOSS Dell 3.5 - export npe_node_max=40 -fi - -# Configure MPI environment -#export MPI_BUFS_PER_PROC=2048 -#export MPI_BUFS_PER_HOST=2048 -#export MPI_GROUP_MAX=256 -#export MPI_MEMMAP_OFF=1 -export MPI_LABELIO=YES -export MP_STDOUTMODE="ORDERED" -export KMP_STACKSIZE=2048M -export KMP_AFFINITY=scatter -export job=${LSB_JOBNAME:-$step} -export jobid=${job}.${LSB_JOBID:-$$} - -# get binding information -#export I_MPI_DEBUG=4 - -if [ $step = "prep" -o $step = "prepbufr" ]; then - - nth_max=$(($npe_node_max / $npe_node_prep)) - - export POE=${POE:-"YES"} - export BACK=${BACK:-"off"} - export sys_tp="Dell-p3" - -elif [ $step = "waveinit" -o $step = "waveprep" -o $step = "wavepostsbs" -o $step = "wavepostbndpnt" -o $step = "wavepostbndpntbll" -o $step = "wavepostpnt" ]; then - - if [ $step = "waveprep" ]; then export MP_PULSE=0 ; fi - export wavempexec=${launcher} - export wave_mpmd=${mpmd} - -elif [ $step = "anal" ]; then - - nth_max=$(($npe_node_max / $npe_node_anal)) - - export NTHREADS_GSI=${nth_anal:-$nth_max} - [[ $NTHREADS_GSI -gt $nth_max ]] && export NTHREADS_GSI=$nth_max - export APRUN_GSI="$launcher ${npe_gsi:-${npe_anal:-$PBS_NP}}" - - export NTHREADS_CALCINC=${nth_calcinc:-1} - [[ $NTHREADS_CALCINC -gt $nth_max ]] && export NTHREADS_CALCINC=$nth_max - export APRUN_CALCINC="$launcher \$ncmd" - - export NTHREADS_CYCLE=${nth_cycle:-14} - [[ $NTHREADS_CYCLE -gt $npe_node_max ]] && export NTHREADS_CYCLE=$npe_node_max - npe_cycle=${ntiles:-6} - export APRUN_CYCLE="$launcher $npe_cycle" - - export NTHREADS_GAUSFCANL=1 - npe_gausfcanl=${npe_gausfcanl:-1} - export APRUN_GAUSFCANL="$launcher $npe_gausfcanl" - - export NTHREADS_CHGRES=${nth_echgres:-14} - [[ $NTHREADS_CHGRES -gt $npe_node_max ]] && export NTHREADS_CHGRES=$npe_node_max - export APRUN_CHGRES="" - - export CFP_MP=${CFP_MP:-"NO"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="$launcher \$ncmd $mpmd" - -elif [ $step = "gldas" ]; then - - nth_max=$(($npe_node_max / $npe_node_gldas)) - - export NTHREADS_GLDAS=${nth_gldas:-$nth_max} - [[ $NTHREADS_GLDAS -gt $nth_max ]] && export NTHREADS_GLDAS=$nth_max - export APRUN_GLDAS="$launcher $npe_gldas" - - export NTHREADS_GAUSSIAN=${nth_gaussian:-1} - [[ $NTHREADS_GAUSSIAN -gt $nth_max ]] && export NTHREADS_GAUSSIAN=$nth_max - export APRUN_GAUSSIAN="$launcher $npe_gaussian" - - export APRUN_GLDAS_DATA_PROC="$launcher $npe_gldas $mpmd" - -elif [ $step = "eobs" ]; then - - nth_max=$(($npe_node_max / $npe_node_eobs)) - - export NTHREADS_GSI=${nth_eobs:-$nth_max} - [[ $NTHREADS_GSI -gt $nth_max ]] && export NTHREADS_GSI=$nth_max - export APRUN_GSI="$launcher ${npe_gsi:-${npe_eobs:-$PBS_NP}}" - 
- export CFP_MP=${CFP_MP:-"NO"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="$launcher \$ncmd $mpmd" - -elif [ $step = "eupd" ]; then - - nth_max=$(($npe_node_max / $npe_node_eupd)) - - export NTHREADS_ENKF=${nth_eupd:-$nth_max} - [[ $NTHREADS_ENKF -gt $nth_max ]] && export NTHREADS_ENKF=$nth_max - export APRUN_ENKF="$launcher ${npe_enkf:-${npe_eupd:-$PBS_NP}}" - - export CFP_MP=${CFP_MP:-"NO"} - export USE_CFP=${USE_CFP:-"YES"} - export APRUNCFP="$launcher \$ncmd $mpmd" - -elif [ $step = "fcst" ]; then - - #PEs and PEs/node can differ for GFS and GDAS forecasts if threading differs - if [[ $CDUMP == "gfs" ]]; then - npe_fcst=$npe_fcst_gfs - npe_node_fcst=$npe_node_fcst_gfs - nth_fv3=$nth_fv3_gfs - fi - - nth_max=$(($npe_node_max / $npe_node_fcst)) - - export NTHREADS_FV3=${nth_fv3:-$nth_max} - [[ $NTHREADS_FV3 -gt $nth_max ]] && export NTHREADS_FV3=$nth_max - export cores_per_node=$npe_node_max - #export APRUN_FV3="$launcher ${npe_fv3:-${npe_fcst:-$PBS_NP}}" - export APRUN_FV3="$launcher ${npe_fcst:-$PBS_NP}" - export NTHREADS_REGRID_NEMSIO=${nth_regrid_nemsio:-1} - [[ $NTHREADS_REGRID_NEMSIO -gt $nth_max ]] && export NTHREADS_REGRID_NEMSIO=$nth_max - export APRUN_REGRID_NEMSIO="$launcher $LEVS" - - export NTHREADS_REMAP=${nth_remap:-2} - [[ $NTHREADS_REMAP -gt $nth_max ]] && export NTHREADS_REMAP=$nth_max - export APRUN_REMAP="$launcher ${npe_remap:-${npe_fcst:-$PBS_NP}}" - export I_MPI_DAPL_UD="enable" - -elif [ $step = "efcs" ]; then - - nth_max=$(($npe_node_max / $npe_node_efcs)) - - export NTHREADS_FV3=${nth_efcs:-$nth_max} - [[ $NTHREADS_FV3 -gt $nth_max ]] && export NTHREADS_FV3=$nth_max - export cores_per_node=$npe_node_max - export APRUN_FV3="$launcher ${npe_fv3:-${npe_efcs:-$PBS_NP}}" - - export NTHREADS_REGRID_NEMSIO=${nth_regrid_nemsio:-1} - [[ $NTHREADS_REGRID_NEMSIO -gt $nth_max ]] && export NTHREADS_REGRID_NEMSIO=$nth_max - export APRUN_REGRID_NEMSIO="$launcher $LEVS" - -elif [ $step = "post" ]; then - - nth_max=$(($npe_node_max / $npe_node_post)) - - export NTHREADS_NP=${nth_np:-1} - [[ $NTHREADS_NP -gt $nth_max ]] && export NTHREADS_NP=$nth_max - export APRUN_NP="$launcher ${npe_np:-${npe_post:-$PBS_NP}}" - - export NTHREADS_DWN=${nth_dwn:-1} - [[ $NTHREADS_DWN -gt $nth_max ]] && export NTHREADS_DWN=$nth_max - export APRUN_DWN="$launcher ${npe_dwn:-$PBS_NP} $mpmd" - -elif [ $step = "ecen" ]; then - - nth_max=$(($npe_node_max / $npe_node_ecen)) - - export NTHREADS_ECEN=${nth_ecen:-$nth_max} - [[ $NTHREADS_ECEN -gt $nth_max ]] && export NTHREADS_ECEN=$nth_max - export APRUN_ECEN="$launcher ${npe_ecen:-$PBS_NP}" - - export NTHREADS_CHGRES=${nth_chgres:-14} - [[ $NTHREADS_CHGRES -gt $npe_node_max ]] && export NTHREADS_CHGRES=$npe_node_max - export APRUN_CHGRES="time" - - export NTHREADS_CALCINC=${nth_calcinc:-1} - [[ $NTHREADS_CALCINC -gt $nth_max ]] && export NTHREADS_CALCINC=$nth_max - export APRUN_CALCINC="$launcher ${npe_ecen:-$PBS_NP}" - - export NTHREADS_CYCLE=${nth_cycle:-14} - [[ $NTHREADS_CYCLE -gt $npe_node_max ]] && export NTHREADS_CYCLE=$npe_node_max - export APRUN_CYCLE="$launcher $npe_ecen" - -elif [ $step = "esfc" ]; then - - nth_max=$(($npe_node_max / $npe_node_esfc)) - - export NTHREADS_ESFC=${nth_esfc:-$nth_max} - [[ $NTHREADS_ESFC -gt $nth_max ]] && export NTHREADS_ESFC=$nth_max - export APRUN_ESFC="$launcher ${npe_esfc:-$PBS_NP}" - - export NTHREADS_CYCLE=${nth_cycle:-14} - [[ $NTHREADS_CYCLE -gt $npe_node_max ]] && export NTHREADS_CYCLE=$npe_node_max - export APRUN_CYCLE="$launcher $npe_esfc" - -elif [ $step = "epos" ]; then - - 
nth_max=$(($npe_node_max / $npe_node_epos)) - - export NTHREADS_EPOS=${nth_epos:-$nth_max} - [[ $NTHREADS_EPOS -gt $nth_max ]] && export NTHREADS_EPOS=$nth_max - export APRUN_EPOS="$launcher ${npe_epos:-$PBS_NP}" - -elif [ $step = "init" ]; then - - export APRUN="mpirun" - -elif [ $step = "postsnd" ]; then - - nth_max=$(($npe_node_max / $npe_node_postsnd)) - - export NTHREADS_POSTSND=${nth_postsnd:-1} - [[ $NTHREADS_POSTSND -gt $nth_max ]] && export NTHREADS_POSTSND=$nth_max - export APRUN_POSTSND="$launcher $npe_postsnd" - - export NTHREADS_POSTSNDCFP=${nth_postsndcfp:-1} - [[ $NTHREADS_POSTSNDCFP -gt $nth_max ]] && export NTHREADS_POSTSNDCFP=$nth_max - export APRUN_POSTSNDCFP="$launcher $npe_postsndcfp $mpmd" - -elif [ $step = "awips" ]; then - - nth_max=$(($npe_node_max / $npe_node_awips)) - - export NTHREADS_AWIPS=${nth_awips:-2} - [[ $NTHREADS_AWIPS -gt $nth_max ]] && export NTHREADS_AWIPS=$nth_max - export APRUN_AWIPSCFP="$launcher ${npe_awips:-$PBS_NP} $mpmd" - -elif [ $step = "gempak" ]; then - - nth_max=$(($npe_node_max / $npe_node_gempak)) - - export NTHREADS_GEMPAK=${nth_gempak:-1} - [[ $NTHREADS_GEMPAK -gt $nth_max ]] && export NTHREADS_GEMPAK=$nth_max - export APRUN_GEMPAKCFP="$launcher \$ntasks $mpmd" - - -fi diff --git a/env/gfs.ver b/env/gfs.ver deleted file mode 100644 index a8f32bd289..0000000000 --- a/env/gfs.ver +++ /dev/null @@ -1,22 +0,0 @@ -export gfs_ver=v15.0.0 - -export crtm_ver=2.3.0 -export hwrf_ver=v11.0.0 -export g2tmpl_ver=1.4.0 - -export grib_util_ver=1.1.0 -export util_shared_ver=1.0.6 -export cfp_intel_sandybridge_ver=1.1.0 -export iobuf_ver=2.0.7 -export ESMF_intel_sandybridge_ver=3_1_0rp5 -export ESMF_intel_haswell_ver=3_1_0rp5 -export gempak_ver=7.3.3 -export old_gempak_ver=6.32.0 -export NCL_gnu_sandybridge_ver=6.3.0 -export ncarg_intel_sandybridge_ver=6.1.0 -export dumpjb_ver=5.1.0 - -## FOLLOWING are used by JGDAS_TROPC -export obsproc_dump_ver=v4.0.0 -export obsproc_shared_bufr_dumplist_ver=v1.5.0 - diff --git a/gempak/ush/gdas_ukmet_meta_ver.sh b/gempak/ush/gdas_ukmet_meta_ver.sh index dcc350ea74..845fa1cc6b 100755 --- a/gempak/ush/gdas_ukmet_meta_ver.sh +++ b/gempak/ush/gdas_ukmet_meta_ver.sh @@ -139,8 +139,7 @@ for area in $areas sdatenum=$sdate9 cyclenum=$cycle9 fi - # JY grid="$COMROOT/nawips/${envir}/ukmet.20${sdatenum}/ukmet_20${sdatenum}${cyclenum}${dgdattim}" - grid="${COMINukmet}.20${sdatenum}/ukmet_20${sdatenum}${cyclenum}${dgdattim}" + grid="${COMINukmet}.20${sdatenum}/gempak/ukmet_20${sdatenum}${cyclenum}${dgdattim}" # 500 MB HEIGHT METAFILE diff --git a/gempak/ush/gempak_gfs_f00_gif.sh b/gempak/ush/gempak_gfs_f00_gif.sh index 172cb687a2..2a7cca5c9f 100755 --- a/gempak/ush/gempak_gfs_f00_gif.sh +++ b/gempak/ush/gempak_gfs_f00_gif.sh @@ -593,7 +593,7 @@ if [ $SENDCOM = YES ]; then export input=${COMOUT}/${hgttmp500dev} export HEADER=YES export OUTPATH=$DATA/gfs_500_hgt_tmp_nh_anl_${cyc}.tif - ${UTILgfs}/ush/make_tif.sh + ${USHgfs}/make_tif.sh fi msg=" GEMPAK_GIF ${fhr} hour completed normally" diff --git a/gempak/ush/gfs_meta_comp.sh b/gempak/ush/gfs_meta_comp.sh index 4e9a3d8820..9bd27c5736 100755 --- a/gempak/ush/gfs_meta_comp.sh +++ b/gempak/ush/gfs_meta_comp.sh @@ -217,8 +217,7 @@ export err=$?;err_chk done # COMPARE THE 1200 UTC GFS MODEL TO THE 0000 UTC UKMET MODEL grid="F-${MDL} | ${PDY2}/${cyc}00" - # JY export HPCUKMET=$COMROOT/nawips/prod/ukmet.${PDY} - export HPCUKMET=${COMINukmet}.${PDY} + export HPCUKMET=${COMINukmet}.${PDY}/gempak grid2="F-UKMETHPC | ${PDY2}/0000" # for gfsfhr in 00 12 24 36 48 60 84 108 for gfsfhr in 00 12 
24 84 108 @@ -593,8 +592,7 @@ export err=$?;err_chk done # COMPARE THE 0000 UTC GFS MODEL TO THE 1200 UTC UKMET FROM YESTERDAY grid="F-${MDL} | ${PDY2}/${cyc}00" - #XXW export HPCUKMET=${MODEL}/ukmet.${PDYm1} - export HPCUKMET=${COMINukmet}.${PDYm1} + export HPCUKMET=${COMINukmet}.${PDYm1}/gempak grid2="F-UKMETHPC | ${PDY2m1}/1200" # for gfsfhr in 00 12 24 36 48 60 84 108 for gfsfhr in 00 12 24 84 108 diff --git a/gempak/ush/gfs_meta_crb.sh b/gempak/ush/gfs_meta_crb.sh index 4800578238..82fa7795e8 100755 --- a/gempak/ush/gfs_meta_crb.sh +++ b/gempak/ush/gfs_meta_crb.sh @@ -260,11 +260,8 @@ export err=$?;err_chk if [ ${cyc} -eq 00 ] ; then - # BV export MODEL=/com/nawips/prod - # JY export HPCECMWF=${MODEL}/ecmwf.${PDY} - # JY export HPCUKMET=${MODEL}/ukmet.${PDYm1} export HPCECMWF=${COMINecmwf}.${PDY}/gempak - export HPCUKMET=${COMINukmet}.${PDYm1} + export HPCUKMET=${COMINukmet}.${PDYm1}/gempak grid1="F-${MDL} | ${PDY2}/${cyc}00" grid2="${COMINecmwf}.${PDYm1}/gempak/ecmwf_glob_${PDYm1}12" grid3="F-UKMETHPC | ${PDY2m1}/1200" diff --git a/gempak/ush/gfs_meta_hur.sh b/gempak/ush/gfs_meta_hur.sh index 9590f07b3e..aed25d6d78 100755 --- a/gempak/ush/gfs_meta_hur.sh +++ b/gempak/ush/gfs_meta_hur.sh @@ -338,7 +338,7 @@ if [ ${cyc} -eq 00 ] ; then # JY export HPCECMWF=${MODEL}/ecmwf.${PDY} # JY export HPCUKMET=${MODEL}/ukmet.${PDY} export HPCECMWF=${COMINecmwf}.${PDY}/gempak - export HPCUKMET=${COMINukmet}.${PDY} + export HPCUKMET=${COMINukmet}.${PDY}/gempak grid1="F-${MDL} | ${PDY2}/${cyc}00" grid2="${COMINecmwf}.${PDYm1}/gempak/ecmwf_glob_${PDYm1}12" grid3="F-UKMETHPC | ${PDY2}/${cyc}00" diff --git a/gempak/ush/gfs_meta_mar_comp.sh b/gempak/ush/gfs_meta_mar_comp.sh index d7262814b5..a55fa3c642 100755 --- a/gempak/ush/gfs_meta_mar_comp.sh +++ b/gempak/ush/gfs_meta_mar_comp.sh @@ -181,7 +181,7 @@ export err=$?;err_chk done # COMPARE THE 1200 UTC GFS MODEL TO THE 0000 UTC UKMET MODEL grid="F-${MDL} | ${PDY2}/${cyc}00" - export HPCUKMET=${COMINukmet}.${PDY} + export HPCUKMET=${COMINukmet}.${PDY}/gempak grid2="F-UKMETHPC | ${PDY2}/0000" # for gfsfhr in 00 12 24 36 48 60 84 108 for gfsfhr in 00 12 24 84 108 @@ -534,7 +534,7 @@ export err=$?;err_chk done # COMPARE THE 0000 UTC GFS MODEL TO THE 1200 UTC UKMET FROM YESTERDAY grid="F-${MDL} | ${PDY2}/${cyc}00" - export HPCUKMET=${COMINukmet}.${PDYm1} + export HPCUKMET=${COMINukmet}.${PDYm1}/gempak grid2="F-UKMETHPC | ${PDY2m1}/1200" # for gfsfhr in 00 12 24 36 48 60 84 108 for gfsfhr in 00 12 24 84 108 diff --git a/gempak/ush/gfs_meta_sa2.sh b/gempak/ush/gfs_meta_sa2.sh index de538a249d..a566031030 100755 --- a/gempak/ush/gfs_meta_sa2.sh +++ b/gempak/ush/gfs_meta_sa2.sh @@ -303,7 +303,7 @@ do ukmetfhr=${gfsfhr} fi gfsfhr="F${gfsfhr}" - grid3="${COMINukmet}.${PDY}/ukmet_${PDY}00f${ukmetfhr}" + grid3="${COMINukmet}.${PDY}/gempak/ukmet_${PDY}00f${ukmetfhr}" $GEMEXE/gdplot2_nc << EOF25 \$MAPFIL = mepowo.gsf diff --git a/jkhINFO b/jkhINFO new file mode 100644 index 0000000000..50f41bcc94 --- /dev/null +++ b/jkhINFO @@ -0,0 +1,37 @@ +31may + - seem to be running out of memory + * try increasing # of write tasks per group + * changed from 10 to 12 ==> increase #nodes by 8 + get MPI abort command; wrote Walter/Kate/David + +29may23 + - update to UFS hash, 1ac938c, 12may23 + Joe's update + + - + +19may23 + - update to top of develop (19May23) + - test UFS hash in checkout.sh (14Apr23 - 2247060) + + sh checkout.sh + sh build_all.sh + sh link_workflow.sh etc jet + + + get error when running setup_expt.py + Traceback (most recent call last): + File 
"/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/workflow/./setup_expt.py", line 13, in + from hosts import Host + File "/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/workflow/hosts.py", line 6, in + from pygw.yaml_file import YAMLFile + File "/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/workflow/pygw/yaml_file.py", line 9, in + from .jinja import Jinja + File "/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw/workflow/pygw/jinja.py", line 4, in + import jinja2 + ModuleNotFoundError: No module named 'jinja2' + + wrong Python environment ?? + * sent email to Kate, Walter, David + + sample xml file from David's directory + /lfs1/NESDIS/nesdis-rdo2/David.Huber/para/exp/384/384.xml diff --git a/jobs/JGDAS_ATMOS_ANALYSIS_DIAG b/jobs/JGDAS_ATMOS_ANALYSIS_DIAG new file mode 100755 index 0000000000..6ad5c8f31b --- /dev/null +++ b/jobs/JGDAS_ATMOS_ANALYSIS_DIAG @@ -0,0 +1,59 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "anal" -c "base anal analdiag" + + +############################################## +# Set variables used in the script +############################################## +export CDUMP="${RUN/enkf}" +export DO_CALC_ANALYSIS=${DO_CALC_ANALYSIS:-"YES"} + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(${NDATE} -${assim_freq} ${PDY}${cyc}) +# shellcheck disable= +export gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP="gdas" +export GDUMP_ENS="enkf${GDUMP}" + +export OPREFIX="${CDUMP}.t${cyc}z." +export GPREFIX="${GDUMP}.t${gcyc}z." +export APREFIX="${RUN}.t${cyc}z." + +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +mkdir -m 775 -p "${COM_ATMOS_ANALYSIS}" + +############################################################### +# Run relevant script +${ANALDIAGSH:-${SCRgfs}/exglobal_diag.sh} +status=$? +[[ ${status} -ne 0 ]] && exit ${status} + + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat ${pgmout} +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + + +exit 0 diff --git a/jobs/JGDAS_ATMOS_CHGRES_FORENKF b/jobs/JGDAS_ATMOS_CHGRES_FORENKF new file mode 100755 index 0000000000..1bbed53586 --- /dev/null +++ b/jobs/JGDAS_ATMOS_CHGRES_FORENKF @@ -0,0 +1,49 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "anal" -c "base anal echgres" + + +############################################## +# Set variables used in the script +############################################## +export CDUMP=${RUN/enkf} +export DO_CALC_ANALYSIS=${DO_CALC_ANALYSIS:-"YES"} + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +export APREFIX="${CDUMP}.t${cyc}z." +export APREFIX_ENS="${RUN}.t${cyc}z." 
+ +RUN=${CDUMP} YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +MEMDIR="mem001" YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY_MEM:COM_ATMOS_HISTORY_TMPL + +############################################################### +# Run relevant script +${CHGRESFCSTSH:-${SCRgfs}/exgdas_atmos_chgres_forenkf.sh} +status=$? +[[ ${status} -ne 0 ]] && exit ${status} + + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat ${pgmout} +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + + +exit 0 diff --git a/jobs/JGDAS_ATMOS_GEMPAK b/jobs/JGDAS_ATMOS_GEMPAK index e2d3d89da7..f0131ffb94 100755 --- a/jobs/JGDAS_ATMOS_GEMPAK +++ b/jobs/JGDAS_ATMOS_GEMPAK @@ -1,43 +1,20 @@ -#!/bin/sh +#! /usr/bin/env bash -set -xa -export PS4='$SECONDS + ' -date +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak" -c "base gempak" -############################################ -# GDAS GEMPAK PRODUCT GENERATION -############################################ - -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -mkdir -p $DATA -cd $DATA - -###################################### -# Set up the cycle variable -###################################### -export cycle=${cycle:-t${cyc}z} - -########################################### -# Run setpdy and initialize PDY variables -########################################### -setpdy.sh -. 
PDY +# TODO (#1219) This j-job is not part of the rocoto suite ################################ # Set up the HOME directory -################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} -export UTILgfs=${UTILgfs:-$HOMEgfs/util} +export EXECgfs=${EXECgfs:-${HOMEgfs}/exec} +export PARMgfs=${PARMgfs:-${HOMEgfs}/parm} +export PARMwmo=${PARMwmo:-${HOMEgfs}/parm/wmo} +export PARMproduct=${PARMproduct:-${HOMEgfs}/parm/product} +export FIXgempak=${FIXgempak:-${HOMEgfs}/gempak/fix} +export USHgempak=${USHgempak:-${HOMEgfs}/gempak/ush} +export SRCgfs=${SRCgfs:-${HOMEgfs}/scripts} +export UTILgfs=${UTILgfs:-${HOMEgfs}/util} ############################################ # Set up model and cycle specific variables @@ -50,80 +27,75 @@ export GRIB=pgrb2f export EXT="" export DBN_ALERT_TYPE=GDAS_GEMPAK +export SENDCOM=${SENDCOM:-NO} export SENDDBN=${SENDDBN:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} ################################### # Specify NET and RUN Name and model #################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gdas} export model=${model:-gdas} -export COMPONENT=${COMPONENT:-atmos} ############################################## # Define COM directories ############################################## -export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-${COMROOT}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/$COMPONENT/gempak} +for grid in 0p25 0p50 1p00; do + GRID=${grid} YMD=${PDY} HH=${cyc} generate_com -rx "COM_ATMOS_GRIB_${grid}:COM_ATMOS_GRIB_TMPL" +done -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT -fi +for grid in 1p00 0p25; do + prod_dir="COM_ATMOS_GEMPAK_${grid}" + GRID=${grid} YMD=${PDY} HH=${cyc} generate_com -rx "COM_ATMOS_GEMPAK_${grid}:COM_ATMOS_GEMPAK_TMPL" -export pgmout=OUTPUT.$$ + if [[ ${SENDCOM} == YES && ! -d "${!prod_dir}" ]] ; then + mkdir -m 775 -p "${!prod_dir}" + fi +done -env -if [ -f $DATA/poescrip ]; then - rm $DATA/poescript +# TODO: These actions belong in an ex-script not a j-job +if [[ -f poescript ]]; then + rm -f poescript fi ######################################################## # Execute the script. 
-echo "$SRCgfs/exgdas_atmos_nawips.sh gdas 009 GDAS_GEMPAK " >> poescript +echo "${SRCgfs}/exgdas_atmos_nawips.sh gdas 009 GDAS_GEMPAK ${COM_ATMOS_GEMPAK_1p00}" >> poescript ######################################################## ######################################################## # Execute the script for quater-degree grib -echo "$SRCgfs/exgdas_atmos_nawips.sh gdas_0p25 009 GDAS_GEMPAK " >>poescript +echo "${SRCgfs}/exgdas_atmos_nawips.sh gdas_0p25 009 GDAS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}" >> poescript ######################################################## cat poescript -chmod 775 $DATA/poescript +chmod 775 ${DATA}/poescript export MP_PGMMODEL=mpmd -export MP_CMDFILE=$DATA/poescript +export MP_CMDFILE=${DATA}/poescript -ntasks=${NTASKS_GEMPAK:-$(cat $DATA/poescript | wc -l)} +ntasks=${NTASKS_GEMPAK:-$(cat ${DATA}/poescript | wc -l)} ptile=${PTILE_GEMPAK:-4} threads=${NTHREADS_GEMPAK:-1} -export OMP_NUM_THREADS=$threads -APRUN="mpirun -n $ntasks cfp " +export OMP_NUM_THREADS=${threads} +APRUN="mpiexec -l -np ${ntasks} --cpu-bind verbose,core cfp" -APRUN_GEMPAKCFP=${APRUN_GEMPAKCFP:-$APRUN} -APRUNCFP=$(eval echo $APRUN_GEMPAKCFP) +APRUN_GEMPAKCFP=${APRUN_GEMPAKCFP:-${APRUN}} -$APRUNCFP $DATA/poescript +${APRUN_GEMPAKCFP} ${DATA}/poescript export err=$?; err_chk -######################################################## - -msg="JOB $job HAS COMPLETED NORMALLY!" -postmsg $jlogfile "$msg" - ############################################ # print exec I/O output ############################################ -if [ -e "$pgmout" ] ; then - cat $pgmout +if [ -e "${pgmout}" ] ; then + cat ${pgmout} fi ################################### # Remove temp directories ################################### -if [ "$KEEPDATA" != "YES" ] ; then - rm -rf $DATA +if [ "${KEEPDATA}" != "YES" ] ; then + rm -rf ${DATA} fi -date diff --git a/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC b/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC index 12951c2c53..beadb7ccf8 100755 --- a/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC +++ b/jobs/JGDAS_ATMOS_GEMPAK_META_NCDC @@ -1,56 +1,37 @@ -#!/bin/sh - -set -xa -export PS4='$SECONDS + ' -date +#! /usr/bin/env bash ############################################ # GDAS GEMPAK META NCDC PRODUCT GENERATION ############################################ -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -mkdir -p $DATA -cd $DATA - -###################################### -# Set up the cycle variable -###################################### -export cycle=${cycle:-t${cyc}z} +# TODO (#1222) This j-job is not part of the rocoto -########################################### -# Run setpdy and initialize PDY variables -########################################### -setpdy.sh -. 
PDY +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak_meta" -c "base gempak" ################################ # Set up the HOME directory ################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} -export UTILgfs=${UTILgfs:-$HOMEgfs/util} +export HOMEgfs=${HOMEgfs:-${PACKAGEROOT}/gfs.${gfs_ver}} +export EXECgfs=${EXECgfs:-${HOMEgfs}/exec} +export PARMgfs=${PARMgfs:-${HOMEgfs}/parm} +export PARMwmo=${PARMwmo:-${HOMEgfs}/parm/wmo} +export PARMproduct=${PARMproduct:-${HOMEgfs}/parm/product} +export FIXgempak=${FIXgempak:-${HOMEgfs}/gempak/fix} +export USHgempak=${USHgempak:-${HOMEgfs}/gempak/ush} +export SRCgfs=${SRCgfs:-${HOMEgfs}/scripts} +export UTILgfs=${UTILgfs:-${HOMEgfs}/util} # # Now set up GEMPAK/NTRANS environment # -cp $FIXgempak/datatype.tbl datatype.tbl +cp ${FIXgempak}/datatype.tbl datatype.tbl ################################### # Specify NET and RUN Name and model #################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gdas} -export COMPONENT=${COMPONENT:-atmos} +export COMPONENT="atmos" export MODEL=GDAS export GRID_NAME=gdas export fend=09 @@ -69,31 +50,30 @@ export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} ############################################## # Define COM directories ############################################## -export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}/$COMPONENT/gempak} -export COMINgdas=${COMINgdas:-$(compath.py ${NET}/${envir}/${RUN})} -export COMOUT=${COMOUT:-${COMROOT}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/$COMPONENT/gempak/meta} -export COMOUTncdc=${COMOUTncdc:-${COMROOT}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/$COMPONENT} +export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/${COMPONENT}/gempak} +export COMINgdas=${COMINgdas:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}} +export COMOUT=${COMOUT:-$(compath.py -o ${NET}/${gfs_ver}/${RUN}.${PDY})/${cyc}/${COMPONENT}/gempak/meta} +export COMOUTncdc=${COMOUTncdc:-$(compath.py -o ${NET}/${gfs_ver}/${RUN}.${PDY})/${cyc}/${COMPONENT}} -export COMINukmet=${COMINukmet:-$(compath.py nawips/prod/ukmet)} -export COMINecmwf=${COMINecmwf:-$(compath.py ecmwf/prod/ecmwf)} +export COMINukmet=${COMINukmet:-$(compath.py ${envir}/ukmet/${ukmet_ver})/ukmet} +export COMINecmwf=${COMINecmwf:-$(compath.py ${envir}/ecmwf/${ecmwf_ver})/ecmwf} export COMOUTukmet=${COMOUT} export COMOUTecmwf=${COMOUT} -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTncdc $COMOUTukmet $COMOUTecmwf +if [ ${SENDCOM} = YES ] ; then + mkdir -m 775 -p ${COMOUT} ${COMOUTncdc} ${COMOUTukmet} ${COMOUTecmwf} fi export pgmout=OUTPUT.$$ -env ######################################################## # Execute the script. 
-$USHgempak/gdas_meta_na.sh -$USHgempak/gdas_ecmwf_meta_ver.sh -$USHgempak/gdas_meta_loop.sh -$USHgempak/gdas_ukmet_meta_ver.sh +${USHgempak}/gdas_meta_na.sh +${USHgempak}/gdas_ecmwf_meta_ver.sh +${USHgempak}/gdas_meta_loop.sh +${USHgempak}/gdas_ukmet_meta_ver.sh export err=$?; err_chk ######################################################## @@ -103,25 +83,21 @@ export err=$?; err_chk ######################################################## # Execute the script. -$SRCgfs/exgdas_atmos_gempak_gif_ncdc.sh +${SRCgfs}/exgdas_atmos_gempak_gif_ncdc.sh export err=$?; err_chk ######################################################## -msg="JOB $job HAS COMPLETED NORMALLY!" -postmsg $jlogfile "$msg" - ############################################ # print exec I/O output ############################################ -if [ -e "$pgmout" ] ; then - cat $pgmout +if [ -e "${pgmout}" ] ; then + cat ${pgmout} fi ################################### # Remove temp directories ################################### -if [ "$KEEPDATA" != "YES" ] ; then - rm -rf $DATA +if [ "${KEEPDATA}" != "YES" ] ; then + rm -rf ${DATA} fi -date diff --git a/jobs/JGDAS_ATMOS_VERFOZN b/jobs/JGDAS_ATMOS_VERFOZN new file mode 100755 index 0000000000..deccc0b28e --- /dev/null +++ b/jobs/JGDAS_ATMOS_VERFOZN @@ -0,0 +1,86 @@ +#! /usr/bin/env bash + +############################################################# +# Set up environment for GDAS Ozone Monitor job +############################################################# +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" + +export OZNMON_SUFFIX=${OZNMON_SUFFIX:-${NET}} + +#--------------------------------------------- +# Specify Execution Areas +# +export HOMEgfs_ozn=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} +export HOMEgdas_ozn=${HOMEgfs_ozn:-${NWROOT}/gfs.${gfs_ver}} +export PARMgdas_ozn=${PARMgfs_ozn:-${HOMEgfs_ozn}/parm/mon} +export SCRgdas_ozn=${SCRgfs_ozn:-${HOMEgfs_ozn}/scripts} +export FIXgdas_ozn=${FIXgfs_ozn:-${HOMEgfs_ozn}/fix/gdas} + +export HOMEoznmon=${HOMEoznmon:-${HOMEgfs_ozn}} +export EXECoznmon=${EXECoznmon:-${HOMEoznmon}/exec} +export FIXoznmon=${FIXoznmon:-${HOMEoznmon}/fix} +export USHoznmon=${USHoznmon:-${HOMEoznmon}/ush} + + +#----------------------------------- +# source the parm file +# +. ${PARMgdas_ozn}/gdas_oznmon.parm + + +############################################# +# determine PDY and cyc for previous cycle +############################################# + +pdate=$(${NDATE} -6 ${PDY}${cyc}) +echo "pdate = ${pdate}" + +export P_PDY=${pdate:0:8} +export p_cyc=${pdate:8:2} + +#--------------------------------------------- +# OZN_TANKDIR - WHERE OUTPUT DATA WILL RESIDE +# +export OZN_TANKDIR=${OZN_TANKDIR:-$(compath.py ${envir}/${NET}/${gfs_ver})} +export TANKverf_ozn=${TANKverf_ozn:-${OZN_TANKDIR}/${RUN}.${PDY}/${cyc}/atmos/oznmon} +export TANKverf_oznM1=${TANKverf_oznM1:-${OZN_TANKDIR}/${RUN}.${P_PDY}/${p_cyc}/atmos/oznmon} + +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS + +if [[ ! -d ${TANKverf_ozn} ]]; then + mkdir -p -m 775 ${TANKverf_ozn} +fi + +#--------------------------------------- +# set up validation file +# +if [[ ${VALIDATE_DATA} -eq 1 ]]; then + export ozn_val_file=${ozn_val_file:-${FIXgdas_ozn}/gdas_oznmon_base.tar} +fi + +#--------------------------------------- +# Set necessary environment variables +# +export OZN_AREA=${OZN_AREA:-glb} +export oznstat=${oznstat:-${COM_ATMOS_ANALYSIS}/gdas.t${cyc}z.oznstat} + + +#------------------------------------------------------- +# Execute the script. 
+# +${OZNMONSH:-${SCRgdas_ozn}/exgdas_atmos_verfozn.sh} ${PDY} ${cyc} +err=$? +[[ ${err} -ne 0 ]] && exit ${err} + + +################################ +# Remove the Working Directory +################################ +KEEPDATA=${KEEPDATA:-NO} +cd ${DATAROOT} +if [ ${KEEPDATA} = NO ] ; then + rm -rf ${DATA} +fi + +exit 0 diff --git a/jobs/JGDAS_ATMOS_VERFRAD b/jobs/JGDAS_ATMOS_VERFRAD new file mode 100755 index 0000000000..42e112c74f --- /dev/null +++ b/jobs/JGDAS_ATMOS_VERFRAD @@ -0,0 +1,97 @@ +#! /usr/bin/env bash + +############################################################# +# Set up environment for GDAS Radiance Monitor job +############################################################# +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" + +export COMPONENT="atmos" + +export RAD_DATA_IN=${DATA} + +export RADMON_SUFFIX=${RADMON_SUFFIX:-${RUN}} +export CYCLE_INTERVAL=${CYCLE_INTERVAL:-6} + +mkdir -p ${RAD_DATA_IN} +cd ${RAD_DATA_IN} + +############################################## +# Specify Execution Areas +############################################## +export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} +export SCRgfs=${SCRgfs:-${HOMEgfs}/scripts} + +export FIXgdas=${FIXgdas:-${HOMEgfs}/fix/gdas} +export PARMmon=${PARMmon:-${HOMEgfs}/parm/mon} + +export HOMEradmon=${HOMEradmon:-${HOMEgfs}} +export EXECradmon=${EXECradmon:-${HOMEradmon}/exec} +export FIXradmon=${FIXradmon:-${FIXgfs}} +export USHradmon=${USHradmon:-${HOMEradmon}/ush} + + +################################### +# source the parm file +################################### +parm_file=${parm_file:-${PARMmon}/da_mon.parm} +. ${parm_file} + + +############################################# +# determine PDY and cyc for previous cycle +############################################# + +pdate=$(${NDATE} -6 ${PDY}${cyc}) +echo "pdate = ${pdate}" + +export P_PDY=${pdate:0:8} +export p_cyc=${pdate:8:2} + +############################################# +# COMOUT - WHERE GSI OUTPUT RESIDES +# TANKverf - WHERE OUTPUT DATA WILL RESIDE +############################################# +export TANKverf=${TANKverf:-$(compath.py ${envir}/${NET}/${gfs_ver})} +export TANKverf_rad=${TANKverf_rad:-${TANKverf}/${RUN}.${PDY}/${cyc}/atmos/radmon} +export TANKverf_radM1=${TANKverf_radM1:-${TANKverf}/${RUN}.${P_PDY}/${p_cyc}/atmos/radmon} + +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS + +mkdir -p -m 775 ${TANKverf_rad} + +######################################## +# Set necessary environment variables +######################################## +export RAD_AREA=${RAD_AREA:-glb} + +export biascr=${biascr:-${COM_ATMOS_ANALYSIS}/gdas.t${cyc}z.abias} +export radstat=${radstat:-${COM_ATMOS_ANALYSIS}/gdas.t${cyc}z.radstat} + +echo " " +echo "JOB HAS STARTED" +echo " " + + +######################################################## +# Execute the script. +${RADMONSH:-${SCRgfs}/exgdas_atmos_verfrad.sh} ${PDY} ${cyc} +err=$? + +if [[ ${err} -ne 0 ]] ; then + exit ${err} +else + echo " " + echo "JOB HAS COMPLETED NORMALLY" + echo " " +fi + +################################ +# Remove the Working Directory +################################ +KEEPDATA=${KEEPDATA:-YES} +cd ${DATAROOT} +if [ ${KEEPDATA} = NO ] ; then + rm -rf ${RAD_DATA_IN} +fi + diff --git a/jobs/JGDAS_ATMOS_VMINMON b/jobs/JGDAS_ATMOS_VMINMON new file mode 100755 index 0000000000..3f9c0d856f --- /dev/null +++ b/jobs/JGDAS_ATMOS_VMINMON @@ -0,0 +1,74 @@ +#! 
/usr/bin/env bash + +########################################################### +# GDAS Minimization Monitor (MinMon) job +########################################################### +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" + +########################################################### +# obtain unique process id (pid) and make temp directories +########################################################### +export MINMON_SUFFIX=${MINMON_SUFFIX:-${NET}} +export m_job=${m_job:-${MINMON_SUFFIX}_mmDE} + + +############################################## +# Specify Package Areas +############################################## +export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} +export SCRgfs=${SCRgfs:-${HOMEgfs}/scripts} + +export M_FIXgdas=${M_FIXgdas:-${HOMEgfs}/fix/gdas} + +export HOMEminmon=${HOMEminmon:-${HOMEgfs}} +export EXECminmon=${EXECminmon:-${HOMEminmon}/exec} +export USHminmon=${USHminmon:-${HOMEminmon}/ush} + + +############################################# +# determine PDY and cyc for previous cycle +############################################# + +pdate=$(${NDATE} -6 ${PDY}${cyc}) +echo "pdate = ${pdate}" + +export P_PDY=${pdate:0:8} +export p_cyc=${pdate:8:2} + + +############################################# +# TANKverf - WHERE OUTPUT DATA WILL RESIDE +############################################# +export M_TANKverf=${M_TANKverf:-${COM_IN}/${RUN}.${PDY}/${cyc}/atmos/minmon} +export M_TANKverfM1=${M_TANKverfM1:-${COM_IN}/${RUN}.${P_PDY}/${p_cyc}/atmos/minmon} + +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS + +mkdir -p -m 775 ${M_TANKverf} + + +######################################## +# Set necessary environment variables +######################################## +export CYCLE_INTERVAL=6 +export gsistat=${gsistat:-${COM_ATMOS_ANALYSIS}/gdas.t${cyc}z.gsistat} + + +######################################################## +# Execute the script. +${GMONSH:-${SCRgfs}/exgdas_atmos_vminmon.sh} ${PDY} ${cyc} +err=$? +[[ ${err} -ne 0 ]] && exit ${err} + + +################################ +# Remove the Working Directory +################################ +KEEPDATA=${KEEPDATA:-NO} +cd ${DATAROOT} +if [ ${KEEPDATA} = NO ] ; then + rm -rf ${DATA} +fi + +exit 0 diff --git a/jobs/JGDAS_ENKF_ARCHIVE b/jobs/JGDAS_ENKF_ARCHIVE new file mode 100755 index 0000000000..f986fd38b2 --- /dev/null +++ b/jobs/JGDAS_ENKF_ARCHIVE @@ -0,0 +1,45 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "earc" -c "base earc" + + +############################################## +# Set variables used in the script +############################################## +export CDUMP=${RUN/enkf} + +YMD=${PDY} HH=${cyc} generate_com -rx COM_TOP +MEMDIR="ensstat" YMD=${PDY} HH=${cyc} generate_com -rx \ + COM_ATMOS_ANALYSIS_ENSSTAT:COM_ATMOS_ANALYSIS_TMPL \ + COM_ATMOS_HISTORY_ENSSTAT:COM_ATMOS_HISTORY_TMPL + +############################################################### +# Run archive script +############################################################### + +"${SCRgfs}/exgdas_enkf_earc.sh" +status=$? 
+[[ ${status} -ne 0 ]] && exit "${status}" + +############################################################### + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + + +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || (echo "${DATAROOT} does not exist. ABORT!"; exit 1) +[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" + +exit 0 diff --git a/jobs/JGDAS_ENKF_DIAG b/jobs/JGDAS_ENKF_DIAG new file mode 100755 index 0000000000..40f2968869 --- /dev/null +++ b/jobs/JGDAS_ENKF_DIAG @@ -0,0 +1,123 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "eobs" -c "base anal eobs analdiag ediag" + + +############################################## +# Set variables used in the script +############################################## +export CDUMP="${RUN/enkf}" +export MAKE_NSSTBUFR=${MAKE_NSSTBUFR:-"NO"} +export MAKE_ACFTBUFR=${MAKE_ACFTBUFR:-"NO"} + + +############################################## +# Begin JOB SPECIFIC work +############################################## +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}") +# shellcheck disable= +export gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP="gdas" +export GDUMP_ENS="enkf${GDUMP}" + +export CASE=${CASE_ENS} + +export OPREFIX="${CDUMP}.t${cyc}z." +export APREFIX="${RUN}.t${cyc}z." +export GPREFIX="${GDUMP_ENS}.t${gcyc}z." +GPREFIX_DET="${GDUMP}.t${gcyc}z." + +RUN=${CDUMP} YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS +MEMDIR="ensstat" YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS + +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_OBS_PREV:COM_OBS_TMPL \ + COM_ATMOS_ANALYSIS_DET_PREV:COM_ATMOS_ANALYSIS_TMPL + +MEMDIR="ensstat" RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL + + +export ATMGES_ENSMEAN="${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf006.ensmean.nc" +if [ ! -f ${ATMGES_ENSMEAN} ]; then + echo "FATAL ERROR: FILE MISSING: ATMGES_ENSMEAN = ${ATMGES_ENSMEAN}" + exit 1 +fi + +# Link observational data +export PREPQC="${COM_OBS}/${OPREFIX}prepbufr" +if [[ ! 
-f ${PREPQC} ]]; then + echo "WARNING: Global PREPBUFR FILE ${PREPQC} MISSING" +fi +export TCVITL="${COM_OBS}/${OPREFIX}syndata.tcvitals.tm00" +if [[ ${DONST} = "YES" ]]; then + export NSSTBF="${COM_OBS}/${OPREFIX}nsstbufr" +fi +export PREPQCPF="${COM_OBS}/${OPREFIX}prepbufr.acft_profiles" + +# Guess Bias correction coefficients related to control +export GBIAS=${COM_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}abias +export GBIASPC=${COM_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}abias_pc +export GBIASAIR=${COM_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}abias_air +export GRADSTAT=${COM_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}radstat + +# Bias correction coefficients related to ensemble mean +export ABIAS="${COM_ATMOS_ANALYSIS}/${APREFIX}abias.ensmean" +export ABIASPC="${COM_ATMOS_ANALYSIS}/${APREFIX}abias_pc.ensmean" +export ABIASAIR="${COM_ATMOS_ANALYSIS}/${APREFIX}abias_air.ensmean" +export ABIASe="${COM_ATMOS_ANALYSIS}/${APREFIX}abias_int.ensmean" + +# Diagnostics related to ensemble mean +export GSISTAT="${COM_ATMOS_ANALYSIS}/${APREFIX}gsistat.ensmean" +export CNVSTAT="${COM_ATMOS_ANALYSIS}/${APREFIX}cnvstat.ensmean" +export OZNSTAT="${COM_ATMOS_ANALYSIS}/${APREFIX}oznstat.ensmean" +export RADSTAT="${COM_ATMOS_ANALYSIS}/${APREFIX}radstat.ensmean" + +# Select observations based on ensemble mean +export RUN_SELECT="YES" +export USE_SELECT="NO" +export SELECT_OBS="${COM_ATMOS_ANALYSIS}/${APREFIX}obsinput.ensmean" + +export DIAG_SUFFIX="_ensmean" +export DIAG_COMPRESS="NO" + +# GSI namelist options specific to eobs +export SETUP_INVOBS="passive_bc=.false.,${SETUP_INVOBS}" + +# Ensure clean stat tarballs for ensemble mean +for fstat in ${CNVSTAT} ${OZNSTAT} ${RADSTAT}; do + [[ -f ${fstat} ]] && rm -f ${fstat} +done + + +############################################################### +# Run relevant script + +${ANALDIAGSH:-${SCRgfs}/exglobal_diag.sh} +status=$? +[[ ${status} -ne 0 ]] && exit ${status} + + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat ${pgmout} +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + + +exit 0 diff --git a/jobs/JGDAS_ENKF_ECEN b/jobs/JGDAS_ENKF_ECEN new file mode 100755 index 0000000000..cd77eebb55 --- /dev/null +++ b/jobs/JGDAS_ENKF_ECEN @@ -0,0 +1,68 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "ecen" -c "base ecen" + + +############################################## +# Set variables used in the script +############################################## +export CDUMP="${RUN/enkf}" + +############################################## +# Begin JOB SPECIFIC work +############################################## +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}") +# shellcheck disable= +export gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP="gdas" +export GDUMP_ENS="enkf${GDUMP}" + +export CASE=${CASE_ENS} + +export OPREFIX="${CDUMP}.t${cyc}z." +export APREFIX="${CDUMP}.t${cyc}z." +export APREFIX_ENS="${RUN}.t${cyc}z." +export GPREFIX="${GDUMP}.t${gcyc}z." +export GPREFIX_ENS="${GDUMP_ENS}.t${gcyc}z." 
+ +RUN=${CDUMP} YMD=${PDY} HH=${cyc} generate_com -rx \ + COM_ATMOS_ANALYSIS_DET:COM_ATMOS_ANALYSIS_TMPL + +MEMDIR="ensstat" YMD=${PDY} HH=${cyc} generate_com -rx \ + COM_ATMOS_ANALYSIS_STAT:COM_ATMOS_ANALYSIS_TMPL + +MEMDIR="ensstat" RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_ATMOS_HISTORY_STAT_PREV:COM_ATMOS_HISTORY_TMPL + + +############################################################### +# Run relevant script + +${ENKFRECENSH:-${SCRgfs}/exgdas_enkf_ecen.sh} +status=$? +[[ ${status} -ne 0 ]] && exit ${status} + + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat ${pgmout} +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + + +exit 0 diff --git a/jobs/JGDAS_ENKF_FCST b/jobs/JGDAS_ENKF_FCST new file mode 100755 index 0000000000..45d0ad8b1d --- /dev/null +++ b/jobs/JGDAS_ENKF_FCST @@ -0,0 +1,82 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "efcs" -c "base fcst efcs" + + +############################################## +# Set variables used in the script +############################################## +export CDUMP=${RUN/enkf} +export rCDUMP="enkfgdas" + +############################################## +# Begin JOB SPECIFIC work +############################################## + +export CASE=${CASE_ENS} + +YMD=${PDY} HH=${cyc} generate_com -rx COM_TOP + + +# Forecast length for EnKF forecast +export FHMIN=${FHMIN_ENKF} +export FHOUT=${FHOUT_ENKF} +export FHMAX=${FHMAX_ENKF} + +# Get ENSBEG/ENSEND from ENSGRP and NMEM_EFCSGRP +if [[ $CDUMP == "gfs" ]]; then + export NMEM_EFCSGRP=${NMEM_EFCSGRP_GFS:-${NMEM_EFCSGRP:-1}} +fi +export ENSEND=$((NMEM_EFCSGRP * 10#${ENSGRP})) +export ENSBEG=$((ENSEND - NMEM_EFCSGRP + 1)) + + +############################################################### +# Run relevant script + +${ENKFFCSTSH:-${SCRgfs}/exgdas_enkf_fcst.sh} +status=$? +[[ ${status} -ne 0 ]] && exit ${status} + + +# Double check the status of members in ENSGRP +EFCSGRP="${COM_TOP}/efcs.grp${ENSGRP}" +npass=0 +if [ -f ${EFCSGRP} ]; then + npass=$(grep "PASS" ${EFCSGRP} | wc -l) +fi +echo "${npass}/${NMEM_EFCSGRP} members successfull in efcs.grp${ENSGRP}" +if [ ${npass} -ne ${NMEM_EFCSGRP} ]; then + echo "FATAL ERROR: Failed members in group ${ENSGRP}, ABORT!" + cat ${EFCSGRP} + exit 99 +fi + + +############################################## +# Send Alerts +############################################## +if [ ${SENDDBN} = YES ] ; then + ${DBNROOT}/bin/dbn_alert MODEL ENKF1_MSC_fcsstat ${job} ${EFCSGRP} +fi + + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [ -e "${pgmout}" ] ; then + cat ${pgmout} +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + +exit 0 diff --git a/jobs/JGDAS_ENKF_POST b/jobs/JGDAS_ENKF_POST new file mode 100755 index 0000000000..0f7039d614 --- /dev/null +++ b/jobs/JGDAS_ENKF_POST @@ -0,0 +1,49 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "epos" -c "base epos" + + +############################################## +# Set variables used in the script +############################################## +export CDUMP=${RUN/enkf} + + +############################################## +# Begin JOB SPECIFIC work +############################################## +export GFS_NCIO=${GFS_NCIO:-"YES"} + +export PREFIX="${RUN}.t${cyc}z." + +export LEVS=$((LEVS-1)) + + +############################################################### +# Run relevant script + +${ENKFPOSTSH:-${SCRgfs}/exgdas_enkf_post.sh} +status=$? +[[ ${status} -ne 0 ]] && exit ${status} + + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [ -e "${pgmout}" ] ; then + cat ${pgmout} +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + + +exit 0 diff --git a/jobs/JGDAS_ENKF_SELECT_OBS b/jobs/JGDAS_ENKF_SELECT_OBS new file mode 100755 index 0000000000..7c02512989 --- /dev/null +++ b/jobs/JGDAS_ENKF_SELECT_OBS @@ -0,0 +1,149 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "eobs" -c "base anal eobs" + + +############################################## +# Set variables used in the script +############################################## +export CDUMP=${RUN/enkf} +export MAKE_NSSTBUFR=${MAKE_NSSTBUFR:-"NO"} +export MAKE_ACFTBUFR=${MAKE_ACFTBUFR:-"NO"} + + +############################################## +# Begin JOB SPECIFIC work +############################################## +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(${NDATE} -${assim_freq} ${PDY}${cyc}) +# shellcheck disable= +export gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP="gdas" +export GDUMP_ENS="enkf${GDUMP}" + +export OPREFIX="${CDUMP}.t${cyc}z." +export APREFIX="${RUN}.t${cyc}z." +export GPREFIX="${GDUMP_ENS}.t${gcyc}z." +APREFIX_DET="${CDUMP}.t${cyc}z." +GPREFIX_DET="${GDUMP}.t${gcyc}z." + +export GSUFFIX=".ensmean.nc" + +# Generate COM variables from templates +RUN=${CDUMP} YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS +MEMDIR='ensstat' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +declare -rx COM_ATMOS_ANALYSIS_ENS="${COM_ATMOS_ANALYSIS}" + +RUN=${CDUMP} YMD=${PDY} HH=${cyc} generate_com -r COM_ATMOS_ANALYSIS_DET:COM_ATMOS_ANALYSIS_TMPL + +MEMDIR='ensstat' RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_ATMOS_ANALYSIS_PREV:COM_ATMOS_ANALYSIS_TMPL \ + COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL \ + +RUN="${GDUMP}" YMD=${gPDY} HH=${gcyc} generate_com -r COM_ATMOS_ANALYSIS_DET_PREV:COM_ATMOS_ANALYSIS_TMPL + +mkdir -m 775 -p "${COM_ATMOS_ANALYSIS}" + +export ATMGES_ENSMEAN="${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf006${GSUFFIX}" +if [[ ! -f ${ATMGES_ENSMEAN} ]]; then + echo "FATAL ERROR: FILE MISSING: ATMGES_ENSMEAN = ${ATMGES_ENSMEAN}" + exit 1 +fi + +# Ignore masking of chained commands and possible misspelling warning +# shellcheck disable=SC2153,SC2312 +LEVS=$(${NCDUMP} -h "${ATMGES_ENSMEAN}" | grep -i "pfull" | head -1 | awk -F" = " '{print $2}' | awk -F" " '{print $1}') # get LEVS +# shellcheck disable= +status=$? 
+[[ ${status} -ne 0 ]] && exit "${status}" +export LEVS + +# Link observational data +export PREPQC="${COM_OBS}/${OPREFIX}prepbufr" +if [[ ! -f ${PREPQC} ]]; then + echo "WARNING: Global PREPBUFR FILE ${PREPQC} MISSING" +fi +export TCVITL="${COM_OBS}/${APREFIX_DET}syndata.tcvitals.tm00" +if [[ ${DONST} = "YES" ]]; then + export NSSTBF="${COM_OBS}/${OPREFIX}nsstbufr" +fi +export PREPQCPF="${COM_OBS}/${OPREFIX}prepbufr.acft_profiles" + +# Deterministic analysis and increment files +export SFCANL="${COM_ATMOS_ANALYSIS_DET}/${APREFIX_DET}sfcanl.nc" +export DTFANL="${COM_ATMOS_ANALYSIS_DET}/${APREFIX_DET}dtfanl.nc" +export ATMANL="${COM_ATMOS_ANALYSIS_DET}/${APREFIX_DET}atmanl.nc" +export ATMINC="${COM_ATMOS_ANALYSIS_DET}/${APREFIX_DET}atminc.nc" + +# Guess Bias correction coefficients related to control +export GBIAS=${COM_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}abias +export GBIASPC=${COM_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}abias_pc +export GBIASAIR=${COM_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}abias_air +export GRADSTAT=${COM_ATMOS_ANALYSIS_DET_PREV}/${GPREFIX_DET}radstat + +# Bias correction coefficients related to ensemble mean +export ABIAS="${COM_ATMOS_ANALYSIS}/${APREFIX}abias.ensmean" +export ABIASPC="${COM_ATMOS_ANALYSIS}/${APREFIX}abias_pc.ensmean" +export ABIASAIR="${COM_ATMOS_ANALYSIS}/${APREFIX}abias_air.ensmean" +export ABIASe="${COM_ATMOS_ANALYSIS}/${APREFIX}abias_int.ensmean" + +# Diagnostics related to ensemble mean +export GSISTAT="${COM_ATMOS_ANALYSIS}/${APREFIX}gsistat.ensmean" +export CNVSTAT="${COM_ATMOS_ANALYSIS}/${APREFIX}cnvstat.ensmean" +export OZNSTAT="${COM_ATMOS_ANALYSIS}/${APREFIX}oznstat.ensmean" +export RADSTAT="${COM_ATMOS_ANALYSIS}/${APREFIX}radstat.ensmean" + +# Select observations based on ensemble mean +export RUN_SELECT="YES" +export USE_SELECT="NO" +export SELECT_OBS="${COM_ATMOS_ANALYSIS}/${APREFIX}obsinput.ensmean" + +export DIAG_SUFFIX="_ensmean" + +# GSI namelist options specific to eobs +export SETUP_INVOBS="passive_bc=.false.,${SETUP_INVOBS}" + +# Ensure clean stat tarballs for ensemble mean +for fstat in ${CNVSTAT} ${OZNSTAT} ${RADSTAT}; do + [[ -f ${fstat} ]] && rm -f ${fstat} +done + + +############################################################### +# Run relevant script + +${INVOBSSH:-${SCRgfs}/exgdas_enkf_select_obs.sh} +status=$? +[[ ${status} -ne 0 ]] && exit ${status} + + +############################################## +# Send Alerts +############################################## +if [[ ${SENDDBN} = YES ]] ; then + ${DBNROOT}/bin/dbn_alert MODEL ENKF1_MSC_gsistat ${job} ${GSISTAT} +fi + + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat ${pgmout} +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + + +exit 0 diff --git a/jobs/JGDAS_ENKF_SFC b/jobs/JGDAS_ENKF_SFC new file mode 100755 index 0000000000..3214812db8 --- /dev/null +++ b/jobs/JGDAS_ENKF_SFC @@ -0,0 +1,69 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "esfc" -c "base esfc" + + +############################################## +# Set variables used in the script +############################################## +export CDUMP="${RUN/enkf}" + +############################################## +# Begin JOB SPECIFIC work +############################################## +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}") +# shellcheck disable= +export gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP="gdas" +export GDUMP_ENS="enkf${GDUMP}" + +export OPREFIX="${CDUMP}.t${cyc}z." +export GPREFIX="${GDUMP}.t${gcyc}z." +export APREFIX="${CDUMP}.t${cyc}z." + +export CASE=${CASE_ENS} + +export OPREFIX="${CDUMP}.t${cyc}z." +export APREFIX="${CDUMP}.t${cyc}z." +export APREFIX_ENS="${RUN}.t${cyc}z." +export GPREFIX="${GDUMP}.t${gcyc}z." +export GPREFIX_ENS="${GDUMP_ENS}.t${gcyc}z." + +RUN=${CDUMP} YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS \ + COM_ATMOS_ANALYSIS_DET:COM_ATMOS_ANALYSIS_TMPL + +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_OBS_PREV:COM_OBS_TMPL \ + COM_ATMOS_ANALYSIS_DET_PREV:COM_ATMOS_ANALYSIS_TMPL + +############################################################### +# Run relevant script + +${ENKFRESFCSH:-${SCRgfs}/exgdas_enkf_sfc.sh} +status=$? +[[ ${status} -ne 0 ]] && exit ${status} + + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat ${pgmout} +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + + +exit 0 diff --git a/jobs/JGDAS_ENKF_UPDATE b/jobs/JGDAS_ENKF_UPDATE new file mode 100755 index 0000000000..1050529165 --- /dev/null +++ b/jobs/JGDAS_ENKF_UPDATE @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "eupd" -c "base anal eupd" + + +############################################## +# Set variables used in the script +############################################## +export CDUMP="${RUN/enkf}" + + +############################################## +# Begin JOB SPECIFIC work +############################################## +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}") +# shellcheck disable= +export gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP="gdas" +export GDUMP_ENS="enkf${GDUMP}" + +export APREFIX="${RUN}.t${cyc}z." +export GPREFIX="${GDUMP_ENS}.t${gcyc}z." + +MEMDIR="ensstat" YMD=${PDY} HH=${cyc} generate_com -rx \ + COM_ATMOS_ANALYSIS_STAT:COM_ATMOS_ANALYSIS_TMPL + +MEMDIR="ensstat" RUN="enkfgdas" YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_ATMOS_HISTORY_STAT_PREV:COM_ATMOS_HISTORY_TMPL + + +############################################################### +# Run relevant script + +${ENKFUPDSH:-${SCRgfs}/exgdas_enkf_update.sh} +status=$? 
+[[ ${status} -ne 0 ]] && exit ${status} + + +############################################## +# Send Alerts +############################################## +if [ ${SENDDBN} = YES ] ; then + "${DBNROOT}/bin/dbn_alert" "MODEL" "ENKF1_MSC_enkfstat" "${job}" "${COM_ATMOS_ANALYSIS_STAT}/${APREFIX}enkfstat" +fi + + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [ -e "${pgmout}" ] ; then + cat ${pgmout} +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + + +exit 0 diff --git a/jobs/JGDAS_FIT2OBS b/jobs/JGDAS_FIT2OBS new file mode 100755 index 0000000000..d673845404 --- /dev/null +++ b/jobs/JGDAS_FIT2OBS @@ -0,0 +1,88 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "fit2obs" -c "base fit2obs" + + +############################################## +# Set variables used in the script +############################################## + +export CDUMP=${RUN/enkf} + +# Ignore spelling warning; nothing is misspelled +# shellcheck disable=SC2153 +CDATE=$(${NDATE} -"${VBACKUP_FITS}" "${PDY}${cyc}") # set CDATE to lookback cycle for use in fit2obs package +export CDATE +vday=${CDATE:0:8} +vcyc=${CDATE:8:2} + +export COM_INA=${ROTDIR}/gdas.${vday}/${vcyc}/atmos +# We want to defer variable expansion, so ignore warning about single quotes +# shellcheck disable=SC2016 +export COM_INF='$ROTDIR/vrfyarch/gfs.$fdy/$fzz' +export COM_PRP=${ROTDIR}/gdas.${vday}/${vcyc}/obs + +export PRPI=${COM_PRP}/${RUN}.t${vcyc}z.prepbufr +export sig1=${COM_INA}/${RUN}.t${vcyc}z.atmanl.nc +export sfc1=${COM_INA}/${RUN}.t${vcyc}z.atmanl.nc +export CNVS=${COM_INA}/${RUN}.t${vcyc}z.cnvstat + +export OUTPUT_FILETYPE=${OUTPUT_FILETYPE:-netcdf} + +export FIT_DIR=${ARCDIR}/fits +[[ ! -d "${FIT_DIR}" ]] && mkdir -p "${FIT_DIR}" +export HORZ_DIR=${ARCDIR}/horiz +[[ ! -d "${HORZ_DIR}" ]] && mkdir -p "${HORZ_DIR}" +export COMLOX=${DATA}/fitx +[[ ! -d "${COMLOX}" ]] && mkdir -p "${COMLOX}" + +echo "echo err_chk">"${DATA}"/err_chk; chmod 755 "${DATA}"/err_chk +echo "echo postmsg">"${DATA}"/postmsg; chmod 755 "${DATA}"/postmsg + +############################################## +# Check spinup and available inputs +############################################## + +# Ignore spelling warning; nothing is misspelled +# shellcheck disable=SC2153 +if [[ ${CDATE} -gt ${SDATE} ]]; then + for file in ${PRPI} ${sig1} ${sfc1} ${CNVS}; do + if [[ ! -f "${file}" ]]; then + echo "FATAL ERROR: FILE MISSING: ${file}" + exit 1 + fi + done + + ############################################## + # RUN FIT2OBS VERIFICATION + ############################################## + + "${SCRIPTSfit2obs}/excfs_gdas_vrfyfits.sh" + status=$? + [[ ${status} -ne 0 ]] && exit "${status}" + + ############################################## + # End JOB SPECIFIC work + ############################################## + + ############################################## + # Final processing + ############################################## + if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" + fi + +else + + echo "Too early for FIT2OBS to run. Exiting." 
+ +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || (echo "FATAL ERROR: ${DATAROOT} does not exist. ABORT!"; exit 1) +[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" + +exit 0 diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT new file mode 100755 index 0000000000..613de589d2 --- /dev/null +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT @@ -0,0 +1,45 @@ +#!/bin/bash +export STRICT="NO" +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" + +export DATA="${DATAROOT}/${RUN}ocnanal_${cyc}" +source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalrun" -c "base ocnanal ocnanalrun" + + +############################################## +# Set variables used in the script +############################################## + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/ocean} + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_bmat.sh} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +########################################## +# Do not remove the Temporary working directory (do this in POST) +########################################## +cd "${DATAROOT}" || exit 1 + +exit 0 diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY new file mode 100755 index 0000000000..c85b5c886b --- /dev/null +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT_VRFY @@ -0,0 +1,44 @@ +#!/bin/bash +export STRICT="NO" +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA="${DATAROOT}/${RUN}ocnanal_${cyc}" +source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalrun" -c "base ocnanal ocnanalrun" + + +############################################## +# Set variables used in the script +############################################## + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/ocean} + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_bmat_vrfy.sh} +${EXSCRIPT} +status=$? 
+[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +########################################## +# Do not remove the Temporary working directory (do this in POST) +########################################## +cd "${DATAROOT}" || exit 1 + +exit 0 diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT new file mode 100755 index 0000000000..7e4294bd7c --- /dev/null +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT @@ -0,0 +1,59 @@ +#!/bin/bash +export STRICT="NO" +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA="${DATAROOT}/${RUN}ocnanal_${cyc}" +source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalchkpt" -c "base ocnanal ocnanalchkpt" + + +############################################## +# Set variables used in the script +############################################## +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +export GDATE +export gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP=${GDUMP:-"gdas"} + +export GPREFIX="${GDUMP}.t${gcyc}z." +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +export APREFIX="${CDUMP}.t${cyc}z." + +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS + +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_chkpt.sh} +${EXSCRIPT} +status=$? 
+[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +########################################## +# Do not remove the Temporary working directory (do this in POST) +########################################## +cd "${DATAROOT}" || exit 1 + +exit 0 diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST new file mode 100755 index 0000000000..eb9607ad21 --- /dev/null +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST @@ -0,0 +1,46 @@ +#!/bin/bash +export STRICT="NO" +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA="${DATAROOT}/${RUN}ocnanal_${cyc}" +source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalpost" -c "base ocnanalpost" + + +############################################## +# Set variables used in the script +############################################## +export CDUMP=${CDUMP:-${RUN:-"gfs"}} +export CDATE=${CDATE:-${PDY}${cyc}} +export GDUMP=${GDUMP:-"gdas"} + +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_OCEAN_ANALYSIS COM_ICE_RESTART + +mkdir -p "${COM_OCEAN_ANALYSIS}" +mkdir -p "${COM_ICE_RESTART}" + +############################################## +# Begin JOB SPECIFIC work +############################################## + +# Add UFSDA to PYTHONPATH +ufsdaPATH="${HOMEgfs}/sorc/gdas.cd/ush/" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${ufsdaPATH}" +export PYTHONPATH + +############################################################### +# Run relevant script +############################################################### + +EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_post.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || exit 1 +[[ "${KEEPDATA}" = "NO" ]] && rm -rf "${DATA}" + +exit 0 diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP new file mode 100755 index 0000000000..c3fd5b5d65 --- /dev/null +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP @@ -0,0 +1,60 @@ +#!/bin/bash +export STRICT="NO" +source "${HOMEgfs}/ush/preamble.sh" +export DATA="${DATAROOT}/${RUN}ocnanal_${cyc}" +source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalprep" -c "base ocnanal ocnanalprep" + + +############################################## +# Set variables used in the script +############################################## +export CDUMP=${CDUMP:-${RUN:-"gfs"}} +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +export GDATE +export gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP=${GDUMP:-"gdas"} + +export OPREFIX="${CDUMP}.t${cyc}z." +export GPREFIX="${GDUMP}.t${gcyc}z." +export APREFIX="${CDUMP}.t${cyc}z." 
+ +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS + +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_OCEAN_HISTORY_PREV:COM_OCEAN_HISTORY_TMPL \ + COM_ICE_HISTORY_PREV:COM_ICE_HISTORY_TMPL \ + COM_ICE_RESTART_PREV:COM_ICE_RESTART_TMPL + +############################################## +# Begin JOB SPECIFIC work +############################################## + +# Add UFSDA to PYTHONPATH +ufsdaPATH="${HOMEgfs}/sorc/gdas.cd/ush/" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${ufsdaPATH}" +export PYTHONPATH + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_prep.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_RUN b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_RUN new file mode 100755 index 0000000000..87ca5560c4 --- /dev/null +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_RUN @@ -0,0 +1,42 @@ +#!/bin/bash +export STRICT="NO" +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA="${DATAROOT}/${RUN}ocnanal_${cyc}" +source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalrun" -c "base ocnanal ocnanalrun" + + +############################################## +# Set variables used in the script +############################################## + +############################################## +# Begin JOB SPECIFIC work +############################################## + + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_run.sh} +${EXSCRIPT} +status=$? 
+[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +########################################## +# Do not remove the Temporary working directory (do this in POST) +########################################## +cd "${DATAROOT}" || exit 1 + +exit 0 diff --git a/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY new file mode 100755 index 0000000000..b0efa93532 --- /dev/null +++ b/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY @@ -0,0 +1,56 @@ +#!/bin/bash +export STRICT="NO" +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnanalprep" -c "base ocnanal ocnanalprep" + + +############################################## +# Set variables used in the script +############################################## +export CDUMP=${CDUMP:-${RUN:-"gfs"}} +export GDUMP=${GDUMP:-"gdas"} +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +export gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} + +RUN=${GDUMP} YMD=${PDY} HH=${cyc} generate_com -rx COM_OCEAN_ANALYSIS +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx COM_OCEAN_HISTORY_PREV:COM_OCEAN_HISTORY_TMPL +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx COM_ICE_HISTORY_PREV:COM_ICE_HISTORY_TMPL +# To allow extraction of statistics from diag files +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +# Add UFSDA to PYTHONPATH +export PYTHONPATH=${HOMEgfs}/sorc/gdas.cd/ush/:${HOMEgfs}/sorc/gdas.cd/ush/eva:${PYTHONPATH} + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASPREPPY:-${HOMEgfs}/sorc/gdas.cd/scripts/exgdas_global_marine_analysis_vrfy.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +########################################## +# Do not remove the Temporary working directory (do this in POST) +########################################## +cd "${DATAROOT}" || exit 1 + +exit 0 diff --git a/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG b/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG index ea70ae7b14..0119bc7f2d 100755 --- a/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +++ b/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG @@ -1,68 +1,43 @@ -#!/bin/sh +#! 
/usr/bin/env bash -set -xa -export PS4='$SECONDS + ' -export OMP_NUM_THREADS=${OMP_NUM_THREADS:-1} -date - -########################################### -# GFS_AWIPS_20KM AWIPS PRODUCT GENERATION -########################################### - -######################################################### -# obtain unique process id (pid) and make temp directory -######################################################### -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -mkdir -p $DATA -cd $DATA +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "awips" -c "base awips" -###################################### -# Set up the cycle variable -###################################### -export cycle=${cycle:-t${cyc}z} - -########################################### -# Run setpdy and initialize PDY variables -########################################### -setpdy.sh -. PDY +export OMP_NUM_THREADS=${OMP_NUM_THREADS:-1} ################################ # Set up the HOME directory ################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export USHgfs=${USHgfs:-$HOMEgfs/ush} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgfs=${FIXgfs:-$HOMEgfs/fix} +export HOMEgfs=${HOMEgfs:-${PACKAGEROOT}/gfs.${gfs_ver}} +export USHgfs=${USHgfs:-${HOMEgfs}/ush} +export EXECgfs=${EXECgfs:-${HOMEgfs}/exec} +export PARMgfs=${PARMgfs:-${HOMEgfs}/parm} +export PARMwmo=${PARMwmo:-${HOMEgfs}/parm/wmo} +export PARMproduct=${PARMproduct:-${HOMEgfs}/parm/product} +export FIXgfs=${FIXgfs:-${HOMEgfs}/fix} ################################### # Specify NET and RUN Name and model #################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} export model=${model:-gfs} -export COMPONENT=${COMPONENT:-atmos} +export COMPONENT="atmos" ############################################## # Define COM directories ############################################## -export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-${COMROOT}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/$COMPONENT} -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - export SENDDBN=${SENDDBN:-NO} +export SENDAWIP=${SENDAWIP:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTwmo +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_WMO +GRID="0p25" YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_TMPL + +if [[ ${SENDCOM} == "YES" && ! -d "${COM_ATMOS_WMO}" ]] ; then + mkdir -m 775 -p "${COM_ATMOS_WMO}" fi export pgmout=OUTPUT.$$ -env ######################################################## # Execute the script. @@ -72,25 +47,21 @@ env ######################################################## # Execute the script. -$HOMEgfs/scripts/exgfs_atmos_awips_20km_1p0deg.sh $fcsthrs +${HOMEgfs}/scripts/exgfs_atmos_awips_20km_1p0deg.sh ${fcsthrs} export err=$?; err_chk ######################################################## -msg="JOB $job HAS COMPLETED NORMALLY!" 
-postmsg $jlogfile "$msg" - ############################################ # print exec I/O output ############################################ -if [ -e "$pgmout" ] ; then - cat $pgmout +if [ -e "${pgmout}" ] ; then + cat ${pgmout} fi ################################### # Remove temp directories ################################### -if [ "$KEEPDATA" != "YES" ] ; then - rm -rf $DATA +if [ "${KEEPDATA}" != "YES" ] ; then + rm -rf ${DATA} fi -date diff --git a/jobs/JGFS_ATMOS_AWIPS_G2 b/jobs/JGFS_ATMOS_AWIPS_G2 index 634cf07cdd..94151fbd72 100755 --- a/jobs/JGFS_ATMOS_AWIPS_G2 +++ b/jobs/JGFS_ATMOS_AWIPS_G2 @@ -1,93 +1,67 @@ -#!/bin/sh - -set -xa -export PS4='$SECONDS + ' -date -export OMP_NUM_THREADS=${OMP_NUM_THREADS:-1} +#! /usr/bin/env bash ######################################## # GFS_AWIPS_G2 AWIPS PRODUCT GENERATION ######################################## -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -mkdir -p $DATA -cd $DATA +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "awips" -c "base awips" -###################################### -# Set up the cycle variable -###################################### -export cycle=${cycle:-t${cyc}z} - -########################################### -# Run setpdy and initialize PDY variables -########################################### -setpdy.sh -. PDY +export OMP_NUM_THREADS=${OMP_NUM_THREADS:-1} ################################ # Set up the HOME directory ################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export USHgfs=${USHgfs:-$HOMEgfs/ush} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgfs=${FIXgfs:-$HOMEgfs/fix} -export UTILgfs=${UTILgfs:-$HOMEgfs/util} +export USHgfs=${USHgfs:-${HOMEgfs}/ush} +export EXECgfs=${EXECgfs:-${HOMEgfs}/exec} +export PARMgfs=${PARMgfs:-${HOMEgfs}/parm} +export PARMwmo=${PARMwmo:-${HOMEgfs}/parm/wmo} +export PARMproduct=${PARMproduct:-${HOMEgfs}/parm/product} +export FIXgfs=${FIXgfs:-${HOMEgfs}/fix} +export UTILgfs=${UTILgfs:-${HOMEgfs}/util} ################################### # Specify NET and RUN Name and model #################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} export model=${model:-gfs} -export COMPONENT=${COMPONENT:-atmos} +export COMPONENT="atmos" ############################################## # Define COM directories ############################################## -export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-${COMROOT}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/$COMPONENT} -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - export SENDDBN=${SENDDBN:-NO} +export SENDAWIP=${SENDAWIP:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTwmo +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_WMO +GRID="0p25" YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_TMPL + +if [[ ${SENDCOM} == "YES" && ! -d "${COM_ATMOS_WMO}" ]] ; then + mkdir -m 775 -p "${COM_ATMOS_WMO}" fi export pgmout=OUTPUT.$$ -env ######################################################## # Execute the script. 
######################################################### mkdir -m 775 awips_g1 -cd $DATA/awips_g1 -$HOMEgfs/scripts/exgfs_atmos_grib_awips.sh $fcsthrs +cd ${DATA}/awips_g1 +${HOMEgfs}/scripts/exgfs_atmos_grib_awips.sh ${fcsthrs} export err=$?; err_chk -msg="JOB $job HAS COMPLETED NORMALLY!" -postmsg $jlogfile "$msg" - ############################################ # print exec I/O output ############################################ -if [ -e "$pgmout" ] ; then - cat $pgmout +if [ -e "${pgmout}" ] ; then + cat ${pgmout} fi ################################### # Remove temp directories ################################### -if [ "$KEEPDATA" != "YES" ] ; then - rm -rf $DATA +if [ "${KEEPDATA}" != "YES" ] ; then + rm -rf ${DATA} fi -date diff --git a/jobs/JGFS_ATMOS_CYCLONE_GENESIS b/jobs/JGFS_ATMOS_CYCLONE_GENESIS index 090e1e1bfb..85e4bf7651 100755 --- a/jobs/JGFS_ATMOS_CYCLONE_GENESIS +++ b/jobs/JGFS_ATMOS_CYCLONE_GENESIS @@ -1,75 +1,16 @@ -#!/bin/ksh -set -x - -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date - - -############################# -# Source relevant config files -############################# -configs="base vrfy" -config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} -for config in $configs; do - . $config_path/config.$config - status=$? - [[ $status -ne 0 ]] && exit $status -done - - -########################################## -# Source machine runtime environment -########################################## -. $HOMEgfs/env/${machine}.env vrfy -status=$? -[[ $status -ne 0 ]] && exit $status +#! /usr/bin/env bash +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" -############################################## -# Obtain unique process id (pid) and make temp directory -############################################## -if [ $RUN_ENVIR = "nco" ]; then - export DATA=${DATA:-${DATAROOT}/${jobid:?}} -else - export job="gfs_cyclone_genesis" - export DATA="$DATAROOT/${job}$$" - [[ -d $DATA ]] && rm -rf $DATA -fi -mkdir -p $DATA -cd $DATA - - -############################################## -# Run setpdy and initialize PDY variables -############################################## -export cycle="t${cyc}z" -setpdy.sh -. 
./PDY - -#################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export COMPONENT=${COMPONENT:-atmos} - -############################################## -# Determine Job Output Name on System -############################################## -export pgmout="OUTPUT.${pid}" -export pgmerr=errfile +# TODO (#1220) Determine if this is still needed +export RUN_ENVIR=${RUN_ENVIR:-"nco"} ############################################## # Set variables used in the exglobal script ############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -if [ $RUN_ENVIR = "nco" ]; then - export ROTDIR=${COMROOT:?}/$NET/$envir -fi -export cmodel=$CDUMP +export cmodel=${RUN} #################################### # SENDCOM - Copy Files From TMPDIR to $COMOUT @@ -82,38 +23,37 @@ export SENDECF=${SENDECF:-NO} #################################### # Specify Execution Areas #################################### -export HOMEens_tracker=${HOMEens_tracker:-${NWROOT:?}/ens_tracker.${ens_tracker_ver}} -export EXECens_tracker=${EXECens_tracker:-$HOMEens_tracker/exec} -export FIXens_tracker=${FIXens_tracker:-$HOMEens_tracker/fix} -export USHens_tracker=${USHens_tracker:-$HOMEens_tracker/ush} -export SCRIPTens_tracker=${SCRIPTens_tracker:-$HOMEens_tracker/scripts} +export HOMEens_tracker=${HOMEens_tracker:-${PACKAGEROOT}/ens_tracker.${ens_tracker_ver}} +export EXECens_tracker=${EXECens_tracker:-${HOMEens_tracker}/exec} +export FIXens_tracker=${FIXens_tracker:-${HOMEens_tracker}/fix} +export USHens_tracker=${USHens_tracker:-${HOMEens_tracker}/ush} +export SCRIPTens_tracker=${SCRIPTens_tracker:-${HOMEens_tracker}/scripts} ############################################## # Define COM directories ############################################## -export COMIN=${ROTDIR}/${RUN}.${PDY}/${cyc}/$COMPONENT -export gfsdir=${COMIN} -export COMINgfs=${COMIN} -export COMOUT=${ROTDIR}/${RUN}.${PDY}/${cyc}/$COMPONENT +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_GENESIS +YMD=${PDY} HH=${cyc} GRID="0p25" generate_com -rx COM_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_TMPL + +# The following variables are used by the tracker scripts which are outside +# of global-workflow and therefore can't be standardized at this time +export COMIN=${COM_ATMOS_GRIB_0p25} +export gfsdir=${COM_ATMOS_GRIB_0p25} +export COMINgfs=${COM_ATMOS_GRIB_0p25} -export JYYYY=$(echo ${PDY} | cut -c1-4) -export COMINgenvit=${COMINgenvit:-${COMOUT}/genesis_vital_${JYYYY}} -export COMOUTgenvit=${COMOUTgenvit:-${COMOUT}/genesis_vital_${JYYYY}} -#export COMINgenvit=${COMINgenvit:-${DATA}/genesis_vital_${JYYYY}} -#export COMOUTgenvit=${COMOUTgenvit:-${DATA}/genesis_vital_${JYYYY}} +export COMINgenvit=${COM_ATMOS_GENESIS} +export COMOUTgenvit=${COM_ATMOS_GENESIS} +export COMOUT=${COM_ATMOS_GENESIS} -export COMINsyn=${COMINsyn:-$(compath.py gfs/prod/syndat)} +export COMINsyn=${COMINsyn:-$(compath.py "${envir}/com/gfs/${gfs_ver}")/syndat} -mkdir -m 775 -p $COMOUTgenvit +mkdir -m 775 -p "${COMOUTgenvit}" + +export JYYYY=${PDY:0:4} ############################################## # Run relevant script ############################################## -env -msg="HAS BEGUN on $(hostname)" -postmsg "$jlogfile" "$msg" -$LOGSCRIPT - ${SCRIPTens_tracker}/exgfs_tc_genesis.sh export err=$?; err_chk @@ -121,19 +61,15 @@ export err=$?; err_chk ############################################## # Final processing ############################################## 
-if [ -e "$pgmout" ] ; then - cat $pgmout +if [ -e "${pgmout}" ] ; then + cat ${pgmout} fi - -msg="ENDED NORMALLY." -postmsg "$jlogfile" "$msg" - ########################################## # Remove the Temporary working directory ########################################## -cd $DATAROOT -[[ $KEEPDATA = "NO" ]] && rm -rf $DATA +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + -date exit 0 diff --git a/jobs/JGFS_ATMOS_CYCLONE_TRACKER b/jobs/JGFS_ATMOS_CYCLONE_TRACKER index 977c59a132..3aa3c6f5f4 100755 --- a/jobs/JGFS_ATMOS_CYCLONE_TRACKER +++ b/jobs/JGFS_ATMOS_CYCLONE_TRACKER @@ -1,76 +1,20 @@ -#!/bin/ksh -set -x - -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date - - -############################# -# Source relevant config files -############################# -configs="base vrfy" -config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} -for config in $configs; do - . $config_path/config.$config - status=$? - [[ $status -ne 0 ]] && exit $status -done +#! /usr/bin/env bash +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" -########################################## -# Source machine runtime environment -########################################## -. $HOMEgfs/env/${machine}.env vrfy -status=$? -[[ $status -ne 0 ]] && exit $status - - -############################################## -# Obtain unique process id (pid) and make temp directory -############################################## -if [ $RUN_ENVIR = "nco" ]; then - export DATA=${DATA:-${DATAROOT}/${jobid:?}} -else - export job="gfs_cyclone_tracker" - export DATA="$DATAROOT/${job}$$" - [[ -d $DATA ]] && rm -rf $DATA -fi -mkdir -p $DATA -cd $DATA - - -############################################## -# Run setpdy and initialize PDY variables -############################################## -export cycle="t${cyc}z" -setpdy.sh -. 
./PDY - - -#################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export COMPONENT=${COMPONENT:-atmos} +# TODO (#1220) Determine if this is still needed +export RUN_ENVIR=${RUN_ENVIR:-"nco"} -############################################## -# Determine Job Output Name on System -############################################## -export pgmout="OUTPUT.${pid}" -export pgmerr=errfile +export COMPONENT="atmos" ############################################## # Set variables used in the exglobal script ############################################## export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -if [ $RUN_ENVIR = "nco" ]; then - export ROTDIR=${COMROOT:?}/$NET/$envir -fi +export CDUMP=${RUN/enkf} #################################### @@ -84,46 +28,50 @@ export SENDECF=${SENDECF:-NO} #################################### # Specify Execution Areas #################################### -export HOMEens_tracker=${HOMEens_tracker:-${NWROOT:?}/ens_tracker.${ens_tracker_ver}} -export EXECens_tracker=${EXECens_tracker:-$HOMEens_tracker/exec} -export FIXens_tracker=${FIXens_tracker:-$HOMEens_tracker/fix} -export USHens_tracker=${USHens_tracker:-$HOMEens_tracker/ush} +export HOMEens_tracker=${HOMEens_tracker:-${PACKAGEROOT}/ens_tracker.${ens_tracker_ver}} +export EXECens_tracker=${EXECens_tracker:-${HOMEens_tracker}/exec} +export FIXens_tracker=${FIXens_tracker:-${HOMEens_tracker}/fix} +export USHens_tracker=${USHens_tracker:-${HOMEens_tracker}/ush} ############################################## # Define COM and Data directories ############################################## -export COMIN=${ROTDIR}/${RUN}.${PDY}/${cyc}/$COMPONENT -export COMINgfs=${COMIN} -export gfsdir=${COMINgfs} -export COMINgdas=${COMIN} -export gdasdir=${COMINgdas} -export COMOUT=${ROTDIR}/${RUN}.${PDY}/${cyc}/$COMPONENT -export COMINsyn=${COMINsyn:-$(compath.py arch/prod/syndat)} - -if [ $RUN_ENVIR = "nco" ]; then - export COMOUThur=${COMROOTp1:?}/hur/${envir}/global - export COMOUTatcf=${COMROOTp1:?}/nhc/${envir}/atcf - mkdir -m 775 -p $COMOUThur $COMOUTatcf +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_TRACK COM_ATMOS_GENESIS +YMD=${PDY} HH=${cyc} GRID="0p25" generate_com -rx COM_ATMOS_GRIB_0p25:COM_ATMOS_GRIB_TMPL + +if [[ ! 
-d "${COM_ATMOS_TRACK}" ]]; then mkdir -p "${COM_ATMOS_TRACK}"; fi + +# The following variables are used by the tracker scripts which are outside +# of global-workflow and therefore can't be standardized at this time +export COMINgfs=${COM_ATMOS_GRIB_0p25} +export gfsdir=${COM_ATMOS_GRIB_0p25} +export COMINgdas=${COM_ATMOS_GRIB_0p25} +export gdasdir=${COM_ATMOS_GRIB_0p25} +export COMOUT=${COM_ATMOS_TRACK} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export COMINgenvit=${COM_ATMOS_GENESIS} + +if [ ${RUN_ENVIR} = "nco" ]; then + export COMOUThur=${COMROOTp1}/hur/${envir}/global + export COMOUTatcf=${COMROOTp1}/nhc/${envir}/atcf + mkdir -m 775 -p ${COMOUThur} ${COMOUTatcf} else -# export COMOUThur=$COMOUT +# export COMOUThur=$COMOUT # export COMOUTatcf=$COMOUT - export COMOUThur=$DATA - export COMOUTatcf=$DATA + export COMOUThur=${DATA} + export COMOUTatcf=${DATA} fi ############################################## # Run relevant script ############################################## -env -msg="HAS BEGUN on $(hostname)" -postmsg "$jlogfile" "$msg" -$LOGSCRIPT ############################################################# # Execute the script export pert="p01" -export cmodel=$CDUMP +export cmodel=${CDUMP} export loopnum=1 #-----------input data checking ----------------- @@ -132,25 +80,7 @@ export err=$?; err_chk #------------------------------------------------ machine=${machine:-$(echo ${SITE})} -if [ $machine = TIDE -o $machine = GYRE ] ; then # For WCOSS - machine=wcoss - ${USHens_tracker}/extrkr_gfs.sh ${loopnum} ${cmodel} ${CDATE} ${pert} ${DATA} -elif [ $machine = LUNA -o $machine = SURGE -o $machine = WCOSS_C ] ; then # For CRAY - machine=cray - ${APRUNTRACK} ${USHens_tracker}/extrkr_gfs.sh ${loopnum} ${cmodel} ${CDATE} ${pert} ${DATA} -elif [ $machine = VENUS -o $machine = MARS -o $machine = WCOSS_DELL_P3 ] ; then # For DELL - machine=dell - mpirun -n 1 ${USHens_tracker}/extrkr_gfs.sh ${loopnum} ${cmodel} ${CDATE} ${pert} ${DATA} -elif [ $machine = HERA ]; then # For HERA - machine=hera - ${USHens_tracker}/extrkr_gfs.sh ${loopnum} ${cmodel} ${CDATE} ${pert} ${DATA} -elif [ $machine = ORION ]; then # For ORION - machine=orion - ${USHens_tracker}/extrkr_gfs.sh ${loopnum} ${cmodel} ${CDATE} ${pert} ${DATA} -elif [ $machine = JET ]; then # For JET - machine=jet - ${USHens_tracker}/extrkr_gfs.sh ${loopnum} ${cmodel} ${CDATE} ${pert} ${DATA} -fi +${USHens_tracker}/extrkr_gfs.sh ${loopnum} ${cmodel} ${CDATE} ${pert} ${DATA} export err=$?; err_chk @@ -167,20 +97,15 @@ export err=$?; err_chk ############################################## # Final processing ############################################## -if [ -e "$pgmout" ] ; then - cat $pgmout +if [ -e "${pgmout}" ] ; then + cat ${pgmout} fi - -msg="ENDED NORMALLY." -postmsg "$jlogfile" "$msg" - - ########################################## # Remove the Temporary working directory ########################################## -cd $DATAROOT -[[ $KEEPDATA = "NO" ]] && rm -rf $DATA +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + -date exit 0 diff --git a/jobs/JGFS_ATMOS_FBWIND b/jobs/JGFS_ATMOS_FBWIND index 0c43bf2643..f4b94442e8 100755 --- a/jobs/JGFS_ATMOS_FBWIND +++ b/jobs/JGFS_ATMOS_FBWIND @@ -1,90 +1,62 @@ -#!/bin/sh +#! 
/usr/bin/env bash -set -xa -export PS4='$SECONDS + ' -date +# TODO (#1221) This job is not part of the rocoto suite ############################################ # GFS FBWIND PRODUCT GENERATION ############################################ - -########################################################### -# obtain unique process id (pid) and make temp directory -########################################################### -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -mkdir -p $DATA -cd $DATA - -###################################### -# Set up the cycle variable -###################################### -export cycle=${cycle:-t${cyc}z} - -########################################### -# Run setpdy and initialize PDY variables -########################################### -setpdy.sh -. PDY +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "fbwind" -c "base" ################################ # Set up the HOME directory ################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export USHgfs=${USHgfs:-$HOMEgfs/ush} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgfs=${FIXgfs:-$HOMEgfs/fix} -export UTILgfs=${UTILgfs:-$HOMEgfs/util} +export USHgfs=${USHgfs:-${HOMEgfs}/ush} +export EXECgfs=${EXECgfs:-${HOMEgfs}/exec} +export PARMgfs=${PARMgfs:-${HOMEgfs}/parm} +export PARMwmo=${PARMwmo:-${HOMEgfs}/parm/wmo} +export PARMproduct=${PARMproduct:-${HOMEgfs}/parm/product} +export FIXgfs=${FIXgfs:-${HOMEgfs}/fix} +export UTILgfs=${UTILgfs:-${HOMEgfs}/util} ################################### # Specify NET and RUN Name and model #################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} export model=${model:-gfs} -export COMPONENT=${COMPONENT:-atmos} +export COMPONENT="atmos" ############################################## # Define COM directories ############################################## -export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-${COMROOT}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/$COMPONENT} +export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/${COMPONENT}} +export COMOUT=${COMOUT:-$(compath.py -o ${NET}/${gfs_ver}/${RUN}.${PDY})/${cyc}/${COMPONENT}} export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} export SENDDBN=${SENDDBN:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTwmo +if [ ${SENDCOM} = YES ] ; then + mkdir -m 775 -p ${COMOUT} ${COMOUTwmo} fi -export pgmout=OUTPUT.$$ - -env ######################################################## # Execute the script. -$HOMEgfs/scripts/exgfs_atmos_fbwind.sh +${HOMEgfs}/scripts/exgfs_atmos_fbwind.sh export err=$?;err_chk ######################################################## -msg="JOB $job HAS COMPLETED NORMALLY!" 
-postmsg $jlogfile "$msg" - ############################################ # print exec I/O output ############################################ -if [ -e "$pgmout" ] ; then - cat $pgmout +if [ -e "${pgmout}" ] ; then + cat ${pgmout} fi ################################### # Remove temp directories ################################### -if [ "$KEEPDATA" != "YES" ] ; then - rm -rf $DATA +if [ "${KEEPDATA}" != "YES" ] ; then + rm -rf ${DATA} fi -date diff --git a/jobs/JGFS_ATMOS_FSU_GENESIS b/jobs/JGFS_ATMOS_FSU_GENESIS index 57aa709e06..e5fd5ff3c3 100755 --- a/jobs/JGFS_ATMOS_FSU_GENESIS +++ b/jobs/JGFS_ATMOS_FSU_GENESIS @@ -1,67 +1,11 @@ -#!/bin/ksh -set -x - -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date - - -############################# -# Source relevant config files -############################# -configs="base vrfy" -config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} -for config in $configs; do - . $config_path/config.$config - status=$? - [[ $status -ne 0 ]] && exit $status -done - -##exit - -########################################## -# Source machine runtime environment -########################################## -. $HOMEgfs/env/${machine}.env vrfy -status=$? -[[ $status -ne 0 ]] && exit $status - - -############################################## -# Obtain unique process id (pid) and make temp directory -############################################## -if [ $RUN_ENVIR = "nco" ]; then - export DATA=${DATA:-${DATAROOT}/${jobid:?}} -else - export job="gfs_fsu_genesis" - export DATA="$DATAROOT/${job}$$" - [[ -d $DATA ]] && rm -rf $DATA -fi -mkdir -p $DATA -cd $DATA +#! /usr/bin/env bash +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" -############################################## -# Run setpdy and initialize PDY variables -############################################## -export cycle="t${cyc}z" -setpdy.sh -. 
./PDY - - -#################################### -# Specify NET and RUN Name and model -#################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export COMPONENT=${COMPONENT:-atmos} - +export RUN_ENVIR=${RUN_ENVIR:-"nco"} -############################################## -# Determine Job Output Name on System -############################################## -export pgmout="OUTPUT.${pid}" -export pgmerr=errfile +export COMPONENT="atmos" ############################################## @@ -69,9 +13,6 @@ export pgmerr=errfile ############################################## export CDATE=${CDATE:-${PDY}${cyc}} export CDUMP=${CDUMP:-${RUN:-"gfs"}} -if [ $RUN_ENVIR = "nco" ]; then - export ROTDIR=${COMROOT:?}/$NET/$envir -fi #################################### @@ -85,12 +26,12 @@ export SENDECF=${SENDECF:-NO} #################################### # Specify Execution Areas #################################### -export HOMEens_tracker=${HOMEens_tracker:-${NWROOT:?}/ens_tracker.${ens_tracker_ver}} -export EXECens_tracker=${EXECens_tracker:-$HOMEens_tracker/exec} -export FIXens_tracker=${FIXens_tracker:-$HOMEens_tracker/fix} -export USHens_tracker=${USHens_tracker:-$HOMEens_tracker/ush} -export SCRIPTens_tracker=${SCRIPTens_tracker:-$HOMEens_tracker/scripts} -export BINens_tracker=${BINens_tracker:-$HOMEens_tracker/ush/FSUgenesisPY/bin} +export HOMEens_tracker=${HOMEens_tracker:-${PACKAGEROOT}/ens_tracker.${ens_tracker_ver}} +export EXECens_tracker=${EXECens_tracker:-${HOMEens_tracker}/exec} +export FIXens_tracker=${FIXens_tracker:-${HOMEens_tracker}/fix} +export USHens_tracker=${USHens_tracker:-${HOMEens_tracker}/ush} +export SCRIPTens_tracker=${SCRIPTens_tracker:-${HOMEens_tracker}/scripts} +export BINens_tracker=${BINens_tracker:-${HOMEens_tracker}/ush/FSUgenesisPY/bin} export PYTHONPATH=${USHens_tracker}/FSUgenesisPY:${PYTHONPATH} ############################################## @@ -103,26 +44,22 @@ export gfsdir=${ROTDIR} export COMINgdas=${COMIN} export gdasdir=${COMINgdas} export COMOUT=${ROTDIR}/${RUN}.${PDY}/${cyc}/${COMPONENT} -export COMINsyn=${COMINsyn:-$(compath.py arch/prod/syndat)} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} -if [ $RUN_ENVIR = "nco" ]; then - export COMOUThur=${COMROOTp1:?}/hur/${envir}/global - export COMOUTatcf=${COMROOTp1:?}/nhc/${envir}/atcf - mkdir -m 775 -p $COMOUThur $COMOUTatcf +if [ ${RUN_ENVIR} = "nco" ]; then + export COMOUThur=${COMROOTp1}/hur/${envir}/global + export COMOUTatcf=${COMROOTp1}/nhc/${envir}/atcf + mkdir -m 775 -p ${COMOUThur} ${COMOUTatcf} else -# export COMOUThur=$COMOUT +# export COMOUThur=$COMOUT # export COMOUTatcf=$COMOUT - export COMOUThur=$DATA - export COMOUTatcf=$DATA + export COMOUThur=${DATA} + export COMOUTatcf=${DATA} fi ############################################## # Run relevant script ############################################## -env -msg="HAS BEGUN on $(hostname)" -postmsg "$jlogfile" "$msg" -$LOGSCRIPT ############################################################# # Execute the script @@ -132,18 +69,15 @@ export err=$?; err_chk ############################################## # Final processing ############################################## -if [ -e "$pgmout" ] ; then - cat $pgmout +if [ -e "${pgmout}" ] ; then + cat ${pgmout} fi -msg="ENDED NORMALLY." 
-postmsg "$jlogfile" "$msg" - ########################################## # Remove the Temporary working directory ########################################## -cd $DATAROOT -[[ $KEEPDATA = "NO" ]] && rm -rf $DATA +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + -date exit 0 diff --git a/jobs/JGFS_ATMOS_GEMPAK b/jobs/JGFS_ATMOS_GEMPAK index 173fad7d51..161f0e0883 100755 --- a/jobs/JGFS_ATMOS_GEMPAK +++ b/jobs/JGFS_ATMOS_GEMPAK @@ -1,41 +1,18 @@ -#!/bin/sh +#! /usr/bin/env bash -set -xa -export PS4='$SECONDS + ' -date +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak" -c "base gempak" -############################################ -# GFS GEMPAK PRODUCT GENERATION -############################################ - -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -mkdir -p $DATA -cd $DATA - -###################################### -# Set up the cycle variable -###################################### -export cycle=${cycle:-t${cyc}z} - -########################################### -# Run setpdy and initialize PDY variables -########################################### -setpdy.sh -. PDY ################################ # Set up the HOME directory ################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} +export EXECgfs=${EXECgfs:-${HOMEgfs}/exec} +export PARMgfs=${PARMgfs:-${HOMEgfs}/parm} +export EXPDIR=${EXPDIR:-${HOMEgfs}/parm/config} +export FIXgempak=${FIXgempak:-${HOMEgfs}/gempak/fix} +export USHgempak=${USHgempak:-${HOMEgfs}/gempak/ush} +export SRCgfs=${SRCgfs:-${HOMEgfs}/scripts} # For half-degree P Grib files export DO_HD_PGRB=${DO_HD_PGRB:-YES} @@ -53,117 +30,124 @@ export DBN_ALERT_TYPE=${DBN_ALERT_TYPE:-GFS_GEMPAK} ################################### # Specify NET and RUN Name and model #################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} export model=${model:-gfs} -export COMPONENT=${COMPONENT:-atmos} ############################################## # Define COM directories ############################################## -export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-${COMROOT}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/$COMPONENT/gempak} - export SENDDBN=${SENDDBN:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT -fi +for grid in 0p25 0p50 1p00; do + GRID=${grid} YMD=${PDY} HH=${cyc} generate_com -rx "COM_ATMOS_GRIB_${grid}:COM_ATMOS_GRIB_TMPL" +done -export pgmout=OUTPUT.$$ +for grid in 1p00 0p50 0p25 40km 35km_atl 35km_pac; do + prod_dir="COM_ATMOS_GEMPAK_${grid}" + GRID=${grid} YMD=${PDY} HH=${cyc} generate_com -rx "COM_ATMOS_GEMPAK_${grid}:COM_ATMOS_GEMPAK_TMPL" -env + if [[ ${SENDCOM} == YES && ! 
-d "${!prod_dir}" ]] ; then + mkdir -m 775 -p "${!prod_dir}" + fi +done -rm -f poescript +# TODO: These actions belong in an ex-script not a j-job +if [[ -f poescript ]]; then + rm -f poescript +fi + +ocean_domain_max=180 +if (( ocean_domain_max > FHMAX_GFS )); then + ocean_domain_max=${FHMAX_GFS} +fi ################################################################# # Execute the script for the 384 hour 1 degree grib ################################################################## -echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs 384 GFS_GEMPAK &> $DATA/gfs_1p0.$$.1 " >>poescript -echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs 384 GFS_GEMPAK &> $DATA/gfs_1p0.$$.2 " >>poescript -echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs 384 GFS_GEMPAK &> $DATA/gfs_1p0.$$.3 " >>poescript -echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs 384 GFS_GEMPAK &> $DATA/gfs_1p0.$$.4 " >>poescript -echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs 384 GFS_GEMPAK &> $DATA/gfs_1p0.$$.5 " >>poescript -echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs 384 GFS_GEMPAK &> $DATA/gfs_1p0.$$.6 " >>poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_1p00} &> ${DATA}/gfs_1p0.$$.1 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_1p00} &> ${DATA}/gfs_1p0.$$.2 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_1p00} &> ${DATA}/gfs_1p0.$$.3 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_1p00} &> ${DATA}/gfs_1p0.$$.4 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_1p00} &> ${DATA}/gfs_1p0.$$.5 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_1p00} &> ${DATA}/gfs_1p0.$$.6 " >> poescript ################################################################# # Execute the script for the half-degree grib ################################################################## -echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs_0p50 384 GFS_GEMPAK &> $DATA/gfs_0p5.$$.1 " >>poescript -echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs_0p50 384 GFS_GEMPAK &> $DATA/gfs_0p5.$$.2 " >>poescript -echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs_0p50 384 GFS_GEMPAK &> $DATA/gfs_0p5.$$.3 " >>poescript -echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs_0p50 384 GFS_GEMPAK &> $DATA/gfs_0p5.$$.4 " >>poescript -echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs_0p50 384 GFS_GEMPAK &> $DATA/gfs_0p5.$$.5 " >>poescript -echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs_0p50 384 GFS_GEMPAK &> $DATA/gfs_0p5.$$.6 " >>poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p50 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p50} &> ${DATA}/gfs_0p5.$$.1 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p50 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p50} &> ${DATA}/gfs_0p5.$$.2 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p50 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p50} &> ${DATA}/gfs_0p5.$$.3 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p50 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p50} &> ${DATA}/gfs_0p5.$$.4 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p50 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p50} &> ${DATA}/gfs_0p5.$$.5 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p50 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p50} &> ${DATA}/gfs_0p5.$$.6 " >> poescript 
 #################################################################
 # Execute the script for the quarter-degree grib
 ####################################################################
-echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs_0p25 384 GFS_GEMPAK &> $DATA/gfs_0p25.$$.1 " >> poescript
-echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs_0p25 384 GFS_GEMPAK &> $DATA/gfs_0p25.$$.2 " >> poescript
-echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs_0p25 384 GFS_GEMPAK &> $DATA/gfs_0p25.$$.3 " >> poescript
-echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs_0p25 384 GFS_GEMPAK &> $DATA/gfs_0p25.$$.4 " >> poescript
-echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs_0p25 384 GFS_GEMPAK &> $DATA/gfs_0p25.$$.5 " >> poescript
-echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs_0p25 384 GFS_GEMPAK &> $DATA/gfs_0p25.$$.6 " >> poescript
-echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs_0p25 384 GFS_GEMPAK &> $DATA/gfs_0p25.$$.7 " >> poescript
-echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs_0p25 384 GFS_GEMPAK &> $DATA/gfs_0p25.$$.8 " >> poescript
-echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs_0p25 384 GFS_GEMPAK &> $DATA/gfs_0p25.$$.9 " >> poescript
-echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs_0p25 384 GFS_GEMPAK &> $DATA/gfs_0p25.$$.10 " >> poescript
+echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.1 " >> poescript
+echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.2 " >> poescript
+echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.3 " >> poescript
+echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.4 " >> poescript
+echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.5 " >> poescript
+echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.6 " >> poescript
+echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.7 " >> poescript
+echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.8 " >> poescript
+echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.9 " >> poescript
+echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs_0p25 ${FHMAX_GFS} GFS_GEMPAK ${COM_ATMOS_GEMPAK_0p25}&> ${DATA}/gfs_0p25.$$.10 " >> poescript
 ####################################################################
 # Execute the script to create the 35km Pacific grids for OPC
 #####################################################################
-echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs35_pac 180 GFS_GEMPAK_WWB &> $DATA/gfs35_pac.$$.1 " >>poescript
-echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs35_pac 180 GFS_GEMPAK_WWB &> $DATA/gfs35_pac.$$.2 " >>poescript
+echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs35_pac ${ocean_domain_max} GFS_GEMPAK_WWB ${COM_ATMOS_GEMPAK_35km_pac} &> ${DATA}/gfs35_pac.$$.1 " >> poescript
+echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs35_pac ${ocean_domain_max} GFS_GEMPAK_WWB ${COM_ATMOS_GEMPAK_35km_pac} &> ${DATA}/gfs35_pac.$$.2 " >> poescript
 ####################################################################
 # Execute the script to create the 35km Atlantic grids for OPC
##################################################################### -echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs35_atl 180 GFS_GEMPAK_WWB &> $DATA/gfs35_atl.$$.1 " >>poescript -echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs35_atl 180 GFS_GEMPAK_WWB &> $DATA/gfs35_atl.$$.2 " >>poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs35_atl ${ocean_domain_max} GFS_GEMPAK_WWB ${COM_ATMOS_GEMPAK_35km_atl} &> ${DATA}/gfs35_atl.$$.1 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs35_atl ${ocean_domain_max} GFS_GEMPAK_WWB ${COM_ATMOS_GEMPAK_35km_atl} &> ${DATA}/gfs35_atl.$$.2 " >> poescript ##################################################################### # Execute the script to create the 40km grids for HPC ###################################################################### -echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs40 180 GFS_GEMPAK_WWB &> $DATA/gfs40.$$.1 " >>poescript -echo "time $SRCgfs/exgfs_atmos_nawips.sh gfs40 180 GFS_GEMPAK_WWB &> $DATA/gfs40.$$.2 " >>poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs40 ${ocean_domain_max} GFS_GEMPAK_WWB ${COM_ATMOS_GEMPAK_40km} &> ${DATA}/gfs40.$$.1 " >> poescript +echo "time ${SRCgfs}/exgfs_atmos_nawips.sh gfs40 ${ocean_domain_max} GFS_GEMPAK_WWB ${COM_ATMOS_GEMPAK_40km} &> ${DATA}/gfs40.$$.2 " >> poescript + +if [[ ${CFP_MP:-"NO"} == "YES" ]]; then + # Add task number to the MPMD script + nl -n ln -v 0 poescript > poescript.new + mv poescript.new poescript +fi cat poescript -chmod 775 $DATA/poescript +chmod 775 ${DATA}/poescript export MP_PGMMODEL=mpmd -export MP_CMDFILE=$DATA/poescript +export MP_CMDFILE=${DATA}/poescript -ntasks=${NTASKS_GEMPAK:-$(cat $DATA/poescript | wc -l)} +ntasks=$(cat ${DATA}/poescript | wc -l) ptile=${PTILE_GEMPAK:-4} threads=${NTHREADS_GEMPAK:-1} -export OMP_NUM_THREADS=$threads -APRUN="mpirun -n $ntasks cfp " +export OMP_NUM_THREADS=${threads} +APRUN=${APRUN:-"mpiexec -l -np ${ntasks} --cpu-bind verbose,core cfp"} -APRUN_GEMPAKCFP=${APRUN_GEMPAKCFP:-$APRUN} -APRUNCFP=$(eval echo $APRUN_GEMPAKCFP) +APRUN_GEMPAKCFP=${APRUN_GEMPAKCFP:-${APRUN}} +APRUNCFP=${APRUN_GEMPAKCFP} -$APRUNCFP $DATA/poescript +${APRUNCFP} ${DATA}/poescript export err=$?; err_chk -cat $DATA/gfs*.$$.? - -msg="JOB $job HAS COMPLETED NORMALLY!" -postmsg $jlogfile "$msg" - ############################################ # print exec I/O output ############################################ -if [ -e "$pgmout" ] ; then - cat $pgmout +if [ -e "${pgmout}" ] ; then + cat ${pgmout} fi ################################### # Remove temp directories ################################### -if [ "$KEEPDATA" != "YES" ] ; then - rm -rf $DATA +if [ "${KEEPDATA}" != "YES" ] ; then + rm -rf ${DATA} fi -date diff --git a/jobs/JGFS_ATMOS_GEMPAK_META b/jobs/JGFS_ATMOS_GEMPAK_META index d3590b88bf..0a9f5bdd90 100755 --- a/jobs/JGFS_ATMOS_GEMPAK_META +++ b/jobs/JGFS_ATMOS_GEMPAK_META @@ -1,53 +1,34 @@ -#!/bin/sh +#! 
/usr/bin/env bash
-set -xa
-export PS4='$SECONDS + '
-date
+# TODO (#1222) This job is not part of the rocoto suite
 ############################################
 # GFS GEMPAK META PRODUCT GENERATION
 ############################################
+source "${HOMEgfs}/ush/preamble.sh"
+source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak_meta" -c "base"
-export LAUNCH_MODE=MPI
 ###############################################
 # Set MP variables
 ###############################################
+export LAUNCH_MODE=MPI
 export OMP_NUM_THREADS=1
 export MP_LABELIO=yes
 export MP_PULSE=0
 export MP_DEBUG_NOTIMEOUT=yes
-##########################################################
-# obtain unique process id (pid) and make temp directory
-##########################################################
-export DATA=${DATA:-${DATAROOT}/${jobid:?}}
-mkdir -p $DATA
-cd $DATA
-
-######################################
-# Set up the cycle variable
-######################################
-export cycle=${cycle:-t${cyc}z}
-
-###########################################
-# Run setpdy and initialize PDY variables
-###########################################
-setpdy.sh
-. PDY
-
 ################################
 # Set up the HOME directory
 ################################
-export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}}
-export EXECgfs=${EXECgfs:-$HOMEgfs/exec}
-export PARMgfs=${PARMgfs:-$HOMEgfs/parm}
-export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config}
-export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix}
-export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush}
-export SRCgfs=${SRCgfs:-$HOMEgfs/scripts}
+export EXECgfs=${EXECgfs:-${HOMEgfs}/exec}
+export PARMgfs=${PARMgfs:-${HOMEgfs}/parm}
+export EXPDIR=${EXPDIR:-${HOMEgfs}/parm/config}
+export FIXgempak=${FIXgempak:-${HOMEgfs}/gempak/fix}
+export USHgempak=${USHgempak:-${HOMEgfs}/gempak/ush}
+export SRCgfs=${SRCgfs:-${HOMEgfs}/scripts}
-cp $FIXgempak/datatype.tbl datatype.tbl
+cp ${FIXgempak}/datatype.tbl datatype.tbl
 #############################################
 #set the fcst hrs for all the cycles
@@ -59,10 +40,8 @@ export fhinc=12
 ###################################
 # Specify NET and RUN Name and model
 ####################################
-export NET=${NET:-gfs}
-export RUN=${RUN:-gfs}
 export model=${model:-gfs}
-export COMPONENT=${COMPONENT:-atmos}
+export COMPONENT="atmos"
 ##############################################
 # Set up model and cycle specific variables
@@ -72,49 +51,39 @@ export DBN_ALERT_TYPE=GFS_METAFILE
 ##############################################
 # Define COM directories
 ##############################################
-export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}/$COMPONENT/gempak}
-export COMOUT=${COMOUT:-${COMROOT}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/$COMPONENT/gempak/meta}
-export COMINgempak=${COMINgempak:-${COMROOT}/${NET}/${envir}}
+export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/${COMPONENT}/gempak}
+export COMOUT=${COMOUT:-$(compath.py -o ${NET}/${gfs_ver}/${RUN}.${PDY})/${cyc}/${COMPONENT}/gempak/meta}
+export COMINgempak=${COMINgempak:-$(compath.py ${envir}/${NET}/${gfs_ver})}
-export COMINukmet=${COMINukmet:-$(compath.py nawips/prod/ukmet)}
-export COMINecmwf=${COMINecmwf:-$(compath.py ecmwf/prod/ecmwf)}
-export COMINnam=${COMINnam:-$(compath.py nam/prod/nam)}
+export COMINukmet=${COMINukmet:-$(compath.py ${envir}/ukmet/${ukmet_ver})/ukmet}
+export COMINecmwf=${COMINecmwf:-$(compath.py ${envir}/ecmwf/${ecmwf_ver})/ecmwf}
+export COMINnam=${COMINnam:-$(compath.py ${envir}/nam/${nam_ver})/nam}
export SENDDBN=${SENDDBN:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} -msg="Begin job for $job" -postmsg "$jlogfile" "$msg" - -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT +if [ ${SENDCOM} = YES ] ; then + mkdir -m 775 -p ${COMOUT} fi -export pgmout=OUTPUT.$$ - -env ######################################################## # Execute the script. -$SRCgfs/exgfs_atmos_gempak_meta.sh +${SRCgfs}/exgfs_atmos_gempak_meta.sh export err=$?; err_chk ######################################################## -msg="JOB $job HAS COMPLETED NORMALLY!" -postmsg $jlogfile "$msg" - ############################################ # print exec I/O output ############################################ -if [ -e "$pgmout" ] ; then - cat $pgmout +if [ -e "${pgmout}" ] ; then + cat ${pgmout} fi ################################### # Remove temp directories ################################### -if [ "$KEEPDATA" != "YES" ] ; then - rm -rf $DATA +if [ "${KEEPDATA}" != "YES" ] ; then + rm -rf ${DATA} fi -date diff --git a/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF b/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF index cc24556892..cc9d445965 100755 --- a/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF +++ b/jobs/JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF @@ -1,53 +1,35 @@ -#!/bin/sh +#! /usr/bin/env bash -set -xa -export PS4='$SECONDS + ' -date +# TODO (#1222) This job is not part of the rocoto suite ############################################ # GFS GEMPAK NCDC PRODUCT GENERATION ############################################ +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak_gif" -c "base" -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -mkdir -p $DATA -cd $DATA - -###################################### -# Set up the cycle variable -###################################### -export cycle=${cycle:-t${cyc}z} - -########################################### -# Run setpdy and initialize PDY variables -########################################### -setpdy.sh -. 
PDY ################################ # Set up the HOME directory ################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -export FIXgfs=${FIXgfs:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} -export UTILgfs=${UTILgfs:-$HOMEgfs/util} +export EXECgfs=${EXECgfs:-${HOMEgfs}/exec} +export PARMgfs=${PARMgfs:-${HOMEgfs}/parm} +export EXPDIR=${EXPDIR:-${HOMEgfs}/parm/config} +export FIXgfs=${FIXgfs:-${HOMEgfs}/gempak/fix} +export USHgempak=${USHgempak:-${HOMEgfs}/gempak/ush} +export SRCgfs=${SRCgfs:-${HOMEgfs}/scripts} +export UTILgfs=${UTILgfs:-${HOMEgfs}/util} ###################################### # Set up the GEMPAK directory ####################################### -export HOMEgempak=${HOMEgempak:-$HOMEgfs/gempak} -export FIXgempak=${FIXgempak:-$HOMEgempak/fix} -export USHgempak=${USHgempak:-$HOMEgempak/ush} +export HOMEgempak=${HOMEgempak:-${HOMEgfs}/gempak} +export FIXgempak=${FIXgempak:-${HOMEgempak}/fix} +export USHgempak=${USHgempak:-${HOMEgempak}/ush} export MP_PULSE=0 export MP_TIMEOUT=2000 -export cycle=t${cyc}z + # # Set up model and cycle specific variables @@ -62,54 +44,45 @@ export fstart=00 ################################### # Specify NET and RUN Name and model #################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} export model=${model:-gfs} -export COMPONENT=${COMPONENT:-atmos} +export COMPONENT="atmos" ############################################## # Define COM directories ############################################## -export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}/$COMPONENT/gempak} -export COMINgfs=${COMINgfs:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-${COMROOT}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/$COMPONENT} +export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/${COMPONENT}/gempak} +export COMINgfs=${COMINgfs:-$(compath.py ${envir}/${NET}/${gfs_ver}/${RUN}.${PDY})/${cyc}/${COMPONENT}} +export COMINobsproc=${COMINobsproc:-$(compath.py ${envir}/obsproc/${obsproc_ver})/${RUN}.${PDY}/${cyc}/${COMPONENT}} +export COMOUT=${COMOUT:-$(compath.py -o ${NET}/${gfs_ver}/${RUN}.${PDY})/${cyc}/${COMPONENT}} export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} export SENDDBN=${SENDDBN:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTwmo +if [ ${SENDCOM} = YES ] ; then + mkdir -m 775 -p ${COMOUT} ${COMOUTwmo} fi export pgmout=OUTPUT.$$ -env - -msg="Begin job for $job" -postmsg "$jlogfile" "$msg" ######################################################## # Execute the script. -$SRCgfs/exgfs_atmos_gempak_gif_ncdc_skew_t.sh +${SRCgfs}/exgfs_atmos_gempak_gif_ncdc_skew_t.sh export err=$?; err_chk ######################################################## -msg="JOB $job HAS COMPLETED NORMALLY!" 
-postmsg $jlogfile "$msg" - ############################################ # print exec I/O output ############################################ -if [ -e "$pgmout" ] ; then - cat $pgmout +if [ -e "${pgmout}" ] ; then + cat ${pgmout} fi ################################### # Remove temp directories ################################### -if [ "$KEEPDATA" != "YES" ] ; then - rm -rf $DATA +if [ "${KEEPDATA}" != "YES" ] ; then + rm -rf ${DATA} fi -date diff --git a/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC b/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC index 0f57c6adb2..a1c2518a44 100755 --- a/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC +++ b/jobs/JGFS_ATMOS_GEMPAK_PGRB2_SPEC @@ -1,41 +1,23 @@ -#!/bin/sh +#! /usr/bin/env bash -set -xa -export PS4='$SECONDS + ' -date +# TODO (#1222) This job is not part of the rocoto suite ############################################ # GFS_PGRB2_SPEC_GEMPAK PRODUCT GENERATION ############################################ +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "gempak_spec" -c "base" -######################################################### -# obtain unique process id (pid) and make temp directory -######################################################### -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -mkdir -p $DATA -cd $DATA - -###################################### -# Set up the cycle variable -###################################### -export cycle=${cycle:-t${cyc}z} - -########################################### -# Run setpdy and initialize PDY variables -########################################### -setpdy.sh -. PDY ################################ # Set up the HOME directory ################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -export FIXgempak=${FIXgempak:-$HOMEgfs/gempak/fix} -export USHgempak=${USHgempak:-$HOMEgfs/gempak/ush} -export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} +export EXECgfs=${EXECgfs:-${HOMEgfs}/exec} +export PARMgfs=${PARMgfs:-${HOMEgfs}/parm} +export EXPDIR=${EXPDIR:-${HOMEgfs}/parm/config} +export FIXgempak=${FIXgempak:-${HOMEgfs}/gempak/fix} +export USHgempak=${USHgempak:-${HOMEgfs}/gempak/ush} +export SRCgfs=${SRCgfs:-${HOMEgfs}/scripts} # For half-degree P Grib files #export DO_HD_PGRB=YES @@ -43,9 +25,7 @@ export SRCgfs=${SRCgfs:-$HOMEgfs/scripts} ################################### # Specify NET and RUN Name and model #################################### -export NET=gfs -export RUN=gfs_goessim -export COMPONENT=${COMPONENT:-atmos} +export COMPONENT="atmos" export finc=3 export model=gfs export EXT="" @@ -53,31 +33,30 @@ export EXT="" ############################################## # Define COM directories ############################################## -export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${NET}.${PDY})/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-${COMROOT}/${NET}/${envir}/${NET}.${PDY}/${cyc}/$COMPONENT/gempak} +export COMIN=${COMIN:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY}/${cyc}/${COMPONENT}} +export COMOUT=${COMOUT:-$(compath.py -o ${NET}/${gfs_ver}/${NET}.${PDY})/${cyc}/${COMPONENT}/gempak} export SENDDBN=${SENDDBN:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT +if [ ${SENDCOM} = YES ] ; then + mkdir -m 775 -p ${COMOUT} fi -env -export DATA_HOLD=$DATA +# TODO - Assess what is going on with overwriting $DATA here (#1224) -msg="Begin job for $job" -postmsg "$jlogfile" "$msg" +export 
DATA_HOLD=${DATA} ################################################################# # Execute the script for the regular grib ################################################################# -export DATA=$DATA_HOLD/SPECIAL -mkdir -p $DATA -cd $DATA +export DATA=${DATA_HOLD}/SPECIAL +mkdir -p ${DATA} +cd ${DATA} export DBN_ALERT_TYPE=GFS_GOESSIM_GEMPAK +export RUN2=gfs_goessim export GRIB=goessimpgrb2.0p25.f export EXT=" " export fend=180 @@ -85,21 +64,20 @@ export finc=3 export fstart=000 echo "RUNS the Program" -set -xa ######################################################## # Execute the script. -$SRCgfs/exgfs_atmos_goes_nawips.sh +${SRCgfs}/exgfs_atmos_goes_nawips.sh ################################################################# # Execute the script for the 221 grib -export DATA=$DATA_HOLD/SPECIAL221 -mkdir -p $DATA -cd $DATA +export DATA=${DATA_HOLD}/SPECIAL221 +mkdir -p ${DATA} +cd ${DATA} export DBN_ALERT_TYPE=GFS_GOESSIM221_GEMPAK -export RUN=gfs_goessim221 +export RUN2=gfs_goessim221 export GRIB=goessimpgrb2f export EXT=".grd221" export fend=180 @@ -107,19 +85,15 @@ export finc=3 export fstart=000 echo "RUNS the Program" -set -xa ######################################################## # Execute the script. -$SRCgfs/exgfs_atmos_goes_nawips.sh +${SRCgfs}/exgfs_atmos_goes_nawips.sh export err=$?; err_chk ######################################################## -msg="JOB $job HAS COMPLETED NORMALLY!" -postmsg $jlogfile "$msg" - echo "end of program" -cd $DATA_HOLD +cd ${DATA_HOLD} echo "######################################" echo " SPECIAL.OUT " echo "######################################" @@ -127,15 +101,14 @@ echo "######################################" ############################################ # print exec I/O output ############################################ -if [ -e "$pgmout" ] ; then - cat $pgmout +if [ -e "${pgmout}" ] ; then + cat ${pgmout} fi ################################### # Remove temp directories ################################### -if [ "$KEEPDATA" != "YES" ] ; then - rm -rf $DATA +if [ "${KEEPDATA}" != "YES" ] ; then + rm -rf "${DATA}" fi -date diff --git a/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS b/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS index 7cc9f69309..48b13c3d9e 100755 --- a/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS +++ b/jobs/JGFS_ATMOS_PGRB2_SPEC_NPOESS @@ -1,71 +1,46 @@ -#!/bin/sh +#! /usr/bin/env bash -set -xa -export PS4='$SECONDS + ' -date - -export OMP_NUM_THREADS=${OMP_NUM_THREADS:-1} +# TODO (#1225) This job is not part of the rocoto suite ############################################ # GFS PGRB2_SPECIAL_POST PRODUCT GENERATION ############################################ +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "npoess" -c "base" -########################################################## -# obtain unique process id (pid) and make temp directory -########################################################## -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -mkdir -p $DATA -cd $DATA - -###################################### -# Set up the cycle variable -###################################### -export cycle=${cycle:-t${cyc}z} - -########################################### -# Run setpdy and initialize PDY variables -########################################### -setpdy.sh -. 
PDY +export OMP_NUM_THREADS=${OMP_NUM_THREADS:-1} ################################ # Set up the HOME directory ################################ -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export USHgfs=${USHgfs:-$HOMEgfs/ush} -export EXECgfs=${EXECgfs:-$HOMEgfs/exec} -export PARMgfs=${PARMgfs:-$HOMEgfs/parm} -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -export PARMwmo=${PARMwmo:-$HOMEgfs/parm/wmo} -export PARMproduct=${PARMproduct:-$HOMEgfs/parm/product} -export FIXgfs=${FIXgfs:-$HOMEgfs/fix} +export USHgfs=${USHgfs:-${HOMEgfs}/ush} +export EXECgfs=${EXECgfs:-${HOMEgfs}/exec} +export PARMgfs=${PARMgfs:-${HOMEgfs}/parm} +export EXPDIR=${EXPDIR:-${HOMEgfs}/parm/config} +export PARMwmo=${PARMwmo:-${HOMEgfs}/parm/wmo} +export PARMproduct=${PARMproduct:-${HOMEgfs}/parm/product} +export FIXgfs=${FIXgfs:-${HOMEgfs}/fix} ################################### # Specify NET and RUN Name and model #################################### -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} export model=${model:-gfs} -export COMPONENT=${COMPONENT:-atmos} ############################################## # Define COM directories ############################################## -export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-${COMROOT}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/$COMPONENT} -export COMOUTwmo=${COMOUTwmo:-${COMOUT}/wmo} - export SENDDBN=${SENDDBN:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT $COMOUTwmo -fi +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_GOES +GRID="0p50" YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_GRIB_0p50:COM_ATMOS_GRIB_TMPL -export pgmout=OUTPUT.$$ +if [[ ${SENDCOM} == "YES" ]]; then + mkdir -m 775 -p "${COM_ATMOS_GOES}" +fi -env +# TODO - This should be in the ex-script (#1226) #################################### # Specify Forecast Hour Range @@ -74,11 +49,6 @@ export SHOUR=000 export FHOUR=180 export FHINC=003 -####################################### -# Specify Restart File Name to Key Off -####################################### -restart_file=$COMIN/${RUN}.t${cyc}z.special.grb2if - #################################### # Specify Timeout Behavior of Post # @@ -93,52 +63,41 @@ export SLEEP_INT=5 #################################### # Check if this is a restart #################################### -if test -f $COMIN/$RUN.t${cyc}z.control.goessimpgrb2 -then - modelrecvy=$(cat < $COMIN/$RUN.t${cyc}z.control.goessimpgrb) - recvy_pdy=$(echo $modelrecvy | cut -c1-8) - recvy_cyc=$(echo $modelrecvy | cut -c9-10) - recvy_shour=$(echo $modelrecvy | cut -c11-13) - - if test $RERUN = "NO" - then - NEW_SHOUR=$(expr $recvy_shour + $FHINC) - if test $NEW_SHOUR -ge $SHOUR - then - export SHOUR=$NEW_SHOUR +if [[ -f "${COM_ATMOS_GOES}/${RUN}.t${cyc}z.control.goessimpgrb2" ]]; then + modelrecvy=$(cat < "${COM_ATMOS_GOES}/${RUN}.t${cyc}z.control.goessimpgrb") + recvy_cyc="${modelrecvy:8:2}" + recvy_shour="${modelrecvy:10:13}" + + if [[ ${RERUN} == "NO" ]]; then + NEW_SHOUR=$(( recvy_shour + FHINC )) + if (( NEW_SHOUR >= SHOUR )); then + export SHOUR=${NEW_SHOUR} fi - if test $recvy_shour -ge $FHOUR - then - msg="Forecast Pgrb Generation Already Completed to $FHOUR" - postmsg "$jlogfile" "$msg" + if (( recvy_shour >= FHOUR )); then + echo "Forecast Pgrb Generation Already Completed to ${FHOUR}" else - msg="Starting: PDY=$PDY cycle=t${recvy_cyc}z SHOUR=$SHOUR ." 
- postmsg "$jlogfile" "$msg" + echo "Starting: PDY=${PDY} cycle=t${recvy_cyc}z SHOUR=${SHOUR}" fi fi fi ############################################################# # Execute the script -$HOMEgfs/scripts/exgfs_atmos_grib2_special_npoess.sh +"${HOMEgfs}/scripts/exgfs_atmos_grib2_special_npoess.sh" export err=$?;err_chk ############################################################# -msg="JOB $job HAS COMPLETED NORMALLY!" -postmsg $jlogfile "$msg" - ############################################ # print exec I/O output ############################################ -if [ -e "$pgmout" ] ; then - cat $pgmout +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" fi ################################### # Remove temp directories ################################### -if [ "$KEEPDATA" != "YES" ] ; then - rm -rf $DATA +if [[ "${KEEPDATA}" != "YES" ]] ; then + rm -rf "${DATA}" fi -date diff --git a/jobs/JGFS_ATMOS_POSTSND b/jobs/JGFS_ATMOS_POSTSND index 5faf16f778..2318d70e31 100755 --- a/jobs/JGFS_ATMOS_POSTSND +++ b/jobs/JGFS_ATMOS_POSTSND @@ -1,63 +1,13 @@ -#!/bin/ksh -set -x - -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date - -############################# -# Source relevant config files -############################# -configs="base postsnd" -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} -for config in $configs; do - . $config_path/config.$config - status=$? - [[ $status -ne 0 ]] && exit $status -done +#! /usr/bin/env bash - -########################################## -# Source machine runtime environment -########################################## -. $HOMEgfs/env/${machine}.env postsnd -status=$? -[[ $status -ne 0 ]] && exit $status - - -############################################## -# Obtain unique process id (pid) and make temp directory -############################################## -export pid=${pid:-$$} -export outid=${outid:-"LL$job"} -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -mkdir -p $DATA -cd $DATA - - -############################################## -# Run setpdy and initialize PDY variables -############################################## -export cycle="t${cyc}z" -setpdy.sh -. 
./PDY - -############################################## -# Determine Job Output Name on System -############################################## -export pgmout="OUTPUT.${pid}" -export pgmerr=errfile +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "postsnd" -c "base postsnd" ############################################## # Set variables used in the exglobal script ############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -if [ $RUN_ENVIR = "nco" ]; then - export ROTDIR=${COMROOT:?}/$NET/$envir -fi +export CDUMP=${RUN/enkf} ######################################## @@ -73,32 +23,31 @@ export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} # Set up the source directories ################################### -export HOMEbufrsnd=${HOMEbufrsnd:-$HOMEgfs} -export EXECbufrsnd=${EXECbufrsnd:-$HOMEbufrsnd/exec} -export FIXbufrsnd=${FIXbufrsnd:-$HOMEbufrsnd/fix/product} -export PARMbufrsnd=${PARMbufrsnd:-$HOMEbufrsnd/parm/product} -export USHbufrsnd=${USHbufrsnd:-$HOMEbufrsnd/ush} -export SCRbufrsnd=${SCRbufrsnd:-$HOMEbufrsnd/scripts} +export HOMEbufrsnd=${HOMEbufrsnd:-${HOMEgfs}} +export EXECbufrsnd=${EXECbufrsnd:-${HOMEbufrsnd}/exec} +export FIXbufrsnd=${FIXbufrsnd:-${HOMEbufrsnd}/fix/product} +export PARMbufrsnd=${PARMbufrsnd:-${HOMEbufrsnd}/parm/product} +export USHbufrsnd=${USHbufrsnd:-${HOMEbufrsnd}/ush} +export SCRbufrsnd=${SCRbufrsnd:-${HOMEbufrsnd}/scripts} ############################## # Define COM Directories ############################## -export COMIN=${COMIN:-$ROTDIR/${CDUMP}.${PDY}/${cyc}/atmos} -export COMOUT=${COMOUT:-$ROTDIR/${CDUMP}.${PDY}/${cyc}/atmos} -export pcom=${pcom:-${COMOUT}/wmo} -export COMAWP=${COMAWP:-${COMOUT}/gempak} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} -[[ ! -d $COMOUT ]] && mkdir -p $COMOUT -[[ ! -d $pcom ]] && mkdir -p $pcom -[[ ! -d $COMAWP ]] && mkdir -p $COMAWP -env +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY COM_ATMOS_BUFR \ + COM_ATMOS_WMO COM_ATMOS_GEMPAK + +[[ ! -d ${COM_ATMOS_BUFR} ]] && mkdir -p "${COM_ATMOS_BUFR}" +[[ ! -d ${COM_ATMOS_GEMPAK} ]] && mkdir -p "${COM_ATMOS_GEMPAK}" +[[ ! -d ${COM_ATMOS_WMO} ]] && mkdir -p "${COM_ATMOS_WMO}" + ######################################################## # Execute the script. -$SCRbufrsnd/exgfs_atmos_postsnd.sh +${SCRbufrsnd}/exgfs_atmos_postsnd.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} ############################################## @@ -108,19 +57,16 @@ status=$? ############################################## # Final processing ############################################## -if [ -e "$pgmout" ] ; then - cat $pgmout +if [ -e "${pgmout}" ] ; then + cat ${pgmout} fi -msg="ENDED NORMALLY." -postmsg "$jlogfile" "$msg" - ########################################## # Remove the Temporary working directory ########################################## -cd $DATAROOT -[[ $KEEPDATA = "NO" ]] && rm -rf $DATA +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + -date exit 0 diff --git a/jobs/JGFS_ATMOS_VMINMON b/jobs/JGFS_ATMOS_VMINMON new file mode 100755 index 0000000000..a7300b4dd3 --- /dev/null +++ b/jobs/JGFS_ATMOS_VMINMON @@ -0,0 +1,73 @@ +#! 
/usr/bin/env bash + +########################################################### +# GFS Minimization Monitor (MinMon) job +########################################################### +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" + +########################################################### +# obtain unique process id (pid) and make temp directories +########################################################### +export MINMON_SUFFIX=${MINMON_SUFFIX:-GFS} +export m_job=${m_job:-${MINMON_SUFFIX}_mmDE} + + +############################################## +# Specify Package Areas +############################################## +export SCRgfs=${SCRgfs:-${HOMEgfs}/scripts} +export M_FIXgfs=${M_FIXgfs:-${HOMEgfs}/fix/product} + +export HOMEminmon=${HOMEminmon:-${HOMEgfs}} +export EXECminmon=${EXECminmon:-${HOMEminmon}/exec} +export USHminmon=${USHminmon:-${HOMEminmon}/ush} + + +############################################# +# determine PDY and cyc for previous cycle +############################################# + +pdate=$(${NDATE} -6 ${PDY}${cyc}) +echo 'pdate = ${pdate}' + +export P_PDY=${pdate:0:8} +export p_cyc=${pdate:8:2} + + +############################################# +# TANKverf - WHERE OUTPUT DATA WILL RESIDE +############################################# +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +YMD=${P_PDY} HH=${p_cyc} generate_com -rx COM_ATMOS_ANALYSIS_PREV:COM_ATMOS_ANALYSIS_TMPL + +M_TANKverf=${M_TANKverf:-${COM_ATMOS_ANALYSIS}/minmon} +export M_TANKverfM1=${M_TANKverfM1:-${COM_ATMOS_ANALYSIS_PREV}/minmon} + +mkdir -p -m 775 ${M_TANKverf} + +######################################## +# Set necessary environment variables +######################################## +export CYCLE_INTERVAL=6 +export gsistat=${gsistat:-${COM_ATMOS_ANALYSIS}/gfs.t${cyc}z.gsistat} + + +######################################################## +# Execute the script. +${GMONSH:-${SCRgfs}/exgfs_atmos_vminmon.sh} ${PDY} ${cyc} +err=$? +[[ ${err} -ne 0 ]] && exit ${err} + + +################################ +# Remove the Working Directory +################################ +KEEPDATA=${KEEPDATA:-NO} +cd ${DATAROOT} + +if [ ${KEEPDATA} = NO ] ; then + rm -rf ${DATA} +fi + + diff --git a/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE b/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE new file mode 100755 index 0000000000..065ebe8d0a --- /dev/null +++ b/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE @@ -0,0 +1,56 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA=${DATA:-${DATAROOT}/${RUN}aeroanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlfinal" -c "base aeroanl aeroanlfinal" + +############################################## +# Set variables used in the script +############################################## +# shellcheck disable=SC2153 +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +gcyc=${GDATE:8:2} +GDUMP="gdas" + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS COM_CHEM_ANALYSIS + +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_CHEM_ANALYSIS_PREV:COM_CHEM_ANALYSIS_TMPL \ + COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL + +mkdir -m 775 -p "${COM_CHEM_ANALYSIS}" + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASAEROFINALPY:-${HOMEgfs}/scripts/exglobal_aero_analysis_finalize.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || exit 1 +[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" + +exit 0 diff --git a/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE b/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE new file mode 100755 index 0000000000..2f8c222e18 --- /dev/null +++ b/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE @@ -0,0 +1,49 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export DATA=${DATA:-${DATAROOT}/${RUN}aeroanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlinit" -c "base aeroanl aeroanlinit" + +############################################## +# Set variables used in the script +############################################## +# shellcheck disable=SC2153 +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +gcyc=${GDATE:8:2} +GDUMP="gdas" + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS COM_CHEM_ANALYSIS + +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_CHEM_ANALYSIS_PREV:COM_CHEM_ANALYSIS_TMPL \ + COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL + +mkdir -m 775 -p "${COM_CHEM_ANALYSIS}" + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASAEROINITPY:-${HOMEgfs}/scripts/exglobal_aero_analysis_initialize.py} +${EXSCRIPT} +status=$? 
+[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/JGLOBAL_AERO_ANALYSIS_RUN b/jobs/JGLOBAL_AERO_ANALYSIS_RUN new file mode 100755 index 0000000000..853909dc03 --- /dev/null +++ b/jobs/JGLOBAL_AERO_ANALYSIS_RUN @@ -0,0 +1,35 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA=${DATA:-${DATAROOT}/${RUN}aeroanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "aeroanlrun" -c "base aeroanl aeroanlrun" + +############################################## +# Set variables used in the script +############################################## + +############################################## +# Begin JOB SPECIFIC work +############################################## + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASAERORUNSH:-${HOMEgfs}/scripts/exglobal_aero_analysis_run.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/JGLOBAL_ARCHIVE b/jobs/JGLOBAL_ARCHIVE new file mode 100755 index 0000000000..2d2f8c8814 --- /dev/null +++ b/jobs/JGLOBAL_ARCHIVE @@ -0,0 +1,52 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "arch" -c "base arch" + + +############################################## +# Set variables used in the script +############################################## +export CDUMP=${RUN/enkf} + +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS COM_ATMOS_BUFR COM_ATMOS_GEMPAK \ + COM_ATMOS_GENESIS COM_ATMOS_HISTORY COM_ATMOS_INPUT COM_ATMOS_MASTER COM_ATMOS_RESTART \ + COM_ATMOS_TRACK COM_ATMOS_WAFS COM_ATMOS_WMO \ + COM_CHEM_HISTORY \ + COM_ICE_HISTORY COM_ICE_INPUT \ + COM_OBS COM_TOP \ + COM_OCEAN_DAILY COM_OCEAN_HISTORY COM_OCEAN_INPUT COM_OCEAN_XSECT \ + COM_WAVE_GRID COM_WAVE_HISTORY COM_WAVE_STATION + +for grid in "0p25" "0p50" "1p00"; do + YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_ATMOS_GRIB_${grid}:COM_ATMOS_GRIB_TMPL" + YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_OCEAN_GRIB_${grid}:COM_OCEAN_GRIB_TMPL" +done + +############################################################### +# Run archive script +############################################################### + +${GLOBALARCHIVESH:-${SCRgfs}/exglobal_archive.sh} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + + +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || (echo "${DATAROOT} does not exist. 
ABORT!"; exit 1) +[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" + +exit 0 diff --git a/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE b/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE new file mode 100755 index 0000000000..37a49e0ae0 --- /dev/null +++ b/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE @@ -0,0 +1,48 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA=${DATA:-${DATAROOT}/${RUN}atmensanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlfinal" -c "base atmensanl atmensanlfinal" + +############################################## +# Set variables used in the script +############################################## +GDUMP="gdas" +GDUMP_ENS="enkf${GDUMP}" + +############################################## +# Begin JOB SPECIFIC work +############################################## +# Generate COM variable from template +MEMDIR='ensstat' RUN=${GDUMP_ENS} YMD=${PDY} HH=${cyc} generate_com -rx \ + COM_ATMOS_ANALYSIS_ENS:COM_ATMOS_ANALYSIS_TMPL + +mkdir -m 755 -p "${COM_ATMOS_ANALYSIS_ENS}" + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASATMENSFINALPY:-${HOMEgfs}/scripts/exglobal_atmens_analysis_finalize.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || ( echo "FATAL ERROR: ${DATAROOT} does not exist, ABORT!"; exit 1 ) +[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" + +exit 0 diff --git a/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE b/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE new file mode 100755 index 0000000000..246502cdfa --- /dev/null +++ b/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE @@ -0,0 +1,44 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export DATA=${DATA:-${DATAROOT}/${RUN}atmensanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlinit" -c "base atmensanl atmensanlinit" + +############################################## +# Set variables used in the script +############################################## +# shellcheck disable=SC2153 +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +gcyc=${GDATE:8:2} +GDUMP="gdas" + +############################################## +# Begin JOB SPECIFIC work +############################################## +# Generate COM variables from templates +RUN=${GDUMP} YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS + +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_ATMOS_ANALYSIS_PREV:COM_ATMOS_ANALYSIS_TMPL + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASATMENSINITPY:-${HOMEgfs}/scripts/exglobal_atmens_analysis_initialize.py} +${EXSCRIPT} +status=$? 
+[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN b/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN new file mode 100755 index 0000000000..0d10c76b05 --- /dev/null +++ b/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN @@ -0,0 +1,35 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA=${DATA:-${DATAROOT}/${RUN}atmensanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmensanlrun" -c "base atmensanl atmensanlrun" + +############################################## +# Set variables used in the script +############################################## + +############################################## +# Begin JOB SPECIFIC work +############################################## + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASATMENSRUNSH:-${HOMEgfs}/scripts/exglobal_atmens_analysis_run.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/JGLOBAL_ATMOS_ANALYSIS b/jobs/JGLOBAL_ATMOS_ANALYSIS new file mode 100755 index 0000000000..9e5850bfc3 --- /dev/null +++ b/jobs/JGLOBAL_ATMOS_ANALYSIS @@ -0,0 +1,122 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "anal" -c "base anal" + + +############################################## +# Set variables used in the script +############################################## +export CDATE=${CDATE:-${PDY}${cyc}} +export CDUMP=${RUN/enkf} +export COMPONENT="atmos" +export DO_CALC_ANALYSIS=${DO_CALC_ANALYSIS:-"YES"} +export MAKE_NSSTBUFR=${MAKE_NSSTBUFR:-"NO"} +export MAKE_ACFTBUFR=${MAKE_ACFTBUFR:-"NO"} + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +GDATE=$(${NDATE} -${assim_freq} ${PDY}${cyc}) +export gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP="gdas" +export GDUMP_ENS="enkf${GDUMP}" + +export OPREFIX="${CDUMP}.t${cyc}z." +export GPREFIX="${GDUMP}.t${gcyc}z." +export APREFIX="${CDUMP}.t${cyc}z." +export GPREFIX_ENS="${GDUMP_ENS}.t${gcyc}z." + +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS COM_ATMOS_ANALYSIS + +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_ATMOS_ANALYSIS_PREV:COM_ATMOS_ANALYSIS_TMPL \ + COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL + +MEMDIR='ensstat' RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_ATMOS_HISTORY_ENS_PREV:COM_ATMOS_HISTORY_TMPL + +mkdir -m 775 -p "${COM_ATMOS_ANALYSIS}" + +export ATMGES="${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf006.nc" +if [ ! -f ${ATMGES} ]; then + echo "FATAL ERROR: FILE MISSING: ATMGES = ${ATMGES}" + exit 1 +fi + +# Get LEVS +export LEVS=$(${NCLEN} ${ATMGES} pfull) +status=$? +[[ ${status} -ne 0 ]] && exit ${status} + + +if [ ${DOHYBVAR} = "YES" ]; then + export ATMGES_ENSMEAN="${COM_ATMOS_HISTORY_ENS_PREV}/${GPREFIX_ENS}atmf006.ensmean.nc" + if [ ! 
-f ${ATMGES_ENSMEAN} ]; then + echo "FATAL ERROR: FILE MISSING: ATMGES_ENSMEAN = ${ATMGES_ENSMEAN}" + exit 2 + fi +fi + + +# Link observational data +export PREPQC="${COM_OBS}/${OPREFIX}prepbufr" +if [[ ! -f ${PREPQC} ]]; then + echo "WARNING: Global PREPBUFR FILE ${PREPQC} MISSING" +fi +export TCVITL="${COM_OBS}/${OPREFIX}syndata.tcvitals.tm00" +if [[ ${DONST} = "YES" ]]; then + if [[ ${MAKE_NSSTBUFR} == "YES" ]]; then + export NSSTBF="${COM_OBS}/${OPREFIX}nsstbufr" + fi +fi +export PREPQCPF="${COM_OBS}/${OPREFIX}prepbufr.acft_profiles" + +# Copy fix file for obsproc # TODO: Why is this necessary? +if [[ ${RUN} = "gfs" ]]; then + mkdir -p ${ROTDIR}/fix + cp ${FIXgsi}/prepobs_errtable.global ${ROTDIR}/fix/ +fi + + +############################################################### +# Run relevant script + +${ANALYSISSH:-${SCRgfs}/exglobal_atmos_analysis.sh} +status=$? +[[ ${status} -ne 0 ]] && exit ${status} + + +############################################## +# Send Alerts +############################################## +if [ ${SENDDBN} = YES -a ${RUN} = gdas ] ; then + ${DBNROOT}/bin/dbn_alert MODEL GDAS_MSC_abias ${job} ${COM_ATMOS_ANALYSIS}/${APREFIX}abias + ${DBNROOT}/bin/dbn_alert MODEL GDAS_MSC_abias_pc ${job} ${COM_ATMOS_ANALYSIS}/${APREFIX}abias_pc + ${DBNROOT}/bin/dbn_alert MODEL GDAS_MSC_abias_air ${job} ${COM_ATMOS_ANALYSIS}/${APREFIX}abias_air +fi + + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat ${pgmout} +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + + +exit 0 diff --git a/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC b/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC new file mode 100755 index 0000000000..65a571a974 --- /dev/null +++ b/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC @@ -0,0 +1,82 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "analcalc" -c "base anal analcalc" + + +############################################## +# Set variables used in the script +############################################## +export CDUMP="${RUN/enkf}" +export DO_CALC_ANALYSIS=${DO_CALC_ANALYSIS:-"YES"} + + +############################################## +# Begin JOB SPECIFIC work +############################################## +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}") +# shellcheck disable= +export gPDY=${GDATE:0:8} +export gcyc=${GDATE:8:2} +export GDUMP="gdas" +export GDUMP_ENS="enkf${GDUMP}" + +export OPREFIX="${CDUMP}.t${cyc}z." +export GPREFIX="${GDUMP}.t${gcyc}z." +export APREFIX="${RUN}.t${cyc}z." +export GPREFIX_ENS="${GDUMP_ENS}.t${gcyc}z." + +RUN=${CDUMP} YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS + +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS COM_ATMOS_RESTART + +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_OBS_PREV:COM_OBS_TMPL \ + COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL + + +export ATMGES="${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf006.nc" +if [ ! -f ${ATMGES} ]; then + echo "FATAL ERROR: FILE MISSING: ATMGES = ${ATMGES}" + exit 1 +fi + +# Get LEVS +export LEVS=$(${NCLEN} ${ATMGES} pfull) +status=$? 
+[[ ${status} -ne 0 ]] && exit ${status} + + +# Generate Gaussian surface analysis # TODO: Should this be removed now that sfcanl is its own job? +export DOGAUSFCANL=${DOGAUSFCANL:-"YES"} + + +############################################################### +# Run relevant script + +${ANALCALCSH:-${SCRgfs}/exglobal_atmos_analysis_calc.sh} +status=$? +[[ ${status} -ne 0 ]] && exit ${status} + + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat ${pgmout} +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + + +exit 0 diff --git a/jobs/JGLOBAL_ATMOS_EMCSFC_SFC_PREP b/jobs/JGLOBAL_ATMOS_EMCSFC_SFC_PREP index 22389d6f05..fdaca08240 100755 --- a/jobs/JGLOBAL_ATMOS_EMCSFC_SFC_PREP +++ b/jobs/JGLOBAL_ATMOS_EMCSFC_SFC_PREP @@ -1,46 +1,9 @@ -#!/bin/ksh -set -x - -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date - -############################# -# Source relevant config files -############################# -configs="base" -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} -for config in $configs; do - . $config_path/config.$config - status=$? - [[ $status -ne 0 ]] && exit $status -done - - -############################################## -# Obtain unique process id (pid) and make temp directory -############################################## -export pid=${pid:-$$} -export outid=${outid:-"LL$job"} -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -mkdir -p $DATA -cd $DATA +#! /usr/bin/env bash +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "sfc_prep" -c "base" -############################################## -# Run setpdy and initialize PDY variables -############################################## -export cycle="t${cyc}z" -setpdy.sh -. 
./PDY - - -############################################## -# Determine Job Output Name on System -############################################## -export pgmout="OUTPUT.${pid}" -export pgmerr=errfile +export RUN_ENVIR=${RUN_ENVIR:-"nco"} export SENDDBN=${SENDDBN:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} @@ -50,36 +13,32 @@ export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} ############################################## CDATE=${CDATE:-${PDY}${cyc}} -GDATE=$($NDATE -06 $CDATE) -PDY_m6hrs=$(echo $GDATE | cut -c1-8) -cyc_m6hrs=$(echo $GDATE | cut -c9-10) +GDATE=$(${NDATE} -06 ${CDATE}) +PDY_m6hrs=$(echo ${GDATE} | cut -c1-8) +cyc_m6hrs=$(echo ${GDATE} | cut -c9-10) export cycle_m6hrs=t${cyc_m6hrs}z -export COMPONENT=${COMPONENT:-atmos} -export COMOUT=${COMOUT:-${COMROOT}/$NET/$envir/$RUN.$PDY/$cyc/$COMPONENT} +export COMPONENT="atmos" +export COMOUT=${COMOUT:-$(compath.py -o ${NET}/${gfs_ver}/${RUN}.${PDY})/${cyc}/${COMPONENT}} -export COMINgfs=${COMINgfs:-$(compath.py $NET/$envir/$RUN.$PDY)/$cyc/$COMPONENT} -export COMINgfs_m6hrs=${COMINgfs_m6hrs:-$(compath.py $NET/$envir/$RUN.$PDY_m6hrs)/$cyc_m6hrs/$COMPONENT} +export COMINobsproc=${COMINobsproc:-$(compath.py ${envir}/obsproc/${obsproc_ver})/${RUN}.${PDY}/${cyc}/${COMPONENT}} +export COMIN_m6hrs=${COMIN_m6hrs:-$(compath.py ${envir}/${NET}/${gfs_ver})/${RUN}.${PDY_m6hrs}/${cyc_m6hrs}/${COMPONENT}} -export IMS_FILE=${COMINgfs}/${RUN}.${cycle}.imssnow96.grib2 -export FIVE_MIN_ICE_FILE=${COMINgfs}/${RUN}.${cycle}.seaice.5min.grib2 -export AFWA_NH_FILE=${COMINgfs}/${RUN}.${cycle}.NPR.SNWN.SP.S1200.MESH16.grb -export AFWA_SH_FILE=${COMINgfs}/${RUN}.${cycle}.NPR.SNWS.SP.S1200.MESH16.grb +export IMS_FILE=${COMINobsproc}/${RUN}.${cycle}.imssnow96.grib2 +export FIVE_MIN_ICE_FILE=${COMINobsproc}/${RUN}.${cycle}.seaice.5min.grib2 +export AFWA_NH_FILE=${COMINobsproc}/${RUN}.${cycle}.NPR.SNWN.SP.S1200.MESH16.grb +export AFWA_SH_FILE=${COMINobsproc}/${RUN}.${cycle}.NPR.SNWS.SP.S1200.MESH16.grb export BLENDED_ICE_FILE=${BLENDED_ICE_FILE:-${RUN}.${cycle}.seaice.5min.blend.grb} -export BLENDED_ICE_FILE_m6hrs=${BLENDED_ICE_FILE_m6hrs:-${COMINgfs_m6hrs}/${RUN}.${cycle_m6hrs}.seaice.5min.blend.grb} +export BLENDED_ICE_FILE_m6hrs=${BLENDED_ICE_FILE_m6hrs:-${COMIN_m6hrs}/${RUN}.${cycle_m6hrs}.seaice.5min.blend.grb} ############################################################### # Run relevant script ############################################################### -env -msg="HAS BEGUN on $(hostname)" -postmsg "$jlogfile" "$msg" - -${EMCSFCPREPSH:-$SCRgfs/exemcsfc_global_sfc_prep.sh} +${EMCSFCPREPSH:-${SCRgfs}/exemcsfc_global_sfc_prep.sh} status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} ############################################## # End JOB SPECIFIC work @@ -89,18 +48,14 @@ status=$? # Final processing ############################################## if [ -e ${pgmout} ]; then - cat $pgmout + cat ${pgmout} fi -msg="ENDED NORMALLY." -postmsg "$jlogfile" "$msg" - ########################################## # Remove the Temporary working directory ########################################## -cd $DATAROOT -[[ $KEEPDATA = "NO" ]] && rm -rf $DATA +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} -date exit 0 diff --git a/jobs/JGLOBAL_ATMOS_POST b/jobs/JGLOBAL_ATMOS_POST new file mode 100755 index 0000000000..d636be4f30 --- /dev/null +++ b/jobs/JGLOBAL_ATMOS_POST @@ -0,0 +1,122 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "post" -c "base post" + + +#################################### +# Specify version numbers +#################################### +export crtm_ver=${post_crtm_ver:-v2.2.6} +export gfs_ver=${gfs_ver:-v15.0.0} +export hwrf_ver=${hwrf_ver:-v11.0.5} +export g2tmpl_ver=${g2tmpl_ver:-v1.5.0} + +############################################## +# Set variables used in the exglobal script +############################################## +export CDUMP=${RUN/enkf} + + +############################################## +# TODO: Remove this egregious HACK +############################################## +if [[ "${SDATE:-}" = "${PDY}${cyc}" ]]; then + if [[ ${post_times} = "anl" ]]; then + echo "No offline post-processing in the first half cycle for analysis" + exit 0 + fi +fi + + +############################################## +# Begin JOB SPECIFIC work +############################################## +export APRUNP=${APRUN:-${APRUN_NP}} +export RERUN=${RERUN:-NO} +export HOMECRTM=${HOMECRTM:-${PACKAGEROOT}/lib/crtm/${crtm_ver}} +export FIXCRTM=${CRTM_FIX:-${HOMECRTM}/fix} +export PARMpost=${PARMpost:-${HOMEgfs}/parm/post} +export INLINE_POST=${WRITE_DOPOST:-".false."} + +# Construct COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_RESTART COM_ATMOS_ANALYSIS COM_ATMOS_HISTORY COM_ATMOS_MASTER +if [[ ! -d ${COM_ATMOS_MASTER} ]]; then mkdir -m 775 -p "${COM_ATMOS_MASTER}"; fi + +if [[ ${GOESF} == "YES" ]]; then + YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_GOES + if [[ ! -d ${COM_ATMOS_GOES} ]]; then mkdir -m 775 -p "${COM_ATMOS_GOES}"; fi +fi + +if [[ ${WAFSF} == "YES" ]]; then + YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_WAFS + if [[ ! -d ${COM_ATMOS_WAFS} ]]; then mkdir -m 775 -p "${COM_ATMOS_WAFS}"; fi +fi + +for grid in '0p25' '0p50' '1p00'; do + prod_dir="COM_ATMOS_GRIB_${grid}" + GRID=${grid} YMD=${PDY} HH=${cyc} generate_com -rx "${prod_dir}:COM_ATMOS_GRIB_TMPL" + if [[ ! -d "${prod_dir}" ]]; then mkdir -m 775 -p "${!prod_dir}"; fi +done + +if [ "${RUN}" = gfs ];then + export FHOUT_PGB=${FHOUT_GFS:-3} #Output frequency of gfs pgb file at 1.0 and 0.5 deg. +fi +if [ "${RUN}" = gdas ]; then + export IGEN_GFS="gfs_avn" + export IGEN_ANL="anal_gfs" + export IGEN_FCST="gfs_avn" + export IGEN_GDAS_ANL="anal_gdas" + export FHOUT_PGB=${FHOUT:-1} #Output frequency of gfs pgb file at 1.0 and 0.5 deg. +fi + +if [ "${GRIBVERSION}" = grib2 ]; then + export IGEN_ANL="anal_gfs" + export IGEN_FCST="gfs_avn" + export IGEN_GFS="gfs_avn" +fi + +####################################### +# Specify Restart File Name to Key Off +####################################### +# TODO Improve the name of this variable +export restart_file=${COM_ATMOS_HISTORY}/${RUN}.t${cyc}z.atm.logf + +#################################### +# Specify Timeout Behavior of Post +# +# SLEEP_TIME - Amount of time to wait for +# a restart file before exiting +# SLEEP_INT - Amount of time to wait between +# checking for restart files +#################################### +export SLEEP_TIME=900 +export SLEEP_INT=5 + + +############################################################### +# Run relevant exglobal script + +"${HOMEgfs}/scripts/ex${RUN}_atmos_post.sh" +status=$? 
+(( status != 0 )) && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [ -e "${pgmout}" ]; then + cat "${pgmout}" +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || exit 1 +[[ "${KEEPDATA:-NO}" = "NO" ]] && rm -rf "${DATA}" + + +exit 0 diff --git a/jobs/JGLOBAL_ATMOS_POST_MANAGER b/jobs/JGLOBAL_ATMOS_POST_MANAGER index 94c848627f..1d82537dca 100755 --- a/jobs/JGLOBAL_ATMOS_POST_MANAGER +++ b/jobs/JGLOBAL_ATMOS_POST_MANAGER @@ -1,62 +1,17 @@ -#!/bin/sh +#! /usr/bin/env bash -######################################## -# GFS post manager -######################################## +# TODO (#1227) This job is not used in the rocoto suite -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "post" -c "base post" -############################# -# Source relevant config files -############################# -set -x -configs="base post" -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} -for config in $configs; do - . $config_path/config.$config - status=$? - [[ $status -ne 0 ]] && exit $status -done - - -########################################## -# Source machine runtime environment -########################################## -. $HOMEgfs/env/${machine}.env post -status=$? -[[ $status -ne 0 ]] && exit $status - -set -xue -# #### 07/30/1999 ################### -# SET SHELL PROCESSING VARIABLES -# ################################### -export PS4='$SECONDS + ' -date #################################### # Specify NET and RUN Name and model #################################### export NET=${NET:-gfs} export RUN=${RUN:-gfs} -export COMPONENT=${COMPONENT:-atmos} -#################################### -# obtain unique process id (pid) and make temp directories -#################################### -export pid=${pid:-$$} -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -mkdir $DATA -cd $DATA - -#################################### -# Determine Job Output Name on System -#################################### -export outid="LL$job" -export jobid="${outid}.o${pid}" -export pgmout="OUTPUT.${pid}" #################################### # Specify version numbers @@ -66,39 +21,24 @@ export gfs_ver=${gfs_ver:-v15.0.0} #################################### # Specify Execution Areas #################################### -export HOMEgfs=${HOMEgfs:-${NWROOT}/gfs.${gfs_ver}} -export EXECgfs=${HOMEgfs:-$HOMEgfs/exec} -export FIXgfs=${HOMEgfs:-$HOMEgfs/fix} -export PARMgfs=${HOMEgfs:-$HOMEgfs/parm} -export USHgfs=${HOMEgfs:-$HOMEgfs/ush} +export HOMEgfs=${HOMEgfs:-${PACKAGEROOT}/gfs.${gfs_ver}} +export EXECgfs=${HOMEgfs:-${HOMEgfs}/exec} +export FIXgfs=${HOMEgfs:-${HOMEgfs}/fix} +export PARMgfs=${HOMEgfs:-${HOMEgfs}/parm} +export USHgfs=${HOMEgfs:-${HOMEgfs}/ush} ########################### # Set up EXT variable ########################### export EXT_FCST=NO -################################### -# Set up the UTILITIES -################################### -# export HOMEutil=${HOMEutil:-/nw${envir}/util.${util_ver}} -# export utilscript=${utilscript:-$HOMEutil/ush} -# export utilexec=${utilexec:-$HOMEutil/exec} - -########################################### -# Run setpdy and 
initialize PDY variables -########################################### -export cycle=t${cyc}z -setpdy.sh -. ./PDY - -export ROTDIR=${ROTDIR:-${COMROOT:?}/$NET/$envir} -export COMIN=${COMIN:-$ROTDIR/$RUN.$PDY/$cyc/$COMPONENT} -export COMOUT=${COMOUT:-$ROTDIR/$RUN.$PDY/$cyc/$COMPONENT} +export ROTDIR=${ROTDIR:-${COMROOT:?}/${NET}/${envir}} +export COMIN=${COMIN:-${ROTDIR}/${RUN}.${PDY}/${cyc}/atmos} +export COMOUT=${COMOUT:-${ROTDIR}/${RUN}.${PDY}/${cyc}/atmos} ######################################################## # Execute the script. -$HOMEgfs/scripts/exglobal_atmos_pmgr.sh +${HOMEgfs}/scripts/exglobal_atmos_pmgr.sh ######################################################## -date diff --git a/jobs/JGLOBAL_ATMOS_SFCANL b/jobs/JGLOBAL_ATMOS_SFCANL new file mode 100755 index 0000000000..dcedb7b65b --- /dev/null +++ b/jobs/JGLOBAL_ATMOS_SFCANL @@ -0,0 +1,64 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "sfcanl" -c "base sfcanl" + + +############################################## +# Set variables used in the script +############################################## +export CDUMP="${RUN/enkf}" +if [[ ${RUN_ENVIR} = "nco" ]]; then + export ROTDIR=${COMROOT:?}/${NET}/${envir} +fi + + +############################################## +# Begin JOB SPECIFIC work +############################################## +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}") +# shellcheck disable= +gPDY=${GDATE:0:8} +gcyc=${GDATE:8:2} +export GDUMP="gdas" + +export OPREFIX="${CDUMP}.t${cyc}z." +export GPREFIX="${GDUMP}.t${gcyc}z." +export APREFIX="${CDUMP}.t${cyc}z." + +YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS COM_ATMOS_ANALYSIS COM_ATMOS_RESTART \ + COM_LAND_ANALYSIS + +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_OBS_PREV:COM_OBS_TMPL \ + COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL + +############################################################### +# Run relevant script + +${SFCANALSH:-${SCRgfs}/exglobal_atmos_sfcanl.sh} +status=$? +[[ ${status} -ne 0 ]] && exit ${status} + + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat ${pgmout} +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + + +exit 0 diff --git a/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC b/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC index e49e6a0244..d5e4834851 100755 --- a/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC +++ b/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC @@ -1,66 +1,17 @@ -#!/bin/ksh -set -x +#! /usr/bin/env bash -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date - - -############################# -# Source relevant config files -############################# -configs="base prep" -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} -for config in $configs; do - . $config_path/config.$config - status=$? - [[ $status -ne 0 ]] && exit $status -done - - -########################################## -# Source machine runtime environment -########################################## -. $HOMEgfs/env/${machine}.env prep -status=$? 
-[[ $status -ne 0 ]] && exit $status - - -############################################## -# Obtain unique process id (pid) and make temp directory -############################################## -export pid=${pid:-$$} -export outid=${outid:-"LL$job"} -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -mkdir -p $DATA -cd $DATA - - -############################################## -# Run setpdy and initialize PDY variables -############################################## -export cycle="t${cyc}z" -setpdy.sh -. ./PDY +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "prep" -c "base prep" - -############################################## -# Determine Job Output Name on System -############################################## -export pgmout="OUTPUT.${pid}" -export pgmerr=errfile +# TODO (#1220) Evaluate if this is still needed +export RUN_ENVIR=${RUN_ENVIR:-"nco"} ############################################## # Set variables used in the exglobal script ############################################## export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -export COMPONENT=${COMPONENT:-atmos} -if [ $RUN_ENVIR = "nco" ]; then - export ROTDIR=${COMROOT:?}/$NET/$envir -fi +export CDUMP=${RUN/enkf} ############################################## @@ -70,32 +21,21 @@ fi export PROCESS_TROPCY=${PROCESS_TROPCY:-YES} # Turn on tropical cyclone tcvitals QC proc. if YES export DO_RELOCATE=${DO_RELOCATE:-NO} # Turn on tropical cyclone relocation proc. if YES - export tmmark=tm00 -if [ $RUN_ENVIR = "nco" ]; then - export ARCHSYND=$COMROOTp3/gfs/${envir}/syndat # this location is unique, do not change -else - export ARCHSYND=${ROTDIR}/syndat -fi -if [ ! -d ${ARCHSYND} ]; then mkdir -p $ARCHSYND; fi +export ARCHSYND=${ROTDIR}/syndat # this location is unique, do not change +if [ ! -d ${ARCHSYND} ]; then mkdir -p ${ARCHSYND}; fi -export HOMENHCp1=${HOMENHCp1:-/gpfs/?p1/nhc/save/guidance/storm-data/ncep} -export HOMENHC=${HOMENHC:-/gpfs/dell2/nhc/save/guidance/storm-data/ncep} - -# JY export TANK_TROPCY=${TANK_TROPCY:-${DCOMROOT}/${envir}} # path to tropical cyclone record database -export TANK_TROPCY=${TANK_TROPCY:-${DCOMROOT}/prod} # path to tropical cyclone record database +export HOMENHC=${HOMENHC:-/lfs/h1/ops/prod/dcom/nhc/atcf/ncep} +export TANK_TROPCY=${TANK_TROPCY:-${DCOMROOT}} # path to tropical cyclone record database ############################################## # Define COM directories ############################################## -export COMIN=${ROTDIR}/${RUN}.${PDY}/${cyc}/$COMPONENT -export COMOUT=${ROTDIR}/${RUN}.${PDY}/${cyc}/$COMPONENT -if [ ! -d ${COMOUT} ]; then mkdir -p $COMOUT; fi -#export COMINgdas=${ROTDIR}/gdas.${PDY}/${cyc} -#export COMINgfs=${ROTDIR}/gfs.${PDY}/${cyc} +generate_com COM_OBS +if [[ ! -d "${COM_OBS}" ]]; then mkdir -p "${COM_OBS}"; fi -export CRES=$(echo $CASE | cut -c2-) +export CRES=$(echo ${CASE} | cut -c2-) export LATB=$((CRES*2)) export LONB=$((CRES*4)) export BKGFREQ=1 # for hourly relocation @@ -104,33 +44,24 @@ export BKGFREQ=1 # for hourly relocation ############################################## # Run relevant script ############################################## -env -msg="HAS BEGUN on $(hostname)" -postmsg "$jlogfile" "$msg" -$LOGSCRIPT - -${TROPCYQCRELOSH:-$SCRgfs/exglobal_atmos_tropcy_qc_reloc.sh} +${TROPCYQCRELOSH:-${SCRgfs}/exglobal_atmos_tropcy_qc_reloc.sh} status=$? 
-[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} ############################################## # Final processing ############################################## -if [ -e "$pgmout" ] ; then - cat $pgmout +if [ -e "${pgmout}" ] ; then + cat ${pgmout} fi -msg="ENDED NORMALLY." -postmsg "$jlogfile" "$msg" - - ########################################## # Remove the Temporary working directory ########################################## -cd $DATAROOT -[[ $KEEPDATA = "NO" ]] && rm -rf $DATA +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + -date exit 0 diff --git a/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE b/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE new file mode 100755 index 0000000000..c0bc56f6e2 --- /dev/null +++ b/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE @@ -0,0 +1,58 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA=${DATA:-${DATAROOT}/${RUN}atmanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanlfinal" -c "base atmanl atmanlfinal" + +############################################## +# Set variables used in the script +############################################## +# shellcheck disable=SC2153 +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +gcyc=${GDATE:8:2} +GDUMP="gdas" + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS + +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_ATMOS_ANALYSIS_PREV:COM_ATMOS_ANALYSIS_TMPL \ + COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL \ + COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL + +mkdir -m 775 -p "${COM_ATMOS_ANALYSIS}" + + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASATMFINALPY:-${HOMEgfs}/scripts/exglobal_atm_analysis_finalize.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || ( echo "FATAL ERROR: ${DATAROOT} does not exist, ABORT!"; exit 1 ) +[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" + +exit 0 diff --git a/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE b/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE new file mode 100755 index 0000000000..2d794fb846 --- /dev/null +++ b/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE @@ -0,0 +1,55 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export DATA=${DATA:-${DATAROOT}/${RUN}atmanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanlinit" -c "base atmanl atmanlinit" + +############################################## +# Set variables used in the script +############################################## +# shellcheck disable=SC2153 +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +gcyc=${GDATE:8:2} +GDUMP="gdas" +GDUMP_ENS="enkf${GDUMP}" + + +############################################## +# Begin JOB SPECIFIC work +############################################## + +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS COM_ATMOS_ANALYSIS + +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_ATMOS_ANALYSIS_PREV:COM_ATMOS_ANALYSIS_TMPL \ + COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL \ + COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL + +MEMDIR='ensstat' RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_ATMOS_HISTORY_ENS_PREV:COM_ATMOS_HISTORY_TMPL + +mkdir -m 775 -p "${COM_ATMOS_ANALYSIS}" + + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASATMINITPY:-${HOMEgfs}/scripts/exglobal_atm_analysis_initialize.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/JGLOBAL_ATM_ANALYSIS_RUN b/jobs/JGLOBAL_ATM_ANALYSIS_RUN new file mode 100755 index 0000000000..bbfdbe4a1f --- /dev/null +++ b/jobs/JGLOBAL_ATM_ANALYSIS_RUN @@ -0,0 +1,37 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA=${DATA:-${DATAROOT}/${RUN}atmanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "atmanlrun" -c "base atmanl atmanlrun" + +############################################## +# Set variables used in the script +############################################## + + +############################################## +# Begin JOB SPECIFIC work +############################################## + + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASATMRUNSH:-${HOMEgfs}/scripts/exglobal_atm_analysis_run.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/JGLOBAL_FORECAST b/jobs/JGLOBAL_FORECAST index d72a07614f..5be44a8c97 100755 --- a/jobs/JGLOBAL_FORECAST +++ b/jobs/JGLOBAL_FORECAST @@ -1,149 +1,78 @@ -#!/bin/ksh -set -x +#! 
/usr/bin/env bash -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4='$SECONDS + ' -date - -#-------------------------------- -if [ $RUN_ENVIR = "emc" ]; then -#-------------------------------- +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "fcst" -c "base fcst" ############################################## -# Set variables used in the exglobal script +# Set variables used in the script ############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} - -############################# -# Source relevant config files -############################# -configs="base fcst" -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} -for config in $configs; do - . $config_path/config.$config - status=$? - [[ $status -ne 0 ]] && exit $status -done - -########################################## -# Source machine runtime environment -########################################## -. $HOMEgfs/env/${machine}.env fcst -status=$? -[[ $status -ne 0 ]] && exit $status - -#-------------------------------- -fi -#-------------------------------- +export CDUMP=${RUN/enkf} ############################################## -# Obtain unique process id (pid) and make temp directory +# Begin JOB SPECIFIC work ############################################## -export pid=${pid:-$$} -export outid=${outid:-"LL$job"} -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -mkdir -p $DATA -cd $DATA -############################################## -# Run setpdy and initialize PDY variables -############################################## -export cycle="t${cyc}z" -setpdy.sh -. ./PDY +# Restart conditions for GFS cycle come from GDAS +rCDUMP=${CDUMP} +[[ ${CDUMP} = "gfs" ]] && export rCDUMP="gdas" +# Forecast length for GFS forecast +if [ ${CDUMP} = "gfs" ]; then + export FHMAX=${FHMAX_GFS} + export FHOUT=${FHOUT_GFS} + export FHMAX_HF=${FHMAX_HF_GFS} + export FHOUT_HF=${FHOUT_HF_GFS} +else + export FHMAX_HF=0 + export FHOUT_HF=0 +fi -############################################## -# Determine Job Output Name on System -############################################## -export pgmout="OUTPUT.${pid}" -export pgmerr=errfile +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}") +# shellcheck disable= +declare -x gPDY="${GDATE:0:8}" +declare -x gcyc="${GDATE:8:2}" -if [ $RUN_ENVIR = "nco" ]; then - export ROTDIR=${COMROOT:?}/$NET/$envir - export RSTDIR=${GESROOT:?}/$envir -fi +# Construct COM variables from templates (see config.com) +YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_RESTART COM_ATMOS_INPUT COM_ATMOS_ANALYSIS \ + COM_ATMOS_HISTORY COM_ATMOS_MASTER COM_TOP +RUN=${rCDUMP} YMD="${gPDY}" HH="${gcyc}" generate_com -rx \ + COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL -#-------------------------------- -if [ $RUN_ENVIR = "nco" ]; then -#-------------------------------- - -############################# -# Source relevant config files -############################# -configs="base fcst" -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} -for config in $configs; do - . $config_path/config.$config - status=$? - [[ $status -ne 0 ]] && exit $status -done -# Source additional configs -if [ ${DO_WAVE:-"NO"} = "YES" ]; then - configs="wave" - for config in $configs; do - . $config_path/config.$config - status=$? 
- [[ $status -ne 0 ]] && exit $status - done +if [[ ${DO_WAVE} == "YES" ]]; then + YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_RESTART COM_WAVE_PREP COM_WAVE_HISTORY + RUN=${rCDUMP} YMD="${gPDY}" HH="${gcyc}" generate_com -rx \ + COM_WAVE_RESTART_PREV:COM_WAVE_RESTART_TMPL + declare -rx RUNwave="${RUN}wave" fi -########################################## -# Source machine runtime environment -########################################## -. $HOMEgfs/env/${machine}.env fcst -status=$? -[[ $status -ne 0 ]] && exit $status - -#-------------------------------- +if [[ ${DO_OCN} == "YES" ]]; then + YMD=${PDY} HH=${cyc} generate_com -rx COM_MED_RESTART COM_OCEAN_RESTART COM_OCEAN_INPUT \ + COM_OCEAN_HISTORY COM_OCEAN_ANALYSIS + RUN=${CDUMP} YMD="${gPDY}" HH="${gcyc}" generate_com -rx \ + COM_OCEAN_RESTART_PREV:COM_OCEAN_RESTART_TMPL fi -#-------------------------------- - -# Set wave variables -if [ ${DO_WAVE:-"NO"} = "YES" ]; then - # WAVE component directory - export CDUMPwave=${CDUMPwave:-${CDUMP}wave} - export COMINwave=${COMINwave:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/wave} - export COMOUTwave=${COMOUTwave:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/wave} +if [[ ${DO_ICE} == "YES" ]]; then + YMD=${PDY} HH=${cyc} generate_com -rx COM_ICE_HISTORY COM_ICE_INPUT COM_ICE_RESTART + RUN=${CDUMP} YMD="${gPDY}" HH="${gcyc}" generate_com -rx \ + COM_ICE_RESTART_PREV:COM_ICE_RESTART_TMPL fi -############################################## -# Begin JOB SPECIFIC work -############################################## - -# Restart conditions for GFS cycle come from GDAS -rCDUMP=$CDUMP -[[ $CDUMP = "gfs" ]] && export rCDUMP="gdas" - -# Forecast length for GFS forecast -if [ $CDUMP = "gfs" ]; then - export FHMAX=$FHMAX_GFS - export FHOUT=$FHOUT_GFS - export FHMAX_HF=$FHMAX_HF_GFS - export FHOUT_HF=$FHOUT_HF_GFS -else - export FHMAX_HF=0 - export FHOUT_HF=0 +if [[ ${DO_AERO} == "YES" ]]; then + YMD=${PDY} HH=${cyc} generate_com -rx COM_CHEM_HISTORY fi ############################################################### # Run relevant exglobal script -env -msg="HAS BEGUN on $(hostname)" -postmsg "$jlogfile" "$msg" -$LOGSCRIPT - -${FORECASTSH:-$SCRgfs/exglobal_forecast.sh} +${FORECASTSH:-${SCRgfs}/exglobal_forecast.sh} status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} ############################################## @@ -153,18 +82,15 @@ status=$? ############################################## # Final processing ############################################## -if [ -e "$pgmout" ] ; then - cat $pgmout +if [ -e "${pgmout}" ] ; then + cat ${pgmout} fi -msg="ENDED NORMALLY." -postmsg "$jlogfile" "$msg" - ########################################## # Remove the Temporary working directory ########################################## -cd $DATAROOT -[[ $KEEPDATA = "NO" ]] && rm -rf $DATA +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + -date exit 0 diff --git a/jobs/JGLOBAL_LAND_ANALYSIS_FINALIZE b/jobs/JGLOBAL_LAND_ANALYSIS_FINALIZE new file mode 100755 index 0000000000..695888a568 --- /dev/null +++ b/jobs/JGLOBAL_LAND_ANALYSIS_FINALIZE @@ -0,0 +1,54 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA=${DATA:-${DATAROOT}/${RUN}landanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "landanlfinal" -c "base landanl landanlfinal" + +############################################## +# Set variables used in the script +############################################## +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +gcyc=${GDATE:8:2} +GDUMP="gdas" + +############################################## +# Begin JOB SPECIFIC work +############################################## +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_LAND_ANALYSIS + +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_LAND_ANALYSIS_PREV:COM_LAND_ANALYSIS_TMPL \ + COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL \ + COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASLANDFINALPY:-${HOMEgfs}/scripts/exglobal_land_analysis_finalize.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +########################################## +# Remove the Temporary working directory +########################################## +cd "${DATAROOT}" || ( echo "FATAL ERROR: ${DATAROOT} does not exist, ABORT!"; exit 1 ) +[[ ${KEEPDATA} = "NO" ]] && rm -rf "${DATA}" + +exit 0 diff --git a/jobs/JGLOBAL_LAND_ANALYSIS_INITIALIZE b/jobs/JGLOBAL_LAND_ANALYSIS_INITIALIZE new file mode 100755 index 0000000000..73848b95f9 --- /dev/null +++ b/jobs/JGLOBAL_LAND_ANALYSIS_INITIALIZE @@ -0,0 +1,49 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export DATA=${DATA:-${DATAROOT}/${RUN}landanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "landanlinit" -c "base landanl landanlinit" + +############################################## +# Set variables used in the script +############################################## +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +gcyc=${GDATE:8:2} +GDUMP="gdas" + +############################################## +# Begin JOB SPECIFIC work +############################################## +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS COM_LAND_ANALYSIS + +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_LAND_ANALYSIS_PREV:COM_LAND_ANALYSIS_TMPL \ + COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL \ + COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL + +mkdir -m 775 -p "${COM_LAND_ANALYSIS}" + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASLANDINITPY:-${HOMEgfs}/scripts/exglobal_land_analysis_initialize.py} +${EXSCRIPT} +status=$? 
+[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/JGLOBAL_LAND_ANALYSIS_RUN b/jobs/JGLOBAL_LAND_ANALYSIS_RUN new file mode 100755 index 0000000000..46781c4e8f --- /dev/null +++ b/jobs/JGLOBAL_LAND_ANALYSIS_RUN @@ -0,0 +1,39 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +export WIPE_DATA="NO" +export DATA=${DATA:-${DATAROOT}/${RUN}landanl_${cyc}} +source "${HOMEgfs}/ush/jjob_header.sh" -e "landanlrun" -c "base landanl landanlrun" + +############################################## +# Set variables used in the script +############################################## + + +############################################## +# Begin JOB SPECIFIC work +############################################## +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS + + +############################################################### +# Run relevant script + +EXSCRIPT=${GDASLANDRUNSH:-${HOMEgfs}/scripts/exglobal_land_analysis_run.sh} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/JGLOBAL_PREP_LAND_OBS b/jobs/JGLOBAL_PREP_LAND_OBS new file mode 100755 index 0000000000..164f78d8f6 --- /dev/null +++ b/jobs/JGLOBAL_PREP_LAND_OBS @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "preplandobs" -c "base preplandobs" + +############################################## +# Set variables used in the script +############################################## +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(date +%Y%m%d%H -d "${PDY} ${cyc} - ${assim_freq} hours") +gPDY=${GDATE:0:8} +gcyc=${GDATE:8:2} +GDUMP="gdas" + +############################################## +# Begin JOB SPECIFIC work +############################################## +# Generate COM variables from templates +YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS + +RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL + +############################################################### +# Run relevant script +EXSCRIPT=${GDASLANDPREPSH:-${HOMEgfs}/scripts/exglobal_prep_land_obs.py} +${EXSCRIPT} +status=$? +[[ ${status} -ne 0 ]] && (echo "FATAL ERROR: Error executing ${EXSCRIPT}, ABORT!"; exit "${status}") + +############################################## +# End JOB SPECIFIC work +############################################## + +############################################## +# Final processing +############################################## +if [[ -e "${pgmout}" ]] ; then + cat "${pgmout}" +fi + +exit 0 diff --git a/jobs/JGLOBAL_WAVE_GEMPAK b/jobs/JGLOBAL_WAVE_GEMPAK index 1a613f020a..b7c97ce571 100755 --- a/jobs/JGLOBAL_WAVE_GEMPAK +++ b/jobs/JGLOBAL_WAVE_GEMPAK @@ -1,67 +1,34 @@ -#!/bin/bash +#! 
/usr/bin/env bash - -date -set -xa -export PS4='$SECONDS + ' - -# JY - 10/29, move the block in the front, otherwise PDY is not defined for COMIN -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -mkdir -p $DATA -cd $DATA - - -###################################### -# Set up the cycle variable -###################################### -export cycle=${cycle:-t${cyc}z} - -setpdy.sh -. PDY -env -msg="Begin job for $job" -postmsg "$jlogfile" "$msg" - - -# -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export COMPONENT=${COMPONENT:-wave} -export machine=${machine:-WCOSS_DELL_P3} -export HOMEgfs=${HOMEgfs:-$(dirname $(dirname $0))} +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "wavegempak" -c "base wave wavegempak" # Add default errchk = err_chk export errchk=${errchk:-err_chk} ################################### # Set COM Paths -export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-${COMROOT}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/$COMPONENT/gempak} -#export pid=$$ -export pgmout="OUTPUT.$$" - +################################### export DBN_ALERT_TYPE=GFS_WAVE_GEMPAK export SENDCOM=${SENDCOM:-YES} export SENDDBN=${SENDDBN:-YES} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} +YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_GRID COM_WAVE_GEMPAK -if [ $SENDCOM = YES ] ; then - mkdir -m 775 -p $COMOUT -fi - +if [[ ! -d ${COM_WAVE_GEMPAK} ]]; then mkdir -p "${COM_WAVE_GEMPAK}"; fi ######################################################## # Execute the script. ${HOMEgfs}/scripts/exgfs_wave_nawips.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} ################################### + # Remove temp directories -if [ "$KEEPDATA" != "YES" ]; then - cd $DATAROOT - rm -rf $DATA +cd ${DATAROOT} +if [ "${KEEPDATA}" != "YES" ]; then + rm -rf ${DATA} fi -date exit 0 diff --git a/jobs/JGLOBAL_WAVE_INIT b/jobs/JGLOBAL_WAVE_INIT index bab8f04742..49fccad66f 100755 --- a/jobs/JGLOBAL_WAVE_INIT +++ b/jobs/JGLOBAL_WAVE_INIT @@ -1,83 +1,39 @@ -#!/bin/bash +#! /usr/bin/env bash -date -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4=' $SECONDS + ' -set -x -e +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "waveinit" -c "base wave waveinit" -############################# -# Source relevant config files -############################# -configs="base wave waveinit" -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} -for config in $configs; do - . $config_path/config.$config - status=$? - [[ $status -ne 0 ]] && exit $status -done - -########################################## -# Source machine runtime environment -########################################## -. $HOMEgfs/env/${machine}.env waveinit -status=$? -[[ $status -ne 0 ]] && exit $status - -# PATH for working directory -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export COMPONENT=${COMPONENT:-wave} # Add default errchk = err_chk export errchk=${errchk:-err_chk} -# Create and go to DATA directory -export DATA=${DATA:-${DATAROOT:?}/${jobid}} -mkdir -p $DATA -cd $DATA - -cyc=${cyc:-00} -export cycle=${cycle:-t${cyc}z} - -# Set PDY -setpdy.sh -. 
PDY - -export pgmout=OUTPUT.$$ - export MP_PULSE=0 # Path to HOME Directory -export FIXwave=${FIXwave:-$HOMEgfs/fix/fix_wave_${NET}} -export PARMwave=${PARMwave:-$HOMEgfs/parm/wave} -export USHwave=${USHwave:-$HOMEgfs/ush} -export EXECwave=${EXECwave:-$HOMEgfs/exec} +export FIXwave=${FIXwave:-${HOMEgfs}/fix/fix_wave_${NET}} +export PARMwave=${PARMwave:-${HOMEgfs}/parm/wave} +export USHwave=${USHwave:-${HOMEgfs}/ush} +export EXECwave=${EXECwave:-${HOMEgfs}/exec} -# Set COM Paths and GETGES environment -if [ $RUN_ENVIR = "nco" ]; then - export ROTDIR=${COMROOT:?}/$NET/$envir -fi -export COMIN=${COMIN:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/$COMPONENT} -[[ ! -d $COMOUT ]] && mkdir -m 775 -p $COMOUT +# Set COM Paths +YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_PREP -if [ $SENDCOM = YES ]; then - mkdir -p $COMOUT/rundata +if [ ${SENDCOM} = YES ]; then + mkdir -m 775 -p ${COM_WAVE_PREP} fi # Set mpi serial command -export wavempexec=${launcher:-"mpirun -n"} -export wave_mpmd=${mpmd:-"cfp"} +export wavempexec=${wavempexec:-"mpirun -n"} +export wave_mpmd=${wave_mpmd:-"cfp"} -# Execute the Script -$HOMEgfs/scripts/exgfs_wave_init.sh +# Execute the Script +${HOMEgfs}/scripts/exgfs_wave_init.sh ########################################## # Remove the Temporary working directory ########################################## -cd $DATAROOT -[[ $KEEPDATA = "NO" ]] && rm -rf $DATA +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + -date exit 0 diff --git a/jobs/JGLOBAL_WAVE_POST_BNDPNT b/jobs/JGLOBAL_WAVE_POST_BNDPNT index 93b690ad1d..9016d624d7 100755 --- a/jobs/JGLOBAL_WAVE_POST_BNDPNT +++ b/jobs/JGLOBAL_WAVE_POST_BNDPNT @@ -1,107 +1,51 @@ -#!/bin/bash +#! /usr/bin/env bash -date -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4=' $SECONDS + ' -set -x -e - -############################# -# Source relevant config files -############################# -configs="base wave wavepostsbs wavepostbndpnt" -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} -for config in $configs; do - . $config_path/config.$config - status=$? - [[ $status -ne 0 ]] && exit $status -done - -########################################## -# Source machine runtime environment -########################################## -. $HOMEgfs/env/${machine}.env wavepostbndpnt -status=$? -[[ $status -ne 0 ]] && exit $status - -# PATH for working directory -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export COMPONENT=${COMPONENT:-wave} - -export HOMEgefs=${HOMEgefs:-$NWROOT/$NET.${gefs_ver}} -export HOMEgfs=${HOMEgfs:-$NWROOT/$NET.${gfs_ver}} +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "wavepostbndpnt" -c "base wave wavepostsbs wavepostbndpnt" # Add default errchk = err_chk export errchk=${errchk:-err_chk} -# Create and go to DATA directory -export DATA=${DATA:-${DATAROOT:?}/${jobid}} -mkdir -p $DATA -cd $DATA - -export cyc=${cyc:-00} -export cycle=${cycle:-t${cyc}z} - -# Set PDY -setpdy.sh -. 
PDY - -export CDATE=$PDY$cyc - -export pgmout=OUTPUT.$$ - export MP_PULSE=0 # Path to HOME Directory -export FIXwave=${FIXwave:-$HOMEgfs/fix/fix_wave_${NET}} -export PARMwave=${PARMwave:-$HOMEgfs/parm/wave} -export USHwave=${USHwave:-$HOMEgfs/ush} -export EXECwave=${EXECwave:-$HOMEgfs/exec} +export FIXwave=${FIXwave:-${HOMEgfs}/fix/fix_wave_${NET}} +export PARMwave=${PARMwave:-${HOMEgfs}/parm/wave} +export USHwave=${USHwave:-${HOMEgfs}/ush} +export EXECwave=${EXECwave:-${HOMEgfs}/exec} # Set COM Paths and GETGES environment -if [ $RUN_ENVIR = "nco" ]; then - export ROTDIR=${COMROOT:?}/$NET/$envir -fi -export COMIN=${COMIN:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/$COMPONENT} +YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_PREP COM_WAVE_HISTORY COM_WAVE_STATION -mkdir -p $COMOUT/station +if [[ ! -d ${COM_WAVE_STATION} ]]; then mkdir -p "${COM_WAVE_STATION}"; fi -env | sort - -# Set wave model ID tag to include member number -# if ensemble; waveMEMB var empty in deterministic # Set wave model ID tag to include member number # if ensemble; waveMEMB var empty in deterministic membTAG='p' if [ "${waveMEMB}" == "00" ]; then membTAG='c'; fi export membTAG -export WAV_MOD_TAG=${CDUMP}wave${waveMEMB} +export WAV_MOD_TAG=${RUN}wave${waveMEMB} export CFP_VERBOSE=1 -export FHMAX_WAV_PNT=180 -if [ $FHMAX_WAV -lt $FHMAX_WAV_PNT ] ; then export FHMAX_WAV_IBP=$FHMAX_WAV ; fi +export FHMAX_WAV_PNT=${FHMAX_WAV_IBP} export DOSPC_WAV='YES' # Spectral post export DOBLL_WAV='NO' # Bulletin post -export DOBNDPNT_WAV='YES' #not boundary points +export DOBNDPNT_WAV='YES' # Do boundary points -# Execute the Script -$HOMEgfs/scripts/exgfs_wave_post_pnt.sh +# Execute the Script +${HOMEgfs}/scripts/exgfs_wave_post_pnt.sh err=$? -if [ $err -ne 0 ]; then - msg="FATAL ERROR: ex-script of GWES_POST failed!" -else - msg="$job completed normally!" +if [ ${err} -ne 0 ]; then + echo "FATAL ERROR: ex-script of GWES_POST failed!" + exit ${err} fi -postmsg "$jlogfile" "$msg" ########################################## # Remove the Temporary working directory ########################################## -cd $DATAROOT -[[ $KEEPDATA = "NO" ]] && rm -rf $DATA +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + -date exit 0 diff --git a/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL b/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL index 5d37dd35fc..c193a28cf7 100755 --- a/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL +++ b/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL @@ -1,108 +1,55 @@ -#!/bin/bash +#! /usr/bin/env bash -date -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4=' $SECONDS + ' -set -x -e +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "wavepostbndpntbll" -c "base wave wavepostsbs wavepostbndpntbll" -############################# -# Source relevant config files -############################# -configs="base wave wavepostsbs wavepostbndpnt" -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} -for config in $configs; do - . $config_path/config.$config - status=$? - [[ $status -ne 0 ]] && exit $status -done - -########################################## -# Source machine runtime environment -########################################## -. $HOMEgfs/env/${machine}.env wavepostbndpntbll -status=$? 
-[[ $status -ne 0 ]] && exit $status - -# PATH for working directory -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export COMPONENT=${COMPONENT:-wave} - -export HOMEgefs=${HOMEgefs:-$NWROOT/$NET.${gefs_ver}} -export HOMEgfs=${HOMEgfs:-$NWROOT/$NET.${gfs_ver}} +export COMPONENT="wave" # Add default errchk = err_chk export errchk=${errchk:-err_chk} -# Create and go to DATA directory -export DATA=${DATA:-${DATAROOT:?}/${jobid}} -mkdir -p $DATA -cd $DATA - -export cyc=${cyc:-00} -export cycle=${cycle:-t${cyc}z} - -# Set PDY -setpdy.sh -. PDY - -export CDATE=$PDY$cyc - -export pgmout=OUTPUT.$$ +export CDATE=${PDY}${cyc} export MP_PULSE=0 # Path to HOME Directory -export FIXwave=${FIXwave:-$HOMEgfs/fix/fix_wave_${NET}} -export PARMwave=${PARMwave:-$HOMEgfs/parm/wave} -export USHwave=${USHwave:-$HOMEgfs/ush} -export EXECwave=${EXECwave:-$HOMEgfs/exec} +export FIXwave=${FIXwave:-${HOMEgfs}/fix/fix_wave_${NET}} +export PARMwave=${PARMwave:-${HOMEgfs}/parm/wave} +export USHwave=${USHwave:-${HOMEgfs}/ush} +export EXECwave=${EXECwave:-${HOMEgfs}/exec} # Set COM Paths and GETGES environment -if [ $RUN_ENVIR = "nco" ]; then - export ROTDIR=${COMROOT:?}/$NET/$envir -fi -export COMIN=${COMIN:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/$COMPONENT} - - -mkdir -p $COMOUT/station +YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_PREP COM_WAVE_HISTORY COM_WAVE_STATION -env | sort +if [[ ! -d ${COM_WAVE_STATION} ]]; then mkdir -p "${COM_WAVE_STATION}"; fi -# Set wave model ID tag to include member number -# if ensemble; waveMEMB var empty in deterministic # Set wave model ID tag to include member number # if ensemble; waveMEMB var empty in deterministic membTAG='p' if [ "${waveMEMB}" == "00" ]; then membTAG='c'; fi export membTAG -export WAV_MOD_TAG=${CDUMP}wave${waveMEMB} +export WAV_MOD_TAG=${RUN}wave${waveMEMB} export CFP_VERBOSE=1 -export FHMAX_WAV_PNT=180 -if [ $FHMAX_WAV -lt $FHMAX_WAV_PNT ] ; then export FHMAX_WAV_IBP=$FHMAX_WAV ; fi +export FHMAX_WAV_PNT=${FHMAX_WAV_IBP} export DOSPC_WAV='NO' # Spectral post export DOBLL_WAV='YES' # Bulletin post export DOBNDPNT_WAV='YES' #boundary points # Execute the Script -$HOMEgfs/scripts/exgfs_wave_post_pnt.sh +${HOMEgfs}/scripts/exgfs_wave_post_pnt.sh err=$? -if [ $err -ne 0 ]; then - msg="FATAL ERROR: ex-script of GFS_WAVE_POST_PNT failed!" -else - msg="$job completed normally!" +if [ ${err} -ne 0 ]; then + echo "FATAL ERROR: ex-script of GFS_WAVE_POST_PNT failed!" + exit ${err} fi -postmsg "$jlogfile" "$msg" ########################################## # Remove the Temporary working directory ########################################## -cd $DATAROOT -[[ $KEEPDATA = "NO" ]] && rm -rf $DATA +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + -date exit 0 diff --git a/jobs/JGLOBAL_WAVE_POST_PNT b/jobs/JGLOBAL_WAVE_POST_PNT index 092916b7f6..3ee1d56eef 100755 --- a/jobs/JGLOBAL_WAVE_POST_PNT +++ b/jobs/JGLOBAL_WAVE_POST_PNT @@ -1,83 +1,30 @@ -#!/bin/bash +#! /usr/bin/env bash -date -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4=' $SECONDS + ' -set -x -e - -############################# -# Source relevant config files -############################# -configs="base wave wavepostsbs wavepostpnt" -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} -for config in $configs; do - . $config_path/config.$config - status=$? 
- [[ $status -ne 0 ]] && exit $status -done - -########################################## -# Source machine runtime environment -########################################## -. $HOMEgfs/env/${machine}.env wavepostpnt -status=$? -[[ $status -ne 0 ]] && exit $status - -# PATH for working directory -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export COMPONENT=${COMPONENT:-wave} - -export HOMEgefs=${HOMEgefs:-$NWROOT/$NET.${gefs_ver}} -export HOMEgfs=${HOMEgfs:-$NWROOT/$NET.${gfs_ver}} +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "wavepostpnt" -c "base wave wavepostsbs wavepostpnt" # Add default errchk = err_chk export errchk=${errchk:-err_chk} -# Create and go to DATA directory -export DATA=${DATA:-${DATAROOT:?}/${jobid}} -mkdir -p $DATA -cd $DATA - -export cyc=${cyc:-00} -export cycle=${cycle:-t${cyc}z} - -# Set PDY -setpdy.sh -. ./PDY - -export CDATE=$PDY$cyc - -export pgmout=OUTPUT.$$ - export MP_PULSE=0 # Path to HOME Directory -export FIXwave=${FIXwave:-$HOMEgfs/fix/fix_wave_${NET}} -export PARMwave=${PARMwave:-$HOMEgfs/parm/wave} -export USHwave=${USHwave:-$HOMEgfs/ush} -export EXECwave=${EXECwave:-$HOMEgfs/exec} +export FIXwave=${FIXwave:-${HOMEgfs}/fix/fix_wave_${NET}} +export PARMwave=${PARMwave:-${HOMEgfs}/parm/wave} +export USHwave=${USHwave:-${HOMEgfs}/ush} +export EXECwave=${EXECwave:-${HOMEgfs}/exec} # Set COM Paths and GETGES environment -if [ $RUN_ENVIR = "nco" ]; then - export ROTDIR=${COMROOT:?}/$NET/$envir -fi -export COMIN=${COMIN:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/$COMPONENT} +YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_PREP COM_WAVE_HISTORY COM_WAVE_STATION -mkdir -p $COMOUT/station +if [[ ! -d ${COM_WAVE_STATION} ]]; then mkdir -p "${COM_WAVE_STATION}"; fi -env | sort - -# Set wave model ID tag to include member number -# if ensemble; waveMEMB var empty in deterministic # Set wave model ID tag to include member number # if ensemble; waveMEMB var empty in deterministic membTAG='p' if [ "${waveMEMB}" == "00" ]; then membTAG='c'; fi export membTAG -export WAV_MOD_TAG=${CDUMP}wave${waveMEMB} +export WAV_MOD_TAG=${RUN}wave${waveMEMB} export CFP_VERBOSE=1 @@ -87,21 +34,19 @@ export DOBLL_WAV='YES' # Bulletin post export DOBNDPNT_WAV='NO' #not boundary points -# Execute the Script -$HOMEgfs/scripts/exgfs_wave_post_pnt.sh +# Execute the Script +${HOMEgfs}/scripts/exgfs_wave_post_pnt.sh err=$? -if [ $err -ne 0 ]; then - msg="FATAL ERROR: ex-script of GWES_POST failed!" -else - msg="$job completed normally!" +if [ ${err} -ne 0 ]; then + echo "FATAL ERROR: ex-script of GWES_POST failed!" + exit ${err} fi -postmsg "$jlogfile" "$msg" ########################################## # Remove the Temporary working directory ########################################## -cd $DATAROOT -[[ $KEEPDATA = "NO" ]] && rm -rf $DATA +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + -date exit 0 diff --git a/jobs/JGLOBAL_WAVE_POST_SBS b/jobs/JGLOBAL_WAVE_POST_SBS index d798e28def..47e7063db4 100755 --- a/jobs/JGLOBAL_WAVE_POST_SBS +++ b/jobs/JGLOBAL_WAVE_POST_SBS @@ -1,105 +1,49 @@ -#!/bin/bash +#! /usr/bin/env bash -date -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4=' $SECONDS + ' -set -x -e - -############################# -# Source relevant config files -############################# -configs="base wave wavepostsbs" -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} -for config in $configs; do - . 
$config_path/config.$config - status=$? - [[ $status -ne 0 ]] && exit $status -done - -########################################## -# Source machine runtime environment -########################################## -. $HOMEgfs/env/${machine}.env wavepostsbs -status=$? -[[ $status -ne 0 ]] && exit $status - -# PATH for working directory -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export COMPONENT=${COMPONENT:-wave} - -export HOMEgefs=${HOMEgefs:-$NWROOT/$NET.${gefs_ver}} -export HOMEgfs=${HOMEgfs:-$NWROOT/$NET.${gfs_ver}} +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "wavepostsbs" -c "base wave wavepostsbs" # Add default errchk = err_chk export errchk=${errchk:-err_chk} -# Create and go to DATA directory -export DATA=${DATA:-${DATAROOT:?}/${jobid}} -mkdir -p $DATA -cd $DATA - -export cyc=${cyc:-00} -export cycle=${cycle:-t${cyc}z} - -# Set PDY -setpdy.sh -. ./PDY - -export CDATE=$PDY$cyc - -export pgmout=OUTPUT.$$ - export MP_PULSE=0 # Path to HOME Directory -export FIXwave=${FIXwave:-$HOMEgfs/fix/fix_wave_${NET}} -export PARMwave=${PARMwave:-$HOMEgfs/parm/wave} -export USHwave=${USHwave:-$HOMEgfs/ush} -export EXECwave=${EXECwave:-$HOMEgfs/exec} +export FIXwave=${FIXwave:-${HOMEgfs}/fix/fix_wave_${NET}} +export PARMwave=${PARMwave:-${HOMEgfs}/parm/wave} +export USHwave=${USHwave:-${HOMEgfs}/ush} +export EXECwave=${EXECwave:-${HOMEgfs}/exec} # Set COM Paths and GETGES environment -if [ $RUN_ENVIR = "nco" ]; then - export ROTDIR=${COMROOT:?}/$NET/$envir -fi -export COMIN=${COMIN:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/$COMPONENT} +YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_PREP COM_WAVE_HISTORY COM_WAVE_GRID -export COMINice=${COMINice:-${COMROOTp2}/omb/prod} -export COMINwnd=${COMINwnd:-${COMROOT}/gfs/prod} -export COMIN_WAV_CUR=${COMIN_WAV_CUR:-${COMROOTp2}/rtofs/prod} +mkdir -p "${COM_WAVE_GRID}" -mkdir -p $COMOUT/gridded -env | sort - -# Set wave model ID tag to include member number +# Set wave model ID tag to include member number # if ensemble; waveMEMB var empty in deterministic # Set wave model ID tag to include member number # if ensemble; waveMEMB var empty in deterministic membTAG='p' if [ "${waveMEMB}" == "00" ]; then membTAG='c'; fi export membTAG -export WAV_MOD_TAG=${CDUMP}wave${waveMEMB} +export WAV_MOD_TAG=${RUN}wave${waveMEMB} export CFP_VERBOSE=1 -# Execute the Script -$HOMEgfs/scripts/exgfs_wave_post_gridded_sbs.sh +# Execute the Script +${HOMEgfs}/scripts/exgfs_wave_post_gridded_sbs.sh err=$? -if [ $err -ne 0 ]; then - msg="FATAL ERROR: ex-script of GWES_POST failed!" -else - msg="$job completed normally!" +if [ ${err} -ne 0 ]; then + echo "FATAL ERROR: ex-script of GWES_POST failed!" + exit ${err} fi -postmsg "$jlogfile" "$msg" ########################################## # Remove the Temporary working directory ########################################## -cd $DATAROOT -[[ $KEEPDATA = "NO" ]] && rm -rf $DATA +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + -date exit 0 diff --git a/jobs/JGLOBAL_WAVE_PRDGEN_BULLS b/jobs/JGLOBAL_WAVE_PRDGEN_BULLS index db8738dcb9..794258e756 100755 --- a/jobs/JGLOBAL_WAVE_PRDGEN_BULLS +++ b/jobs/JGLOBAL_WAVE_PRDGEN_BULLS @@ -1,61 +1,36 @@ -#!/bin/bash +#! 
/usr/bin/env bash -date -export PS4=' $SECONDS + ' -set -xa - -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -mkdir -p $DATA -cd $DATA - -###################################### -# Set up the cycle variable -###################################### -export cycle=${cycle:-t${cyc}z} - -# Set PDY - setpdy.sh - . PDY -env - -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export COMPONENT=${COMPONENT:-wave} -export HOMEgfs=${HOMEgfs:-$(dirname $(dirname $0))} # parent directory of current job card +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "waveawipsbulls" -c "base wave waveawipsbulls" # Add default errchk = err_chk export errchk=${errchk:-err_chk} ################################### # Set COM Paths -export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-${COMROOT}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/$COMPONENT} -export PCOM=${PCOM:-${COMOUT}/wmo} - +################################### export SENDCOM=${SENDCOM:-YES} export SENDDBN_NTC=${SENDDBN_NTC:-YES} export SENDDBN=${SENDDBN:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} +YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_STATION COM_WAVE_WMO -if [ $SENDCOM = YES ]; then - mkdir -p $COMOUT $PCOM -fi - +if [[ ! -d ${COM_WAVE_WMO} ]]; then mkdir -p "${COM_WAVE_WMO}"; fi ################################### -# Execute the Script +# Execute the Script -$HOMEgfs/scripts/exgfs_wave_prdgen_bulls.sh +${HOMEgfs}/scripts/exgfs_wave_prdgen_bulls.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} + ################################### # Remove temp directories - -if [ "$KEEPDATA" != "YES" ]; then - cd $DATAROOT - rm -rf $DATA +cd ${DATAROOT} +if [ "${KEEPDATA}" != "YES" ]; then + rm -rf ${DATA} fi -date -exit 0 + +exit 0 diff --git a/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED b/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED index 6e9f8ea5c2..a2134461da 100755 --- a/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED +++ b/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED @@ -1,28 +1,7 @@ -#!/bin/bash +#! /usr/bin/env bash -date -export PS4=' $SECONDS + ' -set -xa - -export DATA=${DATA:-${DATAROOT}/${jobid:?}} -mkdir -p $DATA -cd $DATA - -###################################### -# Set up the cycle variable -###################################### -export cycle=${cycle:-t${cyc}z} - -# Set PDY - setpdy.sh - . 
PDY - env - -# PATH for working directory -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export COMPONENT=${COMPONENT:-wave} -export HOMEgfs=${HOMEgfs:-$(dirname $(dirname $0))} # parent directory of current job card +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "waveawipsgridded" -c "base wave waveawipsgridded" # Add default errchk = err_chk export errchk=${errchk:-err_chk} @@ -30,49 +9,34 @@ export errchk=${errchk:-err_chk} ################################### # Set COM Paths ################################### -export COMIN=${COMIN:-$(compath.py ${NET}/${envir}/${RUN}.${PDY})/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-${COMROOT}/${NET}/${envir}/${RUN}.${PDY}/${cyc}/$COMPONENT} -export PCOM=${PCOM:-${COMOUT}/wmo} - - export SENDCOM=${SENDCOM:-YES} export SENDDBN_NTC=${SENDDBN_NTC:-YES} export SENDDBN=${SENDDBN:-NO} export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} +YMD=${PDY} HH=${cyc} generate_com -rx COM_WAVE_GRID COM_WAVE_WMO -if [ $SENDCOM = YES ]; then - mkdir -p $COMOUT $PCOM -fi - -# JY - move up -#export DATA=${DATA:-${DATAROOT}/${jobid:?}} -#mkdir -p $DATA -#cd $DATA -# -####################################### -### Set up the cycle variable -####################################### -#export cycle=${cycle:-t${cyc}z} +if [[ ! -d ${COM_WAVE_WMO} ]]; then mkdir -p "${COM_WAVE_WMO}"; fi -## Set PDY -# setpdy.sh -# . PDY -# env +if [ ${SENDCOM} = YES ]; then + mkdir -p "${COM_WAVE_WMO}" +fi ################################### -# Execute the Script +# Execute the Script ################################### -$HOMEgfs/scripts/exgfs_wave_prdgen_gridded.sh +${HOMEgfs}/scripts/exgfs_wave_prdgen_gridded.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} + ################################### # Remove temp directories ################################### -if [ "$KEEPDATA" != "YES" ]; then - cd $DATAROOT - rm -rf $DATA +cd ${DATAROOT} +if [ "${KEEPDATA}" != "YES" ]; then + rm -rf ${DATA} fi -date + + exit 0 diff --git a/jobs/JGLOBAL_WAVE_PREP b/jobs/JGLOBAL_WAVE_PREP index 03aa4c514e..5ff48d886c 100755 --- a/jobs/JGLOBAL_WAVE_PREP +++ b/jobs/JGLOBAL_WAVE_PREP @@ -1,54 +1,15 @@ -#!/bin/bash +#! /usr/bin/env bash -date -export RUN_ENVIR=${RUN_ENVIR:-"nco"} -export PS4=' $SECONDS + ' -set -x -e - -############################# -# Source relevant config files -############################# -configs="base wave waveprep" -export EXPDIR=${EXPDIR:-$HOMEgfs/parm/config} -config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} -for config in $configs; do - . $config_path/config.$config - status=$? - [[ $status -ne 0 ]] && exit $status -done - -########################################## -# Source machine runtime environment -########################################## -. $HOMEgfs/env/${machine}.env waveprep -status=$? -[[ $status -ne 0 ]] && exit $status - -# PATH for working directory -export NET=${NET:-gfs} -export RUN=${RUN:-gfs} -export COMPONENT=${COMPONENT:-wave} - -export HOMEgfs=${HOMEgfs:-$NWROOT/gfs.${gfs_ver}} +source "${HOMEgfs}/ush/preamble.sh" +source "${HOMEgfs}/ush/jjob_header.sh" -e "waveprep" -c "base wave waveprep" # Add default errchk = err_chk export errchk=${errchk:-err_chk} -# Create and go to DATA directory -export DATA=${DATA:-${DATAROOT:?}/${jobid}} -mkdir -p $DATA -cd $DATA +export CDUMP=${RUN/enkf} -cyc=${cyc:-00} -export cycle=${cycle:-t${cyc}z} - -# Set PDY -setpdy.sh -. 
./PDY # Set rtofs PDY -export RPDY=$PDY - -export pgmout=OUTPUT.$$ +export RPDY=${PDY} export MP_PULSE=0 @@ -56,50 +17,24 @@ export MP_PULSE=0 export CDO=${CDO_ROOT}/bin/cdo # Path to HOME Directory -export FIXwave=${FIXwave:-$HOMEgfs/fix/fix_wave_${NET}} -export PARMwave=${PARMwave:-$HOMEgfs/parm/wave} -export USHwave=${USHwave:-$HOMEgfs/ush} -export EXECwave=${EXECwave:-$HOMEgfs/exec} +export FIXwave=${FIXwave:-${HOMEgfs}/fix/fix_wave_${NET}} +export PARMwave=${PARMwave:-${HOMEgfs}/parm/wave} +export USHwave=${USHwave:-${HOMEgfs}/ush} +export EXECwave=${EXECwave:-${HOMEgfs}/exec} # Set COM Paths and GETGES environment -if [ $RUN_ENVIR = "nco" ]; then - export ROTDIR=${COMROOT:?}/$NET/$envir -fi -export COMIN=${COMIN:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/$COMPONENT} -export COMOUT=${COMOUT:-${ROTDIR}/${CDUMP}.${PDY}/${cyc}/$COMPONENT} -[[ ! -d $COMOUT ]] && mkdir -m 775 -p $COMOUT +YMD=${PDY} HH=${cyc} generate_com -rx COM_OBS COM_WAVE_PREP +generate_com -rx COM_RTOFS +[[ ! -d ${COM_WAVE_PREP} ]] && mkdir -m 775 -p "${COM_WAVE_PREP}" -if [ $RUN_ENVIR = "nco" ]; then - export COMIN_WAV_ICE=${COMIN_WAV_ICE:-$(compath.py gfs/prod)}/${CDUMP}.${PDY}/${cyc}/atmos - export COMIN_WAV_RTOFS=${COMIN_WAV_RTOFS:-$(compath.py ${WAVECUR_DID}/prod)} -else - if [ $WW3CURINP = "YES" ]; then - if [ ! -d $DMPDIR/${WAVECUR_DID}.${RPDY} ]; then export RPDY=$($NDATE -24 ${PDY}00 | cut -c1-8); fi - if [ ! -L $ROTDIR/${WAVECUR_DID}.${RPDY} ]; then # Check if symlink already exists in ROTDIR - $NLN $DMPDIR/${WAVECUR_DID}.${RPDY} $ROTDIR/${WAVECUR_DID}.${RPDY} - fi - BRPDY=$($NDATE -24 ${RPDY}00 | cut -c1-8) - if [ ! -L $ROTDIR/${WAVECUR_DID}.${BRPDY} ]; then # Check if symlink already exists in ROTDIR - $NLN $DMPDIR/${WAVECUR_DID}.${BRPDY} $ROTDIR/${WAVECUR_DID}.${BRPDY} - fi - export COMIN_WAV_RTOFS=${COMIN_WAV_RTOFS:-$ROTDIR} - fi - if [ $WW3ICEINP = "YES" ]; then - if [ ! -L $ROTDIR/${CDUMP}.${PDY}/${cyc}/atmos/${WAVICEFILE} ]; then # Check if symlink already exists in ROTDIR - $NLN $DMPDIR/$CDUMP.${PDY}/$cyc/${WAVICEFILE} $ROTDIR/$CDUMP.${PDY}/$cyc/atmos/${WAVICEFILE} - fi - export COMIN_WAV_ICE=${COMIN_WAV_ICE:-$ROTDIR/$RUN.$PDY/$cyc/atmos} - fi -fi - -# Execute the Script -$HOMEgfs/scripts/exgfs_wave_prep.sh +# Execute the Script +${HOMEgfs}/scripts/exgfs_wave_prep.sh ########################################## # Remove the Temporary working directory ########################################## -cd $DATAROOT -[[ $KEEPDATA = "NO" ]] && rm -rf $DATA +cd ${DATAROOT} +[[ ${KEEPDATA} = "NO" ]] && rm -rf ${DATA} + -date exit 0 diff --git a/jobs/rocoto/aeroanlfinal.sh b/jobs/rocoto/aeroanlfinal.sh new file mode 100755 index 0000000000..8f5a445de4 --- /dev/null +++ b/jobs/rocoto/aeroanlfinal.sh @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="aeroanlfinal" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_FINALIZE" +status=$? 
+exit "${status}" diff --git a/jobs/rocoto/aeroanlinit.sh b/jobs/rocoto/aeroanlinit.sh new file mode 100755 index 0000000000..4e3d32ff9f --- /dev/null +++ b/jobs/rocoto/aeroanlinit.sh @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="aeroanlinit" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_INITIALIZE" +status=$? +exit "${status}" diff --git a/jobs/rocoto/aeroanlrun.sh b/jobs/rocoto/aeroanlrun.sh new file mode 100755 index 0000000000..0ec2fb8437 --- /dev/null +++ b/jobs/rocoto/aeroanlrun.sh @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="aeroanlrun" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_AERO_ANALYSIS_RUN" +status=$? +exit "${status}" diff --git a/jobs/rocoto/aerosol_init.sh b/jobs/rocoto/aerosol_init.sh index d95f043e70..34ccc0fe26 100755 --- a/jobs/rocoto/aerosol_init.sh +++ b/jobs/rocoto/aerosol_init.sh @@ -1,6 +1,6 @@ -#!/bin/bash +#! /usr/bin/env bash -set -x +source "$HOMEgfs/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules @@ -34,5 +34,5 @@ fi ############################################################## # Exit cleanly -set +x + exit 0 diff --git a/jobs/rocoto/anal.sh b/jobs/rocoto/anal.sh index 5f39309dbb..d99152ef19 100755 --- a/jobs/rocoto/anal.sh +++ b/jobs/rocoto/anal.sh @@ -1,13 +1,20 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} + +export job="anal" +export jobid="${job}.$$" ############################################################### # Execute the JJOB -$HOMEgfs/jobs/JGLOBAL_ATMOS_ANALYSIS +${HOMEgfs}/jobs/JGLOBAL_ATMOS_ANALYSIS status=$? -exit $status + + +exit ${status} diff --git a/jobs/rocoto/analcalc.sh b/jobs/rocoto/analcalc.sh index df5915086e..2e669b0163 100755 --- a/jobs/rocoto/analcalc.sh +++ b/jobs/rocoto/analcalc.sh @@ -1,13 +1,20 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? 
-[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} + +export job="analcalc" +export jobid="${job}.$$" ############################################################### # Execute the JJOB -$HOMEgfs/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC +${HOMEgfs}/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC status=$? -exit $status + + +exit ${status} diff --git a/jobs/rocoto/analdiag.sh b/jobs/rocoto/analdiag.sh index 6e29a69600..cd6e1113f0 100755 --- a/jobs/rocoto/analdiag.sh +++ b/jobs/rocoto/analdiag.sh @@ -1,13 +1,20 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} + +export job="analdiag" +export jobid="${job}.$$" ############################################################### # Execute the JJOB -$HOMEgfs/jobs/JGDAS_ATMOS_ANALYSIS_DIAG +${HOMEgfs}/jobs/JGDAS_ATMOS_ANALYSIS_DIAG status=$? -exit $status + + +exit ${status} diff --git a/jobs/rocoto/arch.sh b/jobs/rocoto/arch.sh index e6ce577d1c..2f62d8b354 100755 --- a/jobs/rocoto/arch.sh +++ b/jobs/rocoto/arch.sh @@ -1,399 +1,20 @@ -#!/bin/bash -x +#! /usr/bin/env bash -############################################################### -## Abstract: -## Archive driver script -## RUN_ENVIR : runtime environment (emc | nco) -## HOMEgfs : /full/path/to/workflow -## EXPDIR : /full/path/to/config/files -## CDATE : current analysis date (YYYYMMDDHH) -## CDUMP : cycle name (gdas / gfs) -## PDY : current date (YYYYMMDD) -## cyc : current cycle (HH) -############################################################### +source "${HOMEgfs}/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. "${HOMEgfs}"/ush/load_fv3gfs_modules.sh status=$? -[[ $status -ne 0 ]] && exit $status - -############################################################### -# Source relevant configs -configs="base arch" -for config in $configs; do - . $EXPDIR/config.${config} - status=$? - [[ $status -ne 0 ]] && exit $status -done - -# ICS are restarts and always lag INC by $assim_freq hours -ARCHINC_CYC=$ARCH_CYC -ARCHICS_CYC=$((ARCH_CYC-assim_freq)) -if [ $ARCHICS_CYC -lt 0 ]; then - ARCHICS_CYC=$((ARCHICS_CYC+24)) -fi +[[ ${status} -ne 0 ]] && exit "${status}" -# CURRENT CYCLE -APREFIX="${CDUMP}.t${cyc}z." -ASUFFIX=${ASUFFIX:-$SUFFIX} +export job="arch" +export jobid="${job}.$$" -if [ $ASUFFIX = ".nc" ]; then - format="netcdf" -else - format="nemsio" -fi - - -# Realtime parallels run GFS MOS on 1 day delay -# If realtime parallel, back up CDATE_MOS one day -CDATE_MOS=$CDATE -if [ $REALTIME = "YES" ]; then - CDATE_MOS=$($NDATE -24 $CDATE) -fi -PDY_MOS=$(echo $CDATE_MOS | cut -c1-8) - -############################################################### -# Archive online for verification and diagnostics ############################################################### - -COMIN=${COMINatmos:-"$ROTDIR/$CDUMP.$PDY/$cyc/atmos"} -cd $COMIN - -[[ ! 
-d $ARCDIR ]] && mkdir -p $ARCDIR -$NCP ${APREFIX}gsistat $ARCDIR/gsistat.${CDUMP}.${CDATE} -$NCP ${APREFIX}pgrb2.1p00.anl $ARCDIR/pgbanl.${CDUMP}.${CDATE}.grib2 - -# Archive 1 degree forecast GRIB2 files for verification -if [ $CDUMP = "gfs" ]; then - fhmax=$FHMAX_GFS - fhr=0 - while [ $fhr -le $fhmax ]; do - fhr2=$(printf %02i $fhr) - fhr3=$(printf %03i $fhr) - $NCP ${APREFIX}pgrb2.1p00.f$fhr3 $ARCDIR/pgbf${fhr2}.${CDUMP}.${CDATE}.grib2 - (( fhr = 10#$fhr + 10#$FHOUT_GFS )) - done -fi -if [ $CDUMP = "gdas" ]; then - flist="000 003 006 009" - for fhr in $flist; do - fname=${APREFIX}pgrb2.1p00.f${fhr} - fhr2=$(printf %02i $fhr) - $NCP $fname $ARCDIR/pgbf${fhr2}.${CDUMP}.${CDATE}.grib2 - done -fi - -if [ -s avno.t${cyc}z.cyclone.trackatcfunix ]; then - PLSOT4=$(echo $PSLOT|cut -c 1-4 |tr '[a-z]' '[A-Z]') - cat avno.t${cyc}z.cyclone.trackatcfunix | sed s:AVNO:${PLSOT4}:g > ${ARCDIR}/atcfunix.${CDUMP}.$CDATE - cat avnop.t${cyc}z.cyclone.trackatcfunix | sed s:AVNO:${PLSOT4}:g > ${ARCDIR}/atcfunixp.${CDUMP}.$CDATE -fi - -if [ $CDUMP = "gdas" -a -s gdas.t${cyc}z.cyclone.trackatcfunix ]; then - PLSOT4=$(echo $PSLOT|cut -c 1-4 |tr '[a-z]' '[A-Z]') - cat gdas.t${cyc}z.cyclone.trackatcfunix | sed s:AVNO:${PLSOT4}:g > ${ARCDIR}/atcfunix.${CDUMP}.$CDATE - cat gdasp.t${cyc}z.cyclone.trackatcfunix | sed s:AVNO:${PLSOT4}:g > ${ARCDIR}/atcfunixp.${CDUMP}.$CDATE -fi - -if [ $CDUMP = "gfs" ]; then - $NCP storms.gfso.atcf_gen.$CDATE ${ARCDIR}/. - $NCP storms.gfso.atcf_gen.altg.$CDATE ${ARCDIR}/. - $NCP trak.gfso.atcfunix.$CDATE ${ARCDIR}/. - $NCP trak.gfso.atcfunix.altg.$CDATE ${ARCDIR}/. - - mkdir -p ${ARCDIR}/tracker.$CDATE/$CDUMP - blist="epac natl" - for basin in $blist; do - cp -rp $basin ${ARCDIR}/tracker.$CDATE/$CDUMP - done -fi - -# Archive required gaussian gfs forecast files for Fit2Obs -if [ $CDUMP = "gfs" -a $FITSARC = "YES" ]; then - VFYARC=${VFYARC:-$ROTDIR/vrfyarch} - [[ ! -d $VFYARC ]] && mkdir -p $VFYARC - mkdir -p $VFYARC/${CDUMP}.$PDY/$cyc - prefix=${CDUMP}.t${cyc}z - fhmax=${FHMAX_FITS:-$FHMAX_GFS} - fhr=0 - while [[ $fhr -le $fhmax ]]; do - fhr3=$(printf %03i $fhr) - sfcfile=${prefix}.sfcf${fhr3}${ASUFFIX} - sigfile=${prefix}.atmf${fhr3}${ASUFFIX} - $NCP $sfcfile $VFYARC/${CDUMP}.$PDY/$cyc/ - $NCP $sigfile $VFYARC/${CDUMP}.$PDY/$cyc/ - (( fhr = 10#$fhr + 6 )) - done -fi - - -############################################################### -# Archive data either to HPSS or locally -if [[ $HPSSARCH = "YES" || $LOCALARCH = "YES" ]]; then -############################################################### - -# --set the archiving command and create local directories, if necessary -TARCMD="htar" -if [[ $LOCALARCH = "YES" ]]; then - TARCMD="tar" - [ ! -d $ATARDIR/$CDATE ] && mkdir -p $ATARDIR/$CDATE - [ ! 
-d $ATARDIR/$CDATE_MOS -a -d $ROTDIR/gfsmos.$PDY_MOS -a $cyc -eq 18 ] && mkdir -p $ATARDIR/$CDATE_MOS -fi - -#--determine when to save ICs for warm start and forecast-only runs -SAVEWARMICA="NO" -SAVEWARMICB="NO" -SAVEFCSTIC="NO" -firstday=$($NDATE +24 $SDATE) -mm=$(echo $CDATE|cut -c 5-6) -dd=$(echo $CDATE|cut -c 7-8) -nday=$(( (10#$mm-1)*30+10#$dd )) -mod=$(($nday % $ARCH_WARMICFREQ)) -if [ $CDATE -eq $firstday -a $cyc -eq $ARCHINC_CYC ]; then SAVEWARMICA="YES" ; fi -if [ $CDATE -eq $firstday -a $cyc -eq $ARCHICS_CYC ]; then SAVEWARMICB="YES" ; fi -if [ $mod -eq 0 -a $cyc -eq $ARCHINC_CYC ]; then SAVEWARMICA="YES" ; fi -if [ $mod -eq 0 -a $cyc -eq $ARCHICS_CYC ]; then SAVEWARMICB="YES" ; fi - -if [ $ARCHICS_CYC -eq 18 ]; then - nday1=$((nday+1)) - mod1=$(($nday1 % $ARCH_WARMICFREQ)) - if [ $mod1 -eq 0 -a $cyc -eq $ARCHICS_CYC ] ; then SAVEWARMICB="YES" ; fi - if [ $mod1 -ne 0 -a $cyc -eq $ARCHICS_CYC ] ; then SAVEWARMICB="NO" ; fi - if [ $CDATE -eq $SDATE -a $cyc -eq $ARCHICS_CYC ] ; then SAVEWARMICB="YES" ; fi -fi - -mod=$(($nday % $ARCH_FCSTICFREQ)) -if [ $mod -eq 0 -o $CDATE -eq $firstday ]; then SAVEFCSTIC="YES" ; fi - - -ARCH_LIST="$COMIN/archlist" -[[ -d $ARCH_LIST ]] && rm -rf $ARCH_LIST -mkdir -p $ARCH_LIST -cd $ARCH_LIST - -$HOMEgfs/ush/hpssarch_gen.sh $CDUMP +# Execute the JJOB +"${HOMEgfs}"/jobs/JGLOBAL_ARCHIVE status=$? -if [ $status -ne 0 ]; then - echo "$HOMEgfs/ush/hpssarch_gen.sh $CDUMP failed, ABORT!" - exit $status -fi - -cd $ROTDIR - -if [ $CDUMP = "gfs" ]; then - - targrp_list="gfsa gfsb" - - if [ ${ARCH_GAUSSIAN:-"NO"} = "YES" ]; then - targrp_list="$targrp_list gfs_flux gfs_${format}b gfs_pgrb2b" - if [ $MODE = "cycled" ]; then - targrp_list="$targrp_list gfs_${format}a" - fi - fi - - if [ $DO_WAVE = "YES" -a "$WAVE_CDUMP" != "gdas" ]; then - targrp_list="$targrp_list gfswave" - fi - if [ $DO_OCN = "YES" ]; then - targrp_list="$targrp_list ocn_ice_grib2_0p5 ocn_ice_grib2_0p25 ocn_2D ocn_3D ocn_xsect ocn_daily wavocn gfs_flux_1p00" - fi - if [ $DO_ICE = "YES" ]; then - targrp_list="$targrp_list ice" - fi - - # Aerosols - if [ $DO_AERO = "YES" ]; then - for targrp in chem; do - htar -P -cvf $ATARDIR/$CDATE/${targrp}.tar $(cat $ARCH_LIST/${targrp}.txt) - status=$? - if [ $status -ne 0 -a $CDATE -ge $firstday ]; then - echo "HTAR $CDATE ${targrp}.tar failed" - exit $status - fi - done - fi - - #for restarts - if [ $SAVEFCSTIC = "YES" ]; then - targrp_list="$targrp_list gfs_restarta" - fi - - #for downstream products - if [ $DO_BUFRSND = "YES" -o $WAFSF = "YES" ]; then - targrp_list="$targrp_list gfs_downstream" - fi - - #--save mdl gfsmos output from all cycles in the 18Z archive directory - if [ -d gfsmos.$PDY_MOS -a $cyc -eq 18 ]; then - $TARCMD -P -cvf $ATARDIR/$CDATE_MOS/gfsmos.tar ./gfsmos.$PDY_MOS - status=$? 
- if [ $status -ne 0 -a $CDATE -ge $firstday ]; then - echo "$(echo $TARCMD | tr 'a-z' 'A-Z') $CDATE gfsmos.tar failed" - exit $status - fi - fi -elif [ $CDUMP = "gdas" ]; then - - targrp_list="gdas" - - #gdaswave - if [ $DO_WAVE = "YES" ]; then - targrp_list="$targrp_list gdaswave" - fi - - if [ $SAVEWARMICA = "YES" -o $SAVEFCSTIC = "YES" ]; then - targrp_list="$targrp_list gdas_restarta" - - if [ $DO_WAVE = "YES" ]; then - targrp_list="$targrp_list gdaswave_restart" - fi - fi - - if [ $SAVEWARMICB = "YES" -o $SAVEFCSTIC = "YES" ]; then - targrp_list="$targrp_list gdas_restartb" - fi -fi - -# Turn on extended globbing options -shopt -s extglob -for targrp in $targrp_list; do - $TARCMD -P -cvf $ATARDIR/$CDATE/${targrp}.tar $(cat $ARCH_LIST/${targrp}.txt) - status=$? - if [ $status -ne 0 -a $CDATE -ge $firstday ]; then - echo "$(echo $TARCMD | tr 'a-z' 'A-Z') $CDATE ${targrp}.tar failed" - exit $status - fi -done -# Turn extended globbing back off -shopt -u extglob - -############################################################### -fi ##end of HPSS archive -############################################################### - - - -############################################################### -# Clean up previous cycles; various depths -# PRIOR CYCLE: Leave the prior cycle alone -GDATE=$($NDATE -$assim_freq $CDATE) - -# PREVIOUS to the PRIOR CYCLE -GDATE=$($NDATE -$assim_freq $GDATE) -gPDY=$(echo $GDATE | cut -c1-8) -gcyc=$(echo $GDATE | cut -c9-10) - -# Remove the TMPDIR directory -COMIN="$RUNDIR/$GDATE" -[[ -d $COMIN ]] && rm -rf $COMIN - -if [[ "${DELETE_COM_IN_ARCHIVE_JOB:-YES}" == NO ]] ; then - exit 0 -fi - -# Step back every assim_freq hours and remove old rotating directories -# for successful cycles (defaults from 24h to 120h). If GLDAS is -# active, retain files needed by GLDAS update. Independent of GLDAS, -# retain files needed by Fit2Obs -DO_GLDAS=${DO_GLDAS:-"NO"} -GDATEEND=$($NDATE -${RMOLDEND:-24} $CDATE) -GDATE=$($NDATE -${RMOLDSTD:-120} $CDATE) -GLDAS_DATE=$($NDATE -96 $CDATE) -RTOFS_DATE=$($NDATE -48 $CDATE) -while [ $GDATE -le $GDATEEND ]; do - gPDY=$(echo $GDATE | cut -c1-8) - gcyc=$(echo $GDATE | cut -c9-10) - COMIN="$ROTDIR/${CDUMP}.$gPDY/$gcyc/atmos" - COMINwave="$ROTDIR/${CDUMP}.$gPDY/$gcyc/wave" - COMINrtofs="$ROTDIR/rtofs.$gPDY" - if [ -d $COMIN ]; then - rocotolog="$EXPDIR/logs/${GDATE}.log" - if [ -f $rocotolog ]; then - testend=$(tail -n 1 $rocotolog | grep "This cycle is complete: Success") - rc=$? - if [ $rc -eq 0 ]; then - if [ -d $COMINwave ]; then rm -rf $COMINwave ; fi - if [ -d $COMINrtofs -a $GDATE -lt $RTOFS_DATE ]; then rm -rf $COMINrtofs ; fi - if [ $CDUMP != "gdas" -o $DO_GLDAS = "NO" -o $GDATE -lt $GLDAS_DATE ]; then - if [ $CDUMP = "gdas" ]; then - for file in $(ls $COMIN |grep -v prepbufr |grep -v cnvstat |grep -v atmanl.nc); do - rm -rf $COMIN/$file - done - else - rm -rf $COMIN - fi - else - if [ $DO_GLDAS = "YES" ]; then - for file in $(ls $COMIN |grep -v sflux |grep -v RESTART |grep -v prepbufr |grep -v cnvstat |grep -v atmanl.nc); do - rm -rf $COMIN/$file - done - for file in $(ls $COMIN/RESTART |grep -v sfcanl ); do - rm -rf $COMIN/RESTART/$file - done - else - for file in $(ls $COMIN |grep -v prepbufr |grep -v cnvstat |grep -v atmanl.nc); do - rm -rf $COMIN/$file - done - fi - fi - fi - fi - fi - - # Remove any empty directories - if [ -d $COMIN ]; then - [[ ! "$(ls -A $COMIN)" ]] && rm -rf $COMIN - fi - - if [ -d $COMINwave ]; then - [[ ! 
"$(ls -A $COMINwave)" ]] && rm -rf $COMINwave - fi - - # Remove mdl gfsmos directory - if [ $CDUMP = "gfs" ]; then - COMIN="$ROTDIR/gfsmos.$gPDY" - if [ -d $COMIN -a $GDATE -lt $CDATE_MOS ]; then rm -rf $COMIN ; fi - fi - - GDATE=$($NDATE +$assim_freq $GDATE) -done - -# Remove archived gaussian files used for Fit2Obs in $VFYARC that are -# $FHMAX_FITS plus a delta before $CDATE. Touch existing archived -# gaussian files to prevent the files from being removed by automatic -# scrubber present on some machines. - -if [ $CDUMP = "gfs" ]; then - fhmax=$((FHMAX_FITS+36)) - RDATE=$($NDATE -$fhmax $CDATE) - rPDY=$(echo $RDATE | cut -c1-8) - COMIN="$VFYARC/$CDUMP.$rPDY" - [[ -d $COMIN ]] && rm -rf $COMIN - - TDATE=$($NDATE -$FHMAX_FITS $CDATE) - while [ $TDATE -lt $CDATE ]; do - tPDY=$(echo $TDATE | cut -c1-8) - tcyc=$(echo $TDATE | cut -c9-10) - TDIR=$VFYARC/$CDUMP.$tPDY/$tcyc - [[ -d $TDIR ]] && touch $TDIR/* - TDATE=$($NDATE +6 $TDATE) - done -fi - -# Remove $CDUMP.$rPDY for the older of GDATE or RDATE -GDATE=$($NDATE -${RMOLDSTD:-120} $CDATE) -fhmax=$FHMAX_GFS -RDATE=$($NDATE -$fhmax $CDATE) -if [ $GDATE -lt $RDATE ]; then - RDATE=$GDATE -fi -rPDY=$(echo $RDATE | cut -c1-8) -COMIN="$ROTDIR/$CDUMP.$rPDY" -[[ -d $COMIN ]] && rm -rf $COMIN - - -############################################################### -exit 0 +exit "${status}" diff --git a/jobs/rocoto/atmanlfinal.sh b/jobs/rocoto/atmanlfinal.sh new file mode 100755 index 0000000000..3c75c52cb0 --- /dev/null +++ b/jobs/rocoto/atmanlfinal.sh @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="atmanlfinal" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_FINALIZE" +status=$? +exit "${status}" diff --git a/jobs/rocoto/atmanlinit.sh b/jobs/rocoto/atmanlinit.sh new file mode 100755 index 0000000000..7bb2587f0b --- /dev/null +++ b/jobs/rocoto/atmanlinit.sh @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="atmanlinit" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_INITIALIZE" +status=$? +exit "${status}" diff --git a/jobs/rocoto/atmanlrun.sh b/jobs/rocoto/atmanlrun.sh new file mode 100755 index 0000000000..aad80e0b06 --- /dev/null +++ b/jobs/rocoto/atmanlrun.sh @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. 
"${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="atmanlrun" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATM_ANALYSIS_RUN" +status=$? +exit "${status}" diff --git a/jobs/rocoto/atmensanlfinal.sh b/jobs/rocoto/atmensanlfinal.sh new file mode 100755 index 0000000000..838e9712f8 --- /dev/null +++ b/jobs/rocoto/atmensanlfinal.sh @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="atmensanlfinal" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_FINALIZE" +status=$? +exit "${status}" diff --git a/jobs/rocoto/atmensanlinit.sh b/jobs/rocoto/atmensanlinit.sh new file mode 100755 index 0000000000..0ab78a1083 --- /dev/null +++ b/jobs/rocoto/atmensanlinit.sh @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="atmensanlinit" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_INITIALIZE" +status=$? +exit "${status}" diff --git a/jobs/rocoto/atmensanlrun.sh b/jobs/rocoto/atmensanlrun.sh new file mode 100755 index 0000000000..91efdb3768 --- /dev/null +++ b/jobs/rocoto/atmensanlrun.sh @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="atmensanlrun" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_ATMENS_ANALYSIS_RUN" +status=$? +exit "${status}" diff --git a/jobs/rocoto/awips.sh b/jobs/rocoto/awips.sh index 8d94cdef20..f9289255f9 100755 --- a/jobs/rocoto/awips.sh +++ b/jobs/rocoto/awips.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### ## Abstract: @@ -13,136 +15,61 @@ ############################################################### ############################################################### -echo -echo "=============== BEGIN TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. $HOMEgfs/ush/load_fv3gfs_modules.sh +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" status=$? -[[ $status -ne 0 ]] && exit $status +(( status != 0 )) && exit "${status}" +export job="awips" +export jobid="${job}.$$" -############################################################### -echo -echo "=============== BEGIN TO SOURCE RELEVANT CONFIGS ===============" -configs="base awips" -for config in $configs; do - . $EXPDIR/config.${config} - status=$? - [[ $status -ne 0 ]] && exit $status -done +# TODO (#1228) - This script is doing more than just calling a j-job +# Also, this forces us to call the config files here instead of the j-job +source "${HOMEgfs}/ush/jjob_header.sh" -e "awips" -c "base awips" -fhrlst=$(echo $FHRLST | sed -e 's/_/ /g; s/f/ /g; s/,/ /g') +fhrlst=$(echo ${FHRLST} | sed -e 's/_/ /g; s/f/ /g; s/,/ /g') ############################################################### -echo -echo "=============== BEGIN TO SOURCE MACHINE RUNTIME ENVIRONMENT ===============" -. $BASE_ENV/${machine}.env awips -status=$? -[[ $status -ne 0 ]] && exit $status - -############################################################### -export COMPONENT=${COMPONENT:-atmos} -export CDATEm1=$($NDATE -24 $CDATE) -export PDYm1=$(echo $CDATEm1 | cut -c1-8) - -export COMIN="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" -export DATAROOT="$RUNDIR/$CDATE/$CDUMP/awips$FHRGRP" -[[ -d $DATAROOT ]] && rm -rf $DATAROOT -mkdir -p $DATAROOT - ################################################################################ echo echo "=============== BEGIN AWIPS ===============" -export SENDCOM="YES" -export COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" -export PCOM="$COMOUT/wmo" -export jlogfile="$ROTDIR/logs/$CDATE/jgfs_awips.log" - -SLEEP_TIME=1800 -SLEEP_INT=5 -SLEEP_LOOP_MAX=$(expr $SLEEP_TIME / $SLEEP_INT) -for fhr in $fhrlst; do - - if [ $fhr -gt $FHMAX_GFS ]; then - echo "Nothing to process for FHR = $fhr, cycle" +for fhr in ${fhrlst}; do + if (( fhr > FHMAX_GFS )); then + echo "Nothing to process for FHR = ${fhr}, cycle" continue fi fhmin=0 fhmax=84 - if [ $fhr -ge $fhmin -a $fhr -le $fhmax ] ; then - if [[ $(expr $fhr % 3) -eq 0 ]]; then - fhr3=$(printf %03i $fhr) - -# Check for input file existence. If not present, sleep -# Loop SLEEP_LOOP_MAX times. Abort if not found. 
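A note on the rewritten awips loop that follows: it forces base-10 arithmetic with 10# before re-padding the forecast hour with printf %03d, since zero-padded hours such as 018 would otherwise be parsed as octal. A minimal, standalone bash sketch of that idiom (the forecast hours here are examples only, not tied to any job):
#! /usr/bin/env bash
# illustrative sketch only -- not part of the patch
for fhr in 006 012 018 084; do
  ihr=$((10#${fhr}))                 # force base-10 so "018" is not read as an octal literal
  if (( ihr % 3 == 0 )); then
    fhr3=$(printf "%03d" "${ihr}")   # re-pad to three digits for f### file names
    echo "would process f${fhr3}"
  fi
done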
- ic=1 - while [[ $ic -le $SLEEP_LOOP_MAX ]]; do - if [ -s $COMOUT/$CDUMP.t${cyc}z.pgrb2b.0p25.f${fhr3}.idx ]; then - break - else - ic=$(expr $ic + 1) - sleep $SLEEP_INT - fi - if [ $ic -eq $SLEEP_LOOP_MAX ]; then - echo "***FATAL ERROR*** $COMOUT/$CDUMP.t${cyc}z.pgrb2b.0p25.f${fhr3}.idx NOT available" - export err=9 - err_chk - fi - done - - export fcsthrs=$fhr3 - export job="jgfs_awips_f${fcsthrs}_20km_${cyc}" - export DATA="${DATAROOT}/$job" - $AWIPS20SH - fi - - if [[ $(expr $fhr % 6) -eq 0 ]]; then - export job="jgfs_awips_f${fcsthrs}_${cyc}" - export DATA="${DATAROOT}/$job" - $AWIPSG2SH - fi + if (( fhr >= fhmin && fhr <= fhmax )); then + if ((fhr % 3 == 0)); then + fhr3=$(printf %03d $((10#${fhr}))) + export fcsthrs=${fhr3} + ${AWIPS20SH} + fi + + if ((fhr % 6 == 0)); then + ${AWIPSG2SH} + fi fi fhmin=90 fhmax=240 - if [ $fhr -ge $fhmin -a $fhr -le $fhmax ]; then - - if [[ $(expr $fhr % 6) -eq 0 ]]; then - fhr3=$(printf %03i $fhr) - -# Check for input file existence. If not present, sleep -# Loop SLEEP_LOOP_MAX times. Abort if not found. - ic=1 - while [[ $ic -le $SLEEP_LOOP_MAX ]]; do - if [ -s $COMOUT/$CDUMP.t${cyc}z.pgrb2b.0p25.f${fhr3}.idx ]; then - break - else - ic=$(expr $ic + 1) - sleep $SLEEP_INT - fi - if [ $ic -eq $SLEEP_LOOP_MAX ]; then - echo "***FATAL ERROR*** $COMOUT/$CDUMP.t${cyc}z.pgrb2b.0p25.f${fhr3}.idx NOT available" - export err=9 - err_chk - fi - done - - export fcsthrs=$fhr3 - export job="jgfs_awips_f${fcsthrs}_20km_${cyc}" - export DATA="${DATAROOT}/$job" - $AWIPS20SH - - export job="jgfs_awips_f${fcsthrs}_${cyc}" - export DATA="${DATAROOT}/$job" - $AWIPSG2SH - fi + if (( fhr >= fhmin && fhr <= fhmax )); then + if ((fhr % 6 == 0)); then + fhr3=$(printf %03i $((10#${fhr}))) + export fcsthrs=${fhr3} + ${AWIPS20SH} + ${AWIPSG2SH} + fi fi done ############################################################### # Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi +if [[ ${KEEPDATA:-"NO"} == "NO" ]] ; then rm -rf "${DATA}" ; fi + exit 0 diff --git a/jobs/rocoto/coupled_ic.sh b/jobs/rocoto/coupled_ic.sh index 973d6b4ad4..ca2cfc82af 100755 --- a/jobs/rocoto/coupled_ic.sh +++ b/jobs/rocoto/coupled_ic.sh @@ -1,14 +1,12 @@ -#!/bin/bash +#! /usr/bin/env bash -set -x +source "$HOMEgfs/ush/preamble.sh" ############################################################### ## Abstract: -## Create FV3 initial conditions from GFS intitial conditions -## RUN_ENVIR : runtime environment (emc | nco) +## Copy initial conditions from BASE_CPLIC to ROTDIR for coupled forecast-only runs ## HOMEgfs : /full/path/to/workflow ## EXPDIR : /full/path/to/config/files -## CDATE : current date (YYYYMMDDHH) ## CDUMP : cycle name (gdas / gfs) ## PDY : current date (YYYYMMDD) ## cyc : current cycle (HH) @@ -16,101 +14,124 @@ set -x ############################################################### # Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} err=0 ############################################################### # Source relevant configs configs="base coupled_ic wave" -for config in $configs; do - . $EXPDIR/config.${config} +for config in ${configs}; do + . ${EXPDIR}/config.${config} status=$? - [[ $status -ne 0 ]] && exit $status + [[ ${status} -ne 0 ]] && exit ${status} done ############################################################### # Source machine runtime environment -. 
$BASE_ENV/${machine}.env config.coupled_ic +. ${BASE_ENV}/${machine}.env config.coupled_ic status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} -# Create ICSDIR if needed -[[ ! -d $ICSDIR/$CDATE ]] && mkdir -p $ICSDIR/$CDATE -[[ ! -d $ICSDIR/$CDATE/atmos ]] && mkdir -p $ICSDIR/$CDATE/atmos -[[ ! -d $ICSDIR/$CDATE/ocn ]] && mkdir -p $ICSDIR/$CDATE/ocn -[[ ! -d $ICSDIR/$CDATE/ice ]] && mkdir -p $ICSDIR/$CDATE/ice +############################################################### +# Locally scoped variables and functions +GDATE=$(date -d "${PDY} ${cyc} - ${assim_freq} hours" +%Y%m%d%H) +gPDY="${GDATE:0:8}" +gcyc="${GDATE:8:2}" -if [ $ICERES = '025' ]; then - ICERESdec="0.25" -fi -if [ $ICERES = '050' ]; then - ICERESdec="0.50" -fi +error_message(){ + echo "FATAL ERROR: Unable to copy ${1} to ${2} (Error code ${3})" +} -# Setup ATM initial condition files -cp -r $BASE_CPLIC/$CPL_ATMIC/$CDATE/$CDUMP/* $ICSDIR/$CDATE/atmos/ +############################################################### +# Start staging + +# Stage the FV3 initial conditions to ROTDIR (cold start) +YMD=${PDY} HH=${cyc} generate_com -r COM_ATMOS_INPUT +[[ ! -d "${COM_ATMOS_INPUT}" ]] && mkdir -p "${COM_ATMOS_INPUT}" +source="${BASE_CPLIC}/${CPL_ATMIC}/${PDY}${cyc}/${CDUMP}/${CASE}/INPUT/gfs_ctrl.nc" +target="${COM_ATMOS_INPUT}/gfs_ctrl.nc" +${NCP} "${source}" "${target}" rc=$? -if [[ $rc -ne 0 ]] ; then - echo "FATAL: Unable to copy $BASE_CPLIC/$CPL_ATMIC/$CDATE/$CDUMP/* to $ICSDIR/$CDATE/atmos/ (Error code $rc)" -fi -((err+=$rc)) - +[[ ${rc} -ne 0 ]] && error_message "${source}" "${target}" "${rc}" +err=$((err + rc)) +for ftype in gfs_data sfc_data; do + for tt in $(seq 1 6); do + source="${BASE_CPLIC}/${CPL_ATMIC}/${PDY}${cyc}/${CDUMP}/${CASE}/INPUT/${ftype}.tile${tt}.nc" + target="${COM_ATMOS_INPUT}/${ftype}.tile${tt}.nc" + ${NCP} "${source}" "${target}" + rc=$? + [[ ${rc} -ne 0 ]] && error_message "${source}" "${target}" "${rc}" + err=$((err + rc)) + done +done -# Setup Ocean IC files -cp -r $BASE_CPLIC/$CPL_OCNIC/$CDATE/ocn/$OCNRES/MOM*.nc $ICSDIR/$CDATE/ocn/ -rc=$? -if [[ $rc -ne 0 ]] ; then - echo "FATAL: Unable to copy $BASE_CPLIC/$CPL_OCNIC/$CDATE/ocn/$OCNRES/MOM*.nc to $ICSDIR/$CDATE/ocn/ (Error code $rc)" +# Stage ocean initial conditions to ROTDIR (warm start) +if [[ "${DO_OCN:-}" = "YES" ]]; then + YMD=${gPDY} HH=${gcyc} generate_com -r COM_OCEAN_RESTART + [[ ! -d "${COM_OCEAN_RESTART}" ]] && mkdir -p "${COM_OCEAN_RESTART}" + source="${BASE_CPLIC}/${CPL_OCNIC}/${PDY}${cyc}/ocn/${OCNRES}/MOM.res.nc" + target="${COM_OCEAN_RESTART}/${PDY}.${cyc}0000.MOM.res.nc" + ${NCP} "${source}" "${target}" + rc=$? + [[ ${rc} -ne 0 ]] && error_message "${source}" "${target}" "${rc}" + err=$((err + rc)) + case "${OCNRES}" in + "025") + for nn in $(seq 1 4); do + source="${BASE_CPLIC}/${CPL_OCNIC}/${PDY}${cyc}/ocn/${OCNRES}/MOM.res_${nn}.nc" + if [[ -f "${source}" ]]; then + target="${COM_OCEAN_RESTART}/${PDY}.${cyc}0000.MOM.res_${nn}.nc" + ${NCP} "${source}" "${target}" + rc=$? + [[ ${rc} -ne 0 ]] && error_message "${source}" "${target}" "${rc}" + err=$((err + rc)) + fi + done + ;; + *) + echo "FATAL ERROR: Unsupported ocean resolution ${OCNRES}" + rc=1 + err=$((err + rc)) + ;; + esac fi -((err+=$rc)) -#Setup Ice IC files -cp $BASE_CPLIC/$CPL_ICEIC/$CDATE/ice/$ICERES/cice5_model_${ICERESdec}.res_$CDATE.nc $ICSDIR/$CDATE/ice/cice_model_${ICERESdec}.res_$CDATE.nc -rc=$? 
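The staging code above copies each file with ${NCP}, reports failures through the error_message helper, and accumulates return codes in err so that every copy is attempted before the job aborts. A minimal sketch of that pattern, assuming plain cp in place of the workflow's ${NCP} and throwaway temp directories in place of BASE_CPLIC/ROTDIR:
#! /usr/bin/env bash
# sketch of the copy-and-accumulate-errors pattern; cp and the temp dirs are stand-ins
err=0
error_message() {
  echo "FATAL ERROR: Unable to copy ${1} to ${2} (Error code ${3})"
}
srcdir=$(mktemp -d); tgtdir=$(mktemp -d)
touch "${srcdir}/gfs_ctrl.nc"   # only one of the two expected files exists
for f in gfs_ctrl.nc gfs_data.tile1.nc; do
  cp "${srcdir}/${f}" "${tgtdir}/${f}"
  rc=$?
  (( rc != 0 )) && error_message "${srcdir}/${f}" "${tgtdir}/${f}" "${rc}"
  err=$((err + rc))
done
# abort once, after every copy has been attempted
if (( err != 0 )); then
  echo "FATAL ERROR: staging incomplete; ABORT!"
  exit "${err}"
fi
exit 0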
-if [[ $rc -ne 0 ]] ; then - echo "FATAL: Unable to copy $BASE_CPLIC/$CPL_ICEIC/$CDATE/ice/$ICERES/cice5_model_${ICERESdec}.res_$CDATE.nc to $ICSDIR/$CDATE/ice/cice_model_${ICERESdec}.res_$CDATE.nc (Error code $rc)" +# Stage ice initial conditions to ROTDIR (cold start as these are SIS2 generated) +if [[ "${DO_ICE:-}" = "YES" ]]; then + YMD=${PDY} HH=${cyc} generate_com -r COM_ICE_RESTART + [[ ! -d "${COM_ICE_RESTART}" ]] && mkdir -p "${COM_ICE_RESTART}" + ICERESdec=$(echo "${ICERES}" | awk '{printf "%0.2f", $1/100}') + source="${BASE_CPLIC}/${CPL_ICEIC}/${PDY}${cyc}/ice/${ICERES}/cice5_model_${ICERESdec}.res_${PDY}${cyc}.nc" + target="${COM_ICE_RESTART}/${PDY}.${cyc}0000.cice_model.res.nc" + ${NCP} "${source}" "${target}" + rc=$? + [[ ${rc} -ne 0 ]] && error_message "${source}" "${target}" "${rc}" + err=$((err + rc)) fi -((err+=$rc)) -if [ $DO_WAVE = "YES" ]; then - [[ ! -d $ICSDIR/$CDATE/wav ]] && mkdir -p $ICSDIR/$CDATE/wav - for grdID in $waveGRD - do - cp $BASE_CPLIC/$CPL_WAVIC/$CDATE/wav/$grdID/*restart.$grdID $ICSDIR/$CDATE/wav/ +# Stage the WW3 initial conditions to ROTDIR (warm start; TODO: these should be placed in $RUN.$gPDY/$gcyc) +if [[ "${DO_WAVE:-}" = "YES" ]]; then + YMD=${PDY} HH=${cyc} generate_com -r COM_WAVE_RESTART + [[ ! -d "${COM_WAVE_RESTART}" ]] && mkdir -p "${COM_WAVE_RESTART}" + for grdID in ${waveGRD}; do # TODO: check if this is a bash array; if so adjust + source="${BASE_CPLIC}/${CPL_WAVIC}/${PDY}${cyc}/wav/${grdID}/${PDY}.${cyc}0000.restart.${grdID}" + target="${COM_WAVE_RESTART}/${PDY}.${cyc}0000.restart.${grdID}" + ${NCP} "${source}" "${target}" rc=$? - if [[ $rc -ne 0 ]] ; then - echo "FATAL: Unable to copy $BASE_CPLIC/$CPL_WAVIC/$CDATE/wav/$grdID/*restart.$grdID to $ICSDIR/$CDATE/wav/ (Error code $rc)" - fi - ((err+=$rc)) + [[ ${rc} -ne 0 ]] && error_message "${source}" "${target}" "${rc}" + err=$((err + rc)) done fi -# Stage the FV3 initial conditions to ROTDIR -export OUTDIR="$ICSDIR/$CDATE/atmos/$CASE/INPUT" -COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/atmos" -[[ ! -d $COMOUT ]] && mkdir -p $COMOUT -cd $COMOUT || exit 99 -rm -rf INPUT -$NLN $OUTDIR . - -#Stage the WW3 initial conditions to ROTDIR -if [ $DO_WAVE = "YES" ]; then - export OUTDIRw="$ICSDIR/$CDATE/wav" - COMOUTw="$ROTDIR/$CDUMP.$PDY/$cyc/wave/restart" - [[ ! -d $COMOUTw ]] && mkdir -p $COMOUTw - cd $COMOUTw || exit 99 - $NLN $OUTDIRw/* . +############################################################### +# Check for errors and exit if any of the above failed +if [[ "${err}" -ne 0 ]] ; then + echo "FATAL ERROR: Unable to copy ICs from ${BASE_CPLIC} to ${ROTDIR}; ABORT!" + exit "${err}" fi -if [[ $err -ne 0 ]] ; then - echo "Fatal Error: ICs are not properly set-up" - exit $err -fi - ############################################################## # Exit cleanly - -set +x exit 0 diff --git a/jobs/rocoto/earc.sh b/jobs/rocoto/earc.sh index 5d57abf26e..c4c7341698 100755 --- a/jobs/rocoto/earc.sh +++ b/jobs/rocoto/earc.sh @@ -1,222 +1,20 @@ -#!/bin/bash -x +#! /usr/bin/env bash -############################################################### -## Abstract: -## Ensemble archive driver script -## RUN_ENVIR : runtime environment (emc | nco) -## HOMEgfs : /full/path/to/workflow -## EXPDIR : /full/path/to/config/files -## CDATE : current analysis date (YYYYMMDDHH) -## PDY : current date (YYYYMMDD) -## cyc : current cycle (HH) -## CDUMP : cycle name (gdas / gfs) -## ENSGRP : ensemble sub-group to archive (0, 1, 2, ...) 
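The ice-staging hunk above derives the decimal resolution with a single awk call (ICERESdec) instead of the hard-coded 025/050 table it replaces. A quick standalone check of that conversion, with example resolution codes:
#! /usr/bin/env bash
# convert a resolution code in hundredths of a degree to a two-decimal string
for ICERES in 025 050 100; do       # example codes only
  ICERESdec=$(echo "${ICERES}" | awk '{printf "%0.2f", $1/100}')
  echo "ICERES=${ICERES} -> ${ICERESdec}"
done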
-############################################################### +source "${HOMEgfs}/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh -status=$? -[[ $status -ne 0 ]] && exit $status - -############################################################### -# Source relevant configs -configs="base earc" -for config in $configs; do - . $EXPDIR/config.${config} - status=$? - [[ $status -ne 0 ]] && exit $status -done - -export COMPONENT=${COMPONENT:-atmos} - -n=$((ENSGRP)) - -# ICS are restarts and always lag INC by $assim_freq hours. -EARCINC_CYC=$ARCH_CYC -EARCICS_CYC=$((ARCH_CYC-assim_freq)) -if [ $EARCICS_CYC -lt 0 ]; then - EARCICS_CYC=$((EARCICS_CYC+24)) -fi - -# EnKF update in GFS, GDAS or both -CDUMP_ENKF=$(echo ${EUPD_CYC:-"gdas"} | tr a-z A-Z) - -ARCH_LIST="$ROTDIR/enkf${CDUMP}.$PDY/$cyc/$COMPONENT/earc$ENSGRP" -[[ -d $ARCH_LIST ]] && rm -rf $ARCH_LIST -mkdir -p $ARCH_LIST -cd $ARCH_LIST - -$HOMEgfs/ush/hpssarch_gen.sh enkf${CDUMP} +. "${HOMEgfs}/ush/load_fv3gfs_modules.sh" status=$? -if [ $status -ne 0 ]; then - echo "$HOMEgfs/ush/hpssarch_gen.sh enkf${CDUMP} failed, ABORT!" - exit $status -fi - -cd $ROTDIR - - -################################################################### -# ENSGRP > 0 archives a group of ensemble members -firstday=$($NDATE +24 $SDATE) -if [[ $ENSGRP -gt 0 ]] && [[ $HPSSARCH = "YES" || $LOCALARCH = "YES" ]]; then - -#--set the archiving command and create local directories, if necessary - TARCMD="htar" - if [[ $LOCALARCH = "YES" ]]; then - TARCMD="tar" - [ ! -d $ATARDIR/$CDATE ] && mkdir -p $ATARDIR/$CDATE - fi - -#--determine when to save ICs for warm start - SAVEWARMICA="NO" - SAVEWARMICB="NO" - mm=$(echo $CDATE|cut -c 5-6) - dd=$(echo $CDATE|cut -c 7-8) - nday=$(( (mm-1)*30+dd )) - mod=$(($nday % $ARCH_WARMICFREQ)) - if [ $CDATE -eq $firstday -a $cyc -eq $EARCINC_CYC ]; then SAVEWARMICA="YES" ; fi - if [ $CDATE -eq $firstday -a $cyc -eq $EARCICS_CYC ]; then SAVEWARMICB="YES" ; fi - if [ $mod -eq 0 -a $cyc -eq $EARCINC_CYC ]; then SAVEWARMICA="YES" ; fi - if [ $mod -eq 0 -a $cyc -eq $EARCICS_CYC ]; then SAVEWARMICB="YES" ; fi - - if [ $EARCICS_CYC -eq 18 ]; then - nday1=$((nday+1)) - mod1=$(($nday1 % $ARCH_WARMICFREQ)) - if [ $mod1 -eq 0 -a $cyc -eq $EARCICS_CYC ] ; then SAVEWARMICB="YES" ; fi - if [ $mod1 -ne 0 -a $cyc -eq $EARCICS_CYC ] ; then SAVEWARMICB="NO" ; fi - if [ $CDATE -eq $SDATE -a $cyc -eq $EARCICS_CYC ] ; then SAVEWARMICB="YES" ; fi - fi - - if [ $CDATE -gt $SDATE ]; then # Don't run for first half cycle - - $TARCMD -P -cvf $ATARDIR/$CDATE/enkf${CDUMP}_grp${ENSGRP}.tar $(cat $ARCH_LIST/enkf${CDUMP}_grp${n}.txt) - status=$? - if [ $status -ne 0 -a $CDATE -ge $firstday ]; then - echo "$(echo $TARCMD | tr 'a-z' 'A-Z') $CDATE enkf${CDUMP}_grp${ENSGRP}.tar failed" - exit $status - fi - - if [ $SAVEWARMICA = "YES" -a $cyc -eq $EARCINC_CYC ]; then - $TARCMD -P -cvf $ATARDIR/$CDATE/enkf${CDUMP}_restarta_grp${ENSGRP}.tar $(cat $ARCH_LIST/enkf${CDUMP}_restarta_grp${n}.txt) - status=$? - if [ $status -ne 0 ]; then - echo "$(echo $TARCMD | tr 'a-z' 'A-Z') $CDATE enkf${CDUMP}_restarta_grp${ENSGRP}.tar failed" - exit $status - fi - fi - - if [ $SAVEWARMICB = "YES" -a $cyc -eq $EARCICS_CYC ]; then - $TARCMD -P -cvf $ATARDIR/$CDATE/enkf${CDUMP}_restartb_grp${ENSGRP}.tar $(cat $ARCH_LIST/enkf${CDUMP}_restartb_grp${n}.txt) - status=$? 
- if [ $status -ne 0 ]; then - echo "$(echo $TARCMD | tr 'a-z' 'A-Z') $CDATE enkf${CDUMP}_restartb_grp${ENSGRP}.tar failed" - exit $status - fi - fi +[[ ${status} -ne 0 ]] && exit "${status}" - fi # CDATE>SDATE - -fi - - -################################################################### -# ENSGRP 0 archives ensemble means and copy data to online archive -if [ $ENSGRP -eq 0 ]; then - - if [[ $HPSSARCH = "YES" || $LOCALARCH = "YES" ]]; then - -#--set the archiving command and create local directories, if necessary - TARCMD="htar" - if [[ $LOCALARCH = "YES" ]]; then - TARCMD="tar" - [ ! -d $ATARDIR/$CDATE ] && mkdir -p $ATARDIR/$CDATE - fi - - $TARCMD -P -cvf $ATARDIR/$CDATE/enkf${CDUMP}.tar $(cat $ARCH_LIST/enkf${CDUMP}.txt) - status=$? - if [ $status -ne 0 -a $CDATE -ge $firstday ]; then - echo "$(echo $TARCMD | tr 'a-z' 'A-Z') $CDATE enkf${CDUMP}.tar failed" - exit $status - fi - fi - - #-- Archive online for verification and diagnostics - [[ ! -d $ARCDIR ]] && mkdir -p $ARCDIR - cd $ARCDIR - - $NCP $ROTDIR/enkf${CDUMP}.$PDY/$cyc/$COMPONENT/${CDUMP}.t${cyc}z.enkfstat enkfstat.${CDUMP}.$CDATE - $NCP $ROTDIR/enkf${CDUMP}.$PDY/$cyc/$COMPONENT/${CDUMP}.t${cyc}z.gsistat.ensmean gsistat.${CDUMP}.${CDATE}.ensmean - - if [ $CDUMP_ENKF != "GDAS" ]; then - $NCP $ROTDIR/enkfgfs.$PDY/$cyc/$COMPONENT/${CDUMP}.t${cyc}z.enkfstat enkfstat.gfs.$CDATE - $NCP $ROTDIR/enkfgfs.$PDY/$cyc/$COMPONENT/${CDUMP}.t${cyc}z.gsistat.ensmean gsistat.gfs.${CDATE}.ensmean - fi - -fi - - -if [[ "${DELETE_COM_IN_ARCHIVE_JOB:-YES}" == NO ]] ; then - exit 0 -fi +export job="earc" +export jobid="${job}.$$" ############################################################### -# ENSGRP 0 also does clean-up -if [ $ENSGRP -eq 0 ]; then - - # Start start and end dates to remove - GDATEEND=$($NDATE -${RMOLDEND_ENKF:-24} $CDATE) - GDATE=$($NDATE -${RMOLDSTD_ENKF:-120} $CDATE) - while [ $GDATE -le $GDATEEND ]; do - - gPDY=$(echo $GDATE | cut -c1-8) - gcyc=$(echo $GDATE | cut -c9-10) - - # Loop over GDAS and GFS EnKF directories separately. - clist="gdas gfs" - for ctype in $clist; do - COMIN_ENS="$ROTDIR/enkf$ctype.$gPDY/$gcyc/$COMPONENT" - if [ -d $COMIN_ENS ]; then - rocotolog="$EXPDIR/logs/${GDATE}.log" - if [ -f $rocotolog ]; then - testend=$(tail -n 1 $rocotolog | grep "This cycle is complete: Success") - rc=$? - if [ $rc -eq 0 ]; then - # Retain f006.ens files. Remove everything else - for file in $(ls $COMIN_ENS | grep -v f006.ens); do - rm -rf $COMIN_ENS/$file - done - fi - fi - fi - - # Remove empty directories - if [ -d $COMIN_ENS ] ; then - [[ ! "$(ls -A $COMIN_ENS)" ]] && rm -rf $COMIN_ENS - fi - done - - # Advance to next cycle - GDATE=$($NDATE +$assim_freq $GDATE) - - done - -fi +# Execute the JJOB +"${HOMEgfs}/jobs/JGDAS_ENKF_ARCHIVE" +status=$? -# Remove enkf*.$rPDY for the older of GDATE or RDATE -GDATE=$($NDATE -${RMOLDSTD_ENKF:-120} $CDATE) -fhmax=$FHMAX_GFS -RDATE=$($NDATE -$fhmax $CDATE) -if [ $GDATE -lt $RDATE ]; then - RDATE=$GDATE -fi -rPDY=$(echo $RDATE | cut -c1-8) -clist="gdas gfs" -for ctype in $clist; do - COMIN="$ROTDIR/enkf$ctype.$rPDY" - [[ -d $COMIN ]] && rm -rf $COMIN -done -############################################################### -exit 0 +exit "${status}" diff --git a/jobs/rocoto/ecen.sh b/jobs/rocoto/ecen.sh index 8c88a63ef5..744956b1ff 100755 --- a/jobs/rocoto/ecen.sh +++ b/jobs/rocoto/ecen.sh @@ -1,27 +1,32 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules -. 
$HOMEgfs/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} ############################################################### # Loop over groups to Execute the JJOB -fhrlst=$(echo $FHRLST | sed -e 's/_/ /g; s/f/ /g; s/,/ /g') -for fhr in $fhrlst; do +fhrlst=$(echo ${FHRLST} | sed -e 's/_/ /g; s/f/ /g; s/,/ /g') +for fhr in ${fhrlst}; do - export FHMIN_ECEN=$fhr - export FHMAX_ECEN=$fhr - export FHOUT_ECEN=$fhr - export job=ecen${fhr} + export FHMIN_ECEN=${fhr} + export FHMAX_ECEN=${fhr} + export FHOUT_ECEN=${fhr} + export job=ecen + export jobid="${job}.$$" - $HOMEgfs/jobs/JGDAS_ENKF_ECEN + ${HOMEgfs}/jobs/JGDAS_ENKF_ECEN status=$? - [[ $status -ne 0 ]] && exit $status + [[ ${status} -ne 0 ]] && exit ${status} done ############################################################### # Exit out cleanly + + exit 0 diff --git a/jobs/rocoto/echgres.sh b/jobs/rocoto/echgres.sh index 733257349e..5779a91f06 100755 --- a/jobs/rocoto/echgres.sh +++ b/jobs/rocoto/echgres.sh @@ -1,13 +1,20 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} + +export job="echgres" +export jobid="${job}.$$" ############################################################### # Execute the JJOB -$HOMEgfs/jobs/JGDAS_ATMOS_CHGRES_FORENKF +${HOMEgfs}/jobs/JGDAS_ATMOS_CHGRES_FORENKF status=$? -exit $status + + +exit ${status} diff --git a/jobs/rocoto/ediag.sh b/jobs/rocoto/ediag.sh index cb9df99b9b..8462edf296 100755 --- a/jobs/rocoto/ediag.sh +++ b/jobs/rocoto/ediag.sh @@ -1,13 +1,20 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} + +export job="ediag" +export jobid="${job}.$$" ############################################################### # Execute the JJOB -$HOMEgfs/jobs/JGDAS_ENKF_DIAG +${HOMEgfs}/jobs/JGDAS_ENKF_DIAG status=$? -exit $status + + +exit ${status} diff --git a/jobs/rocoto/efcs.sh b/jobs/rocoto/efcs.sh index 04d5eb3c3e..46a25ac759 100755 --- a/jobs/rocoto/efcs.sh +++ b/jobs/rocoto/efcs.sh @@ -1,13 +1,34 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh -status=$? -[[ $status -ne 0 ]] && exit $status +#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +#status=$? +#[[ ${status} -ne 0 ]] && exit ${status} + +# TODO: clean this up +source "${HOMEgfs}/ush/detect_machine.sh" +set +x +source "${HOMEgfs}/ush/module-setup.sh" +module use "${HOMEgfs}/sorc/ufs_model.fd/tests" +module load modules.ufs_model.lua +# Workflow needs utilities from prod_util (setPDY.sh, ndate, etc.) 
+module load prod_util +if [[ "${MACHINE_ID}" = "wcoss2" ]]; then + module load cray-pals +fi +module list +unset MACHINE_ID +set_trace + +export job="efcs" +export jobid="${job}.$$" ############################################################### # Execute the JJOB -$HOMEgfs/jobs/JGDAS_ENKF_FCST +${HOMEgfs}/jobs/JGDAS_ENKF_FCST status=$? -exit $status + +exit ${status} diff --git a/jobs/rocoto/eobs.sh b/jobs/rocoto/eobs.sh index c635f9ed44..95fa42cb08 100755 --- a/jobs/rocoto/eobs.sh +++ b/jobs/rocoto/eobs.sh @@ -1,13 +1,20 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} + +export job="eobs" +export jobid="${job}.$$" ############################################################### # Execute the JJOB -$HOMEgfs/jobs/JGDAS_ENKF_SELECT_OBS +${HOMEgfs}/jobs/JGDAS_ENKF_SELECT_OBS status=$? -exit $status + + +exit ${status} diff --git a/jobs/rocoto/eomg.sh b/jobs/rocoto/eomg.sh deleted file mode 100755 index a0519c5318..0000000000 --- a/jobs/rocoto/eomg.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash -x - -############################################################### -# Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh -status=$? -[[ $status -ne 0 ]] && exit $status - -############################################################### -# Execute the JJOB -$HOMEgfs/jobs/JGDAS_ENKF_INNOVATE_OBS -status=$? -exit $status diff --git a/jobs/rocoto/epos.sh b/jobs/rocoto/epos.sh index e7ad93e69e..d1f890a930 100755 --- a/jobs/rocoto/epos.sh +++ b/jobs/rocoto/epos.sh @@ -1,28 +1,33 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} +export job="epos" +export jobid="${job}.$$" + ############################################################### # Loop over groups to Execute the JJOB -fhrlst=$(echo $FHRLST | sed -e 's/_/ /g; s/f/ /g; s/,/ /g') +fhrlst=$(echo ${FHRLST} | sed -e 's/_/ /g; s/f/ /g; s/,/ /g') -for fhr in $fhrlst; do - - export FHMIN_EPOS=$fhr - export FHMAX_EPOS=$fhr - export FHOUT_EPOS=$fhr - export job=epos${fhr} - - $HOMEgfs/jobs/JGDAS_ENKF_POST +for fhr in ${fhrlst}; do + + export FHMIN_EPOS=${fhr} + export FHMAX_EPOS=${fhr} + export FHOUT_EPOS=${fhr} + ${HOMEgfs}/jobs/JGDAS_ENKF_POST status=$? - [[ $status -ne 0 ]] && exit $status + [[ ${status} -ne 0 ]] && exit ${status} done ############################################################### # Exit out cleanly + + exit 0 diff --git a/jobs/rocoto/esfc.sh b/jobs/rocoto/esfc.sh index 50ee46e97a..85f44151c9 100755 --- a/jobs/rocoto/esfc.sh +++ b/jobs/rocoto/esfc.sh @@ -1,13 +1,20 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? 
-[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} + +export job="esfc" +export jobid="${job}.$$" ############################################################### # Execute the JJOB -$HOMEgfs/jobs/JGDAS_ENKF_SFC +${HOMEgfs}/jobs/JGDAS_ENKF_SFC status=$? -exit $status + + +exit ${status} diff --git a/jobs/rocoto/eupd.sh b/jobs/rocoto/eupd.sh index 1d8dc3b119..3ed028f87a 100755 --- a/jobs/rocoto/eupd.sh +++ b/jobs/rocoto/eupd.sh @@ -1,13 +1,20 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} + +export job="eupd" +export jobid="${job}.$$" ############################################################### # Execute the JJOB -$HOMEgfs/jobs/JGDAS_ENKF_UPDATE +${HOMEgfs}/jobs/JGDAS_ENKF_UPDATE status=$? -exit $status + + +exit ${status} diff --git a/jobs/rocoto/fcst.sh b/jobs/rocoto/fcst.sh index 199c89724a..512bee127f 100755 --- a/jobs/rocoto/fcst.sh +++ b/jobs/rocoto/fcst.sh @@ -1,13 +1,53 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh -status=$? -[[ $status -ne 0 ]] && exit $status +#. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +#status=$? +#[[ ${status} -ne 0 ]] && exit ${status} + +# TODO: clean this up +source "${HOMEgfs}/ush/detect_machine.sh" +set +x +source "${HOMEgfs}/ush/module-setup.sh" +module use "${HOMEgfs}/sorc/ufs_model.fd/tests" +module load modules.ufs_model.lua +module load prod_util +if [[ "${MACHINE_ID}" = "wcoss2" ]]; then + module load cray-pals +fi +if [[ "${MACHINE_ID}" = "hera" ]]; then + module use "/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/modulefiles/core" + module load "miniconda3/4.6.14" + module load "gfs_workflow/1.0.0" +# TODO: orion and wcoss2 will be uncommented when they are ready. This comment block will be removed in the next PR +#elif [[ "${MACHINE_ID}" = "orion" ]]; then +# module use "/home/rmahajan/opt/global-workflow/modulefiles/core" +# module load "python/3.7.5" +# module load "gfs_workflow/1.0.0" +#elif [[ "${MACHINE_ID}" = "wcoss2" ]]; then +# module load "python/3.7.5" +fi +module list +unset MACHINE_ID +set_trace + +############################################################### +# exglobal_forecast.py requires the following in PYTHONPATH +# This will be moved to a module load when ready +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src:${HOMEgfs}/ush/python/pygfs" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH + +export job="fcst" +export jobid="${job}.$$" ############################################################### # Execute the JJOB -$HOMEgfs/jobs/JGLOBAL_FORECAST +${HOMEgfs}/jobs/JGLOBAL_FORECAST status=$? -exit $status + + +exit ${status} diff --git a/jobs/rocoto/fit2obs.sh b/jobs/rocoto/fit2obs.sh new file mode 100755 index 0000000000..d991234fbe --- /dev/null +++ b/jobs/rocoto/fit2obs.sh @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +echo +echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" +. "${HOMEgfs}/ush/load_fv3gfs_modules.sh" +status=$? 
+[[ ${status} -ne 0 ]] && exit "${status}" + +export job="fit2obs" +export jobid="${job}.$$" + +############################################################### +echo +echo "=============== START TO RUN FIT2OBS ===============" +# Execute the JJOB +"${HOMEgfs}/jobs/JGDAS_FIT2OBS" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +exit 0 diff --git a/jobs/rocoto/gempak.sh b/jobs/rocoto/gempak.sh index 2b119ab3f4..14950535c8 100755 --- a/jobs/rocoto/gempak.sh +++ b/jobs/rocoto/gempak.sh @@ -1,69 +1,17 @@ -#!/bin/bash -x +#! /usr/bin/env bash -############################################################### -## Abstract: -## Inline gempak driver script -## RUN_ENVIR : runtime environment (emc | nco) -## HOMEgfs : /full/path/to/workflow -## EXPDIR : /full/path/to/config/files -## CDATE : current analysis date (YYYYMMDDHH) -## CDUMP : cycle name (gdas / gfs) -## PDY : current date (YYYYMMDD) -## cyc : current cycle (HH) -############################################################### +source "${HOMEgfs}/ush/preamble.sh" ############################################################### -echo -echo "=============== BEGIN TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. "${HOMEgfs}/ush/load_fv3gfs_modules.sh" status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} +export job="gempak" +export jobid="${job}.$$" -############################################################### -echo -echo "=============== BEGIN TO SOURCE RELEVANT CONFIGS ===============" -configs="base gempak" -for config in $configs; do - . $EXPDIR/config.${config} - status=$? - [[ $status -ne 0 ]] && exit $status -done - +# Execute the JJOB +${HOMEgfs}/jobs/JGFS_ATMOS_GEMPAK -############################################################### -echo -echo "=============== BEGIN TO SOURCE MACHINE RUNTIME ENVIRONMENT ===============" -. $BASE_ENV/${machine}.env gempak status=$? 
-[[ $status -ne 0 ]] && exit $status - -############################################################### -export COMPONENT=${COMPONENT:-atmos} -export CDATEm1=$($NDATE -24 $CDATE) -export PDYm1=$(echo $CDATEm1 | cut -c1-8) - -export COMIN="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" -export DATAROOT="$RUNDIR/$CDATE/$CDUMP/gempak" -[[ -d $DATAROOT ]] && rm -rf $DATAROOT -mkdir -p $DATAROOT - - -################################################################################ -echo -echo "=============== BEGIN GEMPAK ===============" -export job="jgfs_gempak_${cyc}" -export jlogfile="$ROTDIR/logs/$CDATE/$job.log" -export DATA="${DATAROOT}/$job" -export SENDCOM="YES" -export COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT/gempak" -export FIXgfs="" # set blank so that GEMPAKSH defaults FIXgfs to HOMEgfs/gempak/fix -export USHgfs="" # set blank so that GEMPAKSH defaults FIXgfs to HOMEgfs/gempak/ush - -$GEMPAKSH - - -############################################################### -# Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi -exit 0 +exit ${status} diff --git a/jobs/rocoto/getic.sh b/jobs/rocoto/getic.sh index 700799b677..050b0dbe6d 100755 --- a/jobs/rocoto/getic.sh +++ b/jobs/rocoto/getic.sh @@ -1,161 +1,60 @@ -#!/bin/bash -x - -############################################################### -## Abstract: -## Get GFS intitial conditions -## RUN_ENVIR : runtime environment (emc | nco) -## HOMEgfs : /full/path/to/workflow -## EXPDIR : /full/path/to/config/files -## CDATE : current date (YYYYMMDDHH) -## CDUMP : cycle name (gdas / gfs) -## PDY : current date (YYYYMMDD) -## cyc : current cycle (HH) -############################################################### - -############################################################### -# Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh -status=$? -[[ $status -ne 0 ]] && exit $status - -############################################################### -# Source relevant configs -configs="base getic init" -for config in $configs; do - . $EXPDIR/config.${config} - status=$? - [[ $status -ne 0 ]] && exit $status -done - -############################################################### -# Source machine runtime environment -. $BASE_ENV/${machine}.env getic -status=$? -[[ $status -ne 0 ]] && exit $status - -############################################################### -# Set script and dependency variables - -export yy=$(echo $CDATE | cut -c1-4) -export mm=$(echo $CDATE | cut -c5-6) -export dd=$(echo $CDATE | cut -c7-8) -export hh=${cyc:-$(echo $CDATE | cut -c9-10)} -export GDATE=$($NDATE -${assim_freq:-"06"} $CDATE) -export gyy=$(echo $GDATE | cut -c1-4) -export gmm=$(echo $GDATE | cut -c5-6) -export gdd=$(echo $GDATE | cut -c7-8) -export ghh=$(echo $GDATE | cut -c9-10) - -export DATA=${DATA:-${DATAROOT}/getic} -export EXTRACT_DIR=${DATA:-$EXTRACT_DIR} -export PRODHPSSDIR=${PRODHPSSDIR:-/NCEPPROD/hpssprod/runhistory} -export COMPONENT="atmos" -export gfs_ver=${gfs_ver:-"v16"} -export OPS_RES=${OPS_RES:-"C768"} -export GETICSH=${GETICSH:-${GDASINIT_DIR}/get_v16.data.sh} - -# Create ROTDIR/EXTRACT_DIR -if [ ! -d $ROTDIR ]; then mkdir -p $ROTDIR ; fi -if [ ! -d $EXTRACT_DIR ]; then mkdir -p $EXTRACT_DIR ; fi -cd $EXTRACT_DIR - -# Check version, cold/warm start, and resolution -if [[ $gfs_ver = "v16" && $EXP_WARM_START = ".true." 
&& $CASE = $OPS_RES ]]; then # Pull warm start ICs - no chgres - - # Pull RESTART files off HPSS - if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input - - # Pull prior cycle restart files - htar -xvf ${HPSSDIR}/${GDATE}/gdas_restartb.tar - status=$? - [[ $status -ne 0 ]] && exit $status - - # Pull current cycle restart files - htar -xvf ${HPSSDIR}/${CDATE}/gfs_restarta.tar - status=$? - [[ $status -ne 0 ]] && exit $status - - # Pull IAU increment files - htar -xvf ${HPSSDIR}/${CDATE}/gfs_netcdfa.tar - status=$? - [[ $status -ne 0 ]] && exit $status - - else # Opertional input - warm starts - - cd $ROTDIR - # Pull CDATE gfs restart tarball - htar -xvf ${PRODHPSSDIR}/rh${yy}/${yy}${mm}/${yy}${mm}${dd}/com_gfs_prod_gfs.${yy}${mm}${dd}_${hh}.gfs_restart.tar - # Pull GDATE gdas restart tarball - htar -xvf ${PRODHPSSDIR}/rh${gyy}/${gyy}${gmm}/${gyy}${gmm}${gdd}/com_gfs_prod_gdas.${gyy}${gmm}${gdd}_${ghh}.gdas_restart.tar - fi - -else # Pull chgres cube inputs for cold start IC generation - - # Run UFS_UTILS GETICSH - sh ${GETICSH} ${CDUMP} - status=$? - [[ $status -ne 0 ]] && exit $status - -fi - -# Move extracted data to ROTDIR -if [ ! -d ${ROTDIR}/${CDUMP}.${yy}${mm}${dd}/${hh}/${COMPONENT} ]; then mkdir -p ${ROTDIR}/${CDUMP}.${yy}${mm}${dd}/${hh}/${COMPONENT} ; fi -if [ $gfs_ver = v16 -a $RETRO = "YES" ]; then - mv ${EXTRACT_DIR}/${CDUMP}.${yy}${mm}${dd}/${hh}/* ${ROTDIR}/${CDUMP}.${yy}${mm}${dd}/${hh}/${COMPONENT} +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +## this script makes links to FV3GFS netcdf files under /public and copies over GFS analysis file for verification +## /home/rtfim/UFS_CAMSUITE/FV3GFSrun/FV3ICS/YYYYMMDDHH/gfs +## gfs.tHHz.sfcanl.nc -> /public/data/grids/gfs/netcdf/YYDDDHH00.gfs.tHHz.sfcanl.nc +## gfs.tHHz.atmanl.nc -> /public/data/grids/gfs/netcdf/YYDDDHH00.gfs.tHHz.atmanl.nc + +echo +echo "CDATE = $CDATE" +echo "CDUMP = $CDUMP" +echo "COMPONENT = $COMPONENT" +echo "ICSDIR = $ICSDIR" +echo "PUBDIR = $PUBDIR" +echo "GFSDIR = $GFSDIR" +echo "RETRODIR = $RETRODIR" +echo "ROTDIR = $ROTDIR" +echo "PSLOT = $PSLOT" +echo + +## initialize +yyyymmdd=`echo $CDATE | cut -c1-8` +hh=`echo $CDATE | cut -c9-10` +yyddd=`date +%y%j -u -d $yyyymmdd` +fv3ic_dir=${ROTDIR}/${CDUMP}.${yyyymmdd}/${hh}/model_data/${COMPONENT} + +## create links in FV3ICS directory +mkdir -p $fv3ic_dir +cd $fv3ic_dir +echo "making link to netcdf files under $fv3ic_dir" + +pubsfc_file=${yyddd}${hh}00.${CDUMP}.t${hh}z.sfcanl.nc +sfc_file=`echo $pubsfc_file | cut -d. -f2-` +pubatm_file=${yyddd}${hh}00.${CDUMP}.t${hh}z.atmanl.nc +atm_file=`echo $pubatm_file | cut -d. -f2-` + +echo "pubsfc_file: $pubsfc_file" +echo "pubatm_file: $pubatm_file" + +if [[ -f $RETRODIR/${pubatm_file} ]]; then + echo "linking $RETRODIR...." + echo "pubsfc_file: $pubsfc_file" + echo "pubatm_file: $pubatm_file" + ln -fs $RETRODIR/${pubsfc_file} $sfc_file + ln -fs $RETRODIR/${pubatm_file} $atm_file +elif [[ -f $PUBDIR/${pubatm_file} ]]; then + echo "linking $PUBDIR...." + ln -fs $PUBDIR/${pubsfc_file} $sfc_file + ln -fs $PUBDIR/${pubatm_file} $atm_file +elif [[ -f $RETRODIR/${CDUMP}.${yyyymmdd}/${hh}/${COMPONENT}/${atm_file} ]]; then + echo "linking $RETRODIR/${CDUMP}.${yyyymmdd}/${hh}/${COMPONENT}..." 
+ echo "sfc_file: $sfc_file" + echo "atm_file: $atm_file" + ln -s $RETRODIR/${CDUMP}.${yyyymmdd}/${hh}/${COMPONENT}/${sfc_file} + ln -s $RETRODIR/${CDUMP}.${yyyymmdd}/${hh}/${COMPONENT}/${atm_file} else - mv ${EXTRACT_DIR}/${CDUMP}.${yy}${mm}${dd}/${hh}/* ${ROTDIR}/${CDUMP}.${yy}${mm}${dd}/${hh} + echo "missing input files!" + exit 1 fi - -# Pull pgbanl file for verification/archival - v14+ -if [ $gfs_ver = v14 -o $gfs_ver = v15 -o $gfs_ver = v16 ]; then - for grid in 0p25 0p50 1p00 - do - file=gfs.t${hh}z.pgrb2.${grid}.anl - - if [ $gfs_ver = v14 ]; then # v14 production source - - cd $ROTDIR/${CDUMP}.${yy}${mm}${dd}/${hh}/${COMPONENT} - export tarball="gpfs_hps_nco_ops_com_gfs_prod_gfs.${yy}${mm}${dd}${hh}.pgrb2_${grid}.tar" - htar -xvf ${PRODHPSSDIR}/rh${yy}/${yy}${mm}/${yy}${mm}${dd}/${tarball} ./${file} - - elif [ $gfs_ver = v15 ]; then # v15 production source - - cd $EXTRACT_DIR - export tarball="com_gfs_prod_gfs.${yy}${mm}${dd}_${hh}.gfs_pgrb2.tar" - htar -xvf ${PRODHPSSDIR}/rh${yy}/${yy}${mm}/${yy}${mm}${dd}/${tarball} ./${CDUMP}.${yy}${mm}${dd}/${hh}/${file} - mv ${EXTRACT_DIR}/${CDUMP}.${yy}${mm}${dd}/${hh}/${file} ${ROTDIR}/${CDUMP}.${yy}${mm}${dd}/${hh}/${COMPONENT}/${file} - - elif [ $gfs_ver = v16 ]; then # v16 - determine RETRO or production source next - - if [ $RETRO = "YES" ]; then # Retrospective parallel source - - cd $EXTRACT_DIR - if [ $grid = "0p25" ]; then # anl file spread across multiple tarballs - export tarball="gfsa.tar" - elif [ $grid = "0p50" -o $grid = "1p00" ]; then - export tarball="gfsb.tar" - fi - htar -xvf ${HPSSDIR}/${yy}${mm}${dd}${hh}/${tarball} ./${CDUMP}.${yy}${mm}${dd}/${hh}/${file} - mv ${EXTRACT_DIR}/${CDUMP}.${yy}${mm}${dd}/${hh}/${file} ${ROTDIR}/${CDUMP}.${yy}${mm}${dd}/${hh}/${COMPONENT}/${file} - - else # Production source - - cd $ROTDIR - export tarball="com_gfs_prod_gfs.${yy}${mm}${dd}_${hh}.gfs_pgrb2.tar" - htar -xvf ${PRODHPSSDIR}/rh${yy}/${yy}${mm}/${yy}${mm}${dd}/${tarball} ./${CDUMP}.${yy}${mm}${dd}/${hh}/atmos/${file} - - fi # RETRO vs production - - fi # Version check - done # grid loop -fi # v14-v16 pgrb anl file pull - -########################################## -# Remove the Temporary working directory -########################################## -cd $DATAROOT -[[ $KEEPDATA = "NO" ]] && rm -rf $DATA - -############################################################### -# Exit out cleanly -exit 0 diff --git a/jobs/rocoto/getic_gsl_fv3gfsv15.sh b/jobs/rocoto/getic_gsl_fv3gfsv15.sh index e60a7c918a..f28e6c5ab9 100755 --- a/jobs/rocoto/getic_gsl_fv3gfsv15.sh +++ b/jobs/rocoto/getic_gsl_fv3gfsv15.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ## this script makes links to FV3GFS (GFSv15.1) nemsio files under /public and copies over GFS analysis file for verification ## /scratch4/BMC/rtfim/rtfuns/FV3GFS/FV3ICS/YYYYMMDDHH/gfs @@ -24,13 +26,6 @@ hh=`echo $CDATE | cut -c9-10` yyddd=`date +%y%j -u -d $yyyymmdd` fv3ic_dir=${ROTDIR}/${CDUMP}.${yyyymmdd}/${hh}/${COMPONENT} -## EMC archive on disk -## /scratch1/NCEPDEV/rstprod/com/gfs/prod -## gfs.t00z.atmanl.nemsio -## gfs.t00z.sfcanl.nemsio -## -EMCDIR=/scratch1/NCEPDEV/rstprod/com/gfs/prod - ## create links in FV3ICS directory mkdir -p $fv3ic_dir cd $fv3ic_dir @@ -52,10 +47,6 @@ elif [[ -f $RETRODIR/${pubsfc_file} ]]; then echo "linking $RETRODIR...." ln -fs $RETRODIR/${pubsfc_file} $sfc_file ln -fs $RETRODIR/${pubatm_file} $atm_file -elif [[ -f $EMCDIR/${pubsfc_file} ]]; then - echo "linking $EMCDIR...." 
- ln -fs $EMCDIR/${sfc_file} $sfc_file - ln -fs $EMCDIR/${atm_file} $atm_file else echo "missing input files!" exit 1 diff --git a/jobs/rocoto/getic_gsl_fv3gfsv16.sh b/jobs/rocoto/getic_gsl_fv3gfsv16.sh index c53afed25c..ec5c3dcef2 100755 --- a/jobs/rocoto/getic_gsl_fv3gfsv16.sh +++ b/jobs/rocoto/getic_gsl_fv3gfsv16.sh @@ -1,18 +1,18 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ## this script makes links to FV3GFS netcdf files under /public and copies over GFS analysis file for verification ## /home/rtfim/UFS_CAMSUITE/FV3GFSrun/FV3ICS/YYYYMMDDHH/gfs ## gfs.tHHz.sfcanl.nc -> /public/data/grids/gfs/netcdf/YYDDDHH00.gfs.tHHz.sfcanl.nc ## gfs.tHHz.atmanl.nc -> /public/data/grids/gfs/netcdf/YYDDDHH00.gfs.tHHz.atmanl.nc - echo echo "CDATE = $CDATE" echo "CDUMP = $CDUMP" echo "COMPONENT = $COMPONENT" echo "ICSDIR = $ICSDIR" echo "PUBDIR = $PUBDIR" -echo "GFSDIR = $GFSDIR" echo "RETRODIR = $RETRODIR" echo "ROTDIR = $ROTDIR" echo "PSLOT = $PSLOT" diff --git a/jobs/rocoto/gldas.sh b/jobs/rocoto/gldas.sh deleted file mode 100755 index 87fba5a211..0000000000 --- a/jobs/rocoto/gldas.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash -x - -############################################################### -# Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh -status=$? -[[ $status -ne 0 ]] && exit $status - -############################################################### -# Execute the JJOB. GLDAS only runs once per day. - -$HOMEgfs/jobs/JGDAS_ATMOS_GLDAS -status=$? - -exit $status diff --git a/jobs/rocoto/init.sh b/jobs/rocoto/init.sh index ed03799e2b..ca458d4bcf 100755 --- a/jobs/rocoto/init.sh +++ b/jobs/rocoto/init.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" ############################################################### ## Abstract: @@ -41,7 +43,7 @@ export mm=$(echo $CDATE | cut -c5-6) export dd=$(echo $CDATE | cut -c7-8) export hh=${cyc:-$(echo $CDATE | cut -c9-10)} -export DATA=${DATA:-${DATAROOT}/init} +export DATA=${DATA:-${DATAROOT}/init_$CDATE} export EXTRACT_DIR=${EXTRACT_DIR:-$ROTDIR} export WORKDIR=${WORKDIR:-$DATA} export OUTDIR=${OUTDIR:-$ROTDIR} @@ -53,7 +55,7 @@ export RUNICSH=${RUNICSH:-${GDASINIT_DIR}/run_v16.chgres.sh} # Check if init is needed and run if so if [[ $gfs_ver = "v16" && $EXP_WARM_START = ".true." && $CASE = $OPS_RES ]]; then echo "Detected v16 $OPS_RES warm starts, will not run init. Exiting..." - exit 0 + else # Run chgres_cube if [ ! -d $OUTDIR ]; then mkdir -p $OUTDIR ; fi @@ -70,4 +72,6 @@ cd $DATAROOT ############################################################### # Exit out cleanly + + exit 0 diff --git a/jobs/rocoto/landanlfinal.sh b/jobs/rocoto/landanlfinal.sh new file mode 100755 index 0000000000..a6fa48c679 --- /dev/null +++ b/jobs/rocoto/landanlfinal.sh @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? 
+[[ ${status} -ne 0 ]] && exit "${status}" + +export job="landanlfinal" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_LAND_ANALYSIS_FINALIZE" +status=$? +exit "${status}" diff --git a/jobs/rocoto/landanlinit.sh b/jobs/rocoto/landanlinit.sh new file mode 100755 index 0000000000..e9c0b2d7a2 --- /dev/null +++ b/jobs/rocoto/landanlinit.sh @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="landanlinit" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_LAND_ANALYSIS_INITIALIZE" +status=$? +exit "${status}" diff --git a/jobs/rocoto/landanlrun.sh b/jobs/rocoto/landanlrun.sh new file mode 100755 index 0000000000..3f306a32be --- /dev/null +++ b/jobs/rocoto/landanlrun.sh @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="landanlrun" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_LAND_ANALYSIS_RUN" +status=$? +exit "${status}" diff --git a/jobs/rocoto/makeinit_link.sh b/jobs/rocoto/makeinit_link.sh index 0516d9b305..d689e63ff9 100755 --- a/jobs/rocoto/makeinit_link.sh +++ b/jobs/rocoto/makeinit_link.sh @@ -1,7 +1,11 @@ #!/bin/sh - +## ## this script makes a link to $ICSDIR/YYYYMMDDHH/gfs//INPUT -## /scratch4/BMC/rtfim/rtruns/FV3ICS/YYYYMMDDHH/gfs/C384|C768/INPUT +## +## md ${ROTDIR}/${CDUMP}.${yyyymmdd}/${hh}/model_data/${COMPONENT} +## cd ${ROTDIR}/${CDUMP}.${yyyymmdd}/${hh}/model_data/${COMPONENT} +## ln -s /scratch4/BMC/rtfim/rtruns/FV3ICS/YYYYMMDDHH/gfs/C768/INPUT input +## ## echo @@ -17,7 +21,7 @@ echo yyyymmdd=`echo $CDATE | cut -c1-8` hh=`echo $CDATE | cut -c9-10` init_dir=$ICSDIR/${CDATE}/${CDUMP}/${CASE} -outdir=${ROTDIR}/${CDUMP}.${yyyymmdd}/${hh}/${COMPONENT} +outdir=${ROTDIR}/${CDUMP}.${yyyymmdd}/${hh}/model_data/${COMPONENT} ## create link to FV3ICS directory if [[ ! -d $outdir ]]; then @@ -30,6 +34,6 @@ if [[ ! -d $outdir ]]; then fi cd $outdir echo "making link to FV3ICS directory: $init_dir/INPUT" -ln -fs $init_dir/INPUT +ln -fs $init_dir/INPUT input status=$? 
exit $status diff --git a/jobs/rocoto/metp.sh b/jobs/rocoto/metp.sh index e18d35fc20..82254a0435 100755 --- a/jobs/rocoto/metp.sh +++ b/jobs/rocoto/metp.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### ## Abstract: @@ -17,60 +19,78 @@ ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} + +export job="metp${METPCASE}" +export jobid="${job}.$$" + +############################################## +# make temp directory +############################################## +export DATA=${DATA:-${DATAROOT}/${jobid}} +mkdir -p ${DATA} +cd ${DATA} + +############################################## +# Run setpdy and initialize PDY variables +############################################## +export cycle="t${cyc}z" +setpdy.sh +. ./PDY ############################################################### echo echo "=============== START TO SOURCE RELEVANT CONFIGS ===============" configs="base metp" -for config in $configs; do - . $EXPDIR/config.${config} +for config in ${configs}; do + . ${EXPDIR}/config.${config} status=$? - [[ $status -ne 0 ]] && exit $status + [[ ${status} -ne 0 ]] && exit ${status} done ############################################################### echo echo "=============== START TO SOURCE MACHINE RUNTIME ENVIRONMENT ===============" -. $BASE_ENV/${machine}.env metp +. ${BASE_ENV}/${machine}.env metp status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} ############################################################### -export COMPONENT=${COMPONENT:-atmos} -export VDATE="$(echo $($NDATE -${VRFYBACK_HRS} $CDATE) | cut -c1-8)" - -export pid=${pid:-$$} -export jobid=${job}.${pid} -export COMIN="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" -export DATAROOT="$RUNDIR/$CDATE/$CDUMP/metp.${jobid}" -[[ -d $DATAROOT ]] && rm -rf $DATAROOT -mkdir -p $DATAROOT +export COMPONENT="atmos" +export VDATE="$(echo $(${NDATE} -${VRFYBACK_HRS} ${CDATE}) | cut -c1-8)" +export COMIN="${ROTDIR}/${CDUMP}.${PDY}/${cyc}/${COMPONENT}" +# TODO: This should not be permitted as DATAROOT is set at the job-card level. +# TODO: DATAROOT is being used as DATA in metp jobs. This should be rectified in metp. +# TODO: The temporary directory is DATA and is created at the top of the J-Job. +# TODO: remove this line +export DATAROOT=${DATA} ############################################################### echo echo "=============== START TO RUN METPLUS VERIFICATION ===============" -if [ $CDUMP = "gfs" ]; then +if [ ${CDUMP} = "gfs" ]; then - if [ $RUN_GRID2GRID_STEP1 = "YES" -o $RUN_GRID2OBS_STEP1 = "YES" -o $RUN_PRECIP_STEP1 = "YES" ]; then + if [ ${RUN_GRID2GRID_STEP1} = "YES" -o ${RUN_GRID2OBS_STEP1} = "YES" -o ${RUN_PRECIP_STEP1} = "YES" ]; then - $VERIF_GLOBALSH + ${VERIF_GLOBALSH} status=$? 
- [[ $status -ne 0 ]] && exit $status - [[ $status -eq 0 ]] && echo "Succesfully ran $VERIF_GLOBALSH" + [[ ${status} -ne 0 ]] && exit ${status} + [[ ${status} -eq 0 ]] && echo "Successfully ran ${VERIF_GLOBALSH}" fi fi -if [ $CDUMP = "gdas" ]; then +if [ ${CDUMP} = "gdas" ]; then echo "METplus verification currently not supported for CDUMP=${CDUMP}" fi ############################################################### # Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi +if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf ${DATAROOT} ; fi # TODO: This should be $DATA + + exit 0 diff --git a/jobs/rocoto/ocnanalbmat.sh b/jobs/rocoto/ocnanalbmat.sh new file mode 100755 index 0000000000..e62db9115a --- /dev/null +++ b/jobs/rocoto/ocnanalbmat.sh @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ "${status}" -ne 0 ]] && exit "${status}" + +export job="ocnanalbmat" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB +"${HOMEgfs}"/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_BMAT +status=$? +echo "BMAT gets run here" +exit "${status}" diff --git a/jobs/rocoto/ocnanalchkpt.sh b/jobs/rocoto/ocnanalchkpt.sh new file mode 100755 index 0000000000..ae98bc8e88 --- /dev/null +++ b/jobs/rocoto/ocnanalchkpt.sh @@ -0,0 +1,18 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="ocnanalchkpt" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB +"${HOMEgfs}"/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_CHKPT +status=$? +exit "${status}" diff --git a/jobs/rocoto/ocnanalpost.sh b/jobs/rocoto/ocnanalpost.sh new file mode 100755 index 0000000000..b99a4e05ca --- /dev/null +++ b/jobs/rocoto/ocnanalpost.sh @@ -0,0 +1,18 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="ocnanalpost" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB +"${HOMEgfs}"/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_POST +status=$? +exit "${status}" diff --git a/jobs/rocoto/ocnanalprep.sh b/jobs/rocoto/ocnanalprep.sh new file mode 100755 index 0000000000..3830fe1c39 --- /dev/null +++ b/jobs/rocoto/ocnanalprep.sh @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +export STRICT="NO" +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="ocnanalprep" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB +"${HOMEgfs}"/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_PREP +status=$? +exit "${status}" diff --git a/jobs/rocoto/ocnanalrun.sh b/jobs/rocoto/ocnanalrun.sh new file mode 100755 index 0000000000..5f998af989 --- --- /dev/null +++ b/jobs/rocoto/ocnanalrun.sh @@ -0,0 +1,18 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="ocnanalrun" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB +"${HOMEgfs}"/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_RUN +status=$? +exit "${status}" diff --git a/jobs/rocoto/ocnanalvrfy.sh b/jobs/rocoto/ocnanalvrfy.sh new file mode 100755 index 0000000000..d8e9bbb805 --- /dev/null +++ b/jobs/rocoto/ocnanalvrfy.sh @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +export STRICT="NO" +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" --eva +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="ocnanalvrfy" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGDAS_GLOBAL_OCEAN_ANALYSIS_VRFY" +status=$? +exit "${status}" diff --git a/jobs/rocoto/ocnpost.sh b/jobs/rocoto/ocnpost.sh index ad42680dba..ee8da061f2 100755 --- a/jobs/rocoto/ocnpost.sh +++ b/jobs/rocoto/ocnpost.sh @@ -1,158 +1,122 @@ -#!/bin/bash -set -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### -## CICE5/MOM6 post driver script +## CICE5/MOM6 post driver script ## FHRGRP : forecast hour group to post-process (e.g. 0, 1, 2 ...) ## FHRLST : forecast hourlist to be post-process (e.g. anl, f000, f000_f001_f002, ...) ############################################################### # Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh -status=$? -[[ $status -ne 0 ]] && exit $status - -############################# -# Source relevant config files -############################# -configs="base ocnpost" -config_path=${EXPDIR:-$NWROOT/gfs.${gfs_ver}/parm/config} -for config in $configs; do - . $config_path/config.$config - status=$? - [[ $status -ne 0 ]] && exit $status -done - - -########################################## -# Source machine runtime environment -########################################## -. $HOMEgfs/env/${machine}.env ocnpost +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" status=$? -[[ $status -ne 0 ]] && exit $status - - -############################################## -# Obtain unique process id (pid) and make temp directory -############################################## -export job=${job:-"ocnpost"} -export pid=${pid:-$$} -export outid=${outid:-"LL$job"} -export jobid=${jobid:-"${outid}.o${pid}"} - -export DATAROOT="$RUNDIR/$CDATE/$CDUMP" -[[ ! -d $DATAROOT ]] && mkdir -p $DATAROOT - -export DATA="$DATAROOT/${job}.${pid}" -# DATA dir not used for now. - -[[ -d $DATA ]] && rm -rf $DATA -mkdir -p $DATA -cd $DATA - -############################################## -# Run setpdy and initialize PDY variables -############################################## -export cycle="t${cyc}z" -setpdy.sh -. 
./PDY - -############################################## -# Define the Log File directory -############################################## -export jlogfile=${jlogfile:-$COMROOT/logs/jlogfiles/jlogfile.${job}.${pid}} - -############################################## -# Determine Job Output Name on System -############################################## -export pgmout="OUTPUT.${pid}" -export pgmerr=errfile +(( status != 0 )) && exit "${status}" +export job="ocnpost" +export jobid="${job}.$$" +source "${HOMEgfs}/ush/jjob_header.sh" -e "ocnpost" -c "base ocnpost" ############################################## # Set variables used in the exglobal script ############################################## -export CDATE=${CDATE:-${PDY}${cyc}} -export CDUMP=${CDUMP:-${RUN:-"gfs"}} -if [ $RUN_ENVIR = "nco" ]; then - export ROTDIR=${COMROOT:?}/$NET/$envir +export CDUMP=${RUN/enkf} +if [[ ${RUN_ENVIR} = "nco" ]]; then + export ROTDIR=${COMROOT:?}/${NET}/${envir} fi ############################################## # Begin JOB SPECIFIC work ############################################## -[[ ! -d $COMOUTocean ]] && mkdir -p $COMOUTocean -[[ ! -d $COMOUTice ]] && mkdir -p $COMOUTice +YMD=${PDY} HH=${cyc} generate_com -rx COM_OCEAN_HISTORY COM_OCEAN_2D COM_OCEAN_3D \ + COM_OCEAN_XSECT COM_ICE_HISTORY -fhrlst=$(echo $FHRLST | sed -e 's/_/ /g; s/f/ /g; s/,/ /g') +for grid in "0p50" "0p25"; do + YMD=${PDY} HH=${cyc} GRID=${grid} generate_com -rx "COM_OCEAN_GRIB_${grid}:COM_OCEAN_GRIB_TMPL" +done + +for outdir in COM_OCEAN_2D COM_OCEAN_3D COM_OCEAN_XSECT COM_OCEAN_GRIB_0p25 COM_OCEAN_GRIB_0p50; do + if [[ ! -d "${!outdir}" ]]; then + mkdir -p "${!outdir}" + fi +done + +fhrlst=$(echo ${FHRLST} | sed -e 's/_/ /g; s/f/ /g; s/,/ /g') export OMP_NUM_THREADS=1 export ENSMEM=${ENSMEM:-01} -export IDATE=$CDATE - -for fhr in $fhrlst; do - export fhr=$fhr - VDATE=$($NDATE $fhr $IDATE) - # Regrid the MOM6 and CICE5 output from tripolar to regular grid via NCL - # This can take .25 degree input and convert to .5 degree - other opts avail - # The regrid scripts use CDATE for the current day, restore it to IDATE afterwards - export CDATE=$VDATE - cd $DATA - if [ $fhr -gt 0 ]; then - export MOM6REGRID=${MOM6REGRID:-$HOMEgfs} - $MOM6REGRID/scripts/run_regrid.sh - status=$? - [[ $status -ne 0 ]] && exit $status - - # Convert the netcdf files to grib2 - export executable=$MOM6REGRID/exec/reg2grb2.x - $MOM6REGRID/scripts/run_reg2grb2.sh - status=$? - [[ $status -ne 0 ]] && exit $status - - - #break up ocn netcdf into multiple files: - if [ -f $COMOUTocean/ocn_2D_$VDATE.$ENSMEM.$IDATE.nc ]; then - echo "File $COMOUTocean/ocn_2D_$VDATE.$ENSMEM.$IDATE.nc already exists" +export IDATE=${PDY}${cyc} + +for fhr in ${fhrlst}; do + export fhr=${fhr} + # Ignore possible spelling error (nothing is misspelled) + # shellcheck disable=SC2153 + VDATE=$(${NDATE} "${fhr}" "${IDATE}") + # shellcheck disable= + declare -x VDATE + cd "${DATA}" || exit 2 + if (( fhr > 0 )); then + # TODO: This portion calls NCL scripts that are deprecated (see Issue #923) + if [[ "${MAKE_OCN_GRIB:-YES}" == "YES" ]]; then + export MOM6REGRID=${MOM6REGRID:-${HOMEgfs}} + "${MOM6REGRID}/scripts/run_regrid.sh" + status=$? + [[ ${status} -ne 0 ]] && exit "${status}" + + # Convert the netcdf files to grib2 + export executable=${MOM6REGRID}/exec/reg2grb2.x + "${MOM6REGRID}/scripts/run_reg2grb2.sh" + status=$? 
+ [[ ${status} -ne 0 ]] && exit "${status}" + ${NMV} "ocn_ice${VDATE}.${ENSMEM}.${IDATE}_0p25x0p25.grb2" "${COM_OCEAN_GRIB_0p25}/" + ${NMV} "ocn_ice${VDATE}.${ENSMEM}.${IDATE}_0p5x0p5.grb2" "${COM_OCEAN_GRIB_0p50}/" + fi + + #break up ocn netcdf into multiple files: + if [[ -f "${COM_OCEAN_2D}/ocn_2D_${VDATE}.${ENSMEM}.${IDATE}.nc" ]]; then + echo "File ${COM_OCEAN_2D}/ocn_2D_${VDATE}.${ENSMEM}.${IDATE}.nc already exists" else - ncks -x -v vo,uo,so,temp $COMOUTocean/ocn$VDATE.$ENSMEM.$IDATE.nc $COMOUTocean/ocn_2D_$VDATE.$ENSMEM.$IDATE.nc + ncks -x -v vo,uo,so,temp \ + "${COM_OCEAN_HISTORY}/ocn${VDATE}.${ENSMEM}.${IDATE}.nc" \ + "${COM_OCEAN_2D}/ocn_2D_${VDATE}.${ENSMEM}.${IDATE}.nc" status=$? - [[ $status -ne 0 ]] && exit $status - fi - if [ -f $COMOUTocean/ocn_3D_$VDATE.$ENSMEM.$IDATE.nc ]; then - echo "File $COMOUTocean/ocn_3D_$VDATE.$ENSMEM.$IDATE.nc already exists" - else - ncks -x -v Heat_PmE,LW,LwLatSens,MLD_003,MLD_0125,SSH,SSS,SST,SSU,SSV,SW,cos_rot,ePBL,evap,fprec,frazil,latent,lprec,lrunoff,sensible,sin_rot,speed,taux,tauy,wet_c,wet_u,wet_v $COMOUTocean/ocn$VDATE.$ENSMEM.$IDATE.nc $COMOUTocean/ocn_3D_$VDATE.$ENSMEM.$IDATE.nc + [[ ${status} -ne 0 ]] && exit "${status}" + fi + if [[ -f "${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc" ]]; then + echo "File ${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc already exists" + else + ncks -x -v Heat_PmE,LW,LwLatSens,MLD_003,MLD_0125,SSH,SSS,SST,SSU,SSV,SW,cos_rot,ePBL,evap,fprec,frazil,latent,lprec,lrunoff,sensible,sin_rot,speed,taux,tauy,wet_c,wet_u,wet_v \ + "${COM_OCEAN_HISTORY}/ocn${VDATE}.${ENSMEM}.${IDATE}.nc" \ + "${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc" status=$? - [[ $status -ne 0 ]] && exit $status - fi - if [ -f $COMOUTocean/ocn-temp-EQ_$VDATE.$ENSMEM.$IDATE.nc ]; then - echo "File $COMOUTocean/ocn-temp-EQ_$VDATE.$ENSMEM.$IDATE.nc already exists" - else - ncks -v temp -d yh,503 -d xh,-299.92,60.03 $COMOUTocean/ocn_3D_$VDATE.$ENSMEM.$IDATE.nc $COMOUTocean/ocn-temp-EQ_$VDATE.$ENSMEM.$IDATE.nc + [[ ${status} -ne 0 ]] && exit "${status}" + fi + if [[ -f "${COM_OCEAN_XSECT}/ocn-temp-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc" ]]; then + echo "File ${COM_OCEAN_XSECT}/ocn-temp-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc already exists" + else + ncks -v temp -d yh,503 -d xh,-299.92,60.03 \ + "${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc" \ + "${COM_OCEAN_XSECT}/ocn-temp-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc" status=$? - [[ $status -ne 0 ]] && exit $status - fi - if [ -f $COMOUTocean/ocn-uo-EQ_$VDATE.$ENSMEM.$IDATE.nc ]; then - echo "File $COMOUTocean/ocn-uo-EQ_$VDATE.$ENSMEM.$IDATE.nc already exists" - else - ncks -v uo -d yh,503 -d xh,-299.92,60.03 $COMOUTocean/ocn_3D_$VDATE.$ENSMEM.$IDATE.nc $COMOUTocean/ocn-uo-EQ_$VDATE.$ENSMEM.$IDATE.nc + [[ ${status} -ne 0 ]] && exit "${status}" + fi + if [[ -f "${COM_OCEAN_XSECT}/ocn-uo-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc" ]]; then + echo "File ${COM_OCEAN_XSECT}/ocn-uo-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc already exists" + else + ncks -v uo -d yh,503 -d xh,-299.92,60.03 \ + "${COM_OCEAN_3D}/ocn_3D_${VDATE}.${ENSMEM}.${IDATE}.nc" \ + "${COM_OCEAN_XSECT}/ocn-uo-EQ_${VDATE}.${ENSMEM}.${IDATE}.nc" status=$? - [[ $status -ne 0 ]] && exit $status + [[ ${status} -ne 0 ]] && exit "${status}" fi fi - done -# Restore CDATE to what is expected -export CDATE=$IDATE -$NMV ocn_ice*.grb2 $COMOUTocean/ -status=$? 
-[[ $status -ne 0 ]] && exit $status - # clean up working folder -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATA ; fi +if [[ ${KEEPDATA:-"NO"} = "NO" ]] ; then rm -rf "${DATA}" ; fi ############################################################### # Exit out cleanly + + exit 0 diff --git a/jobs/rocoto/post.sh b/jobs/rocoto/post.sh index 5747102451..e84b2b7b71 100755 --- a/jobs/rocoto/post.sh +++ b/jobs/rocoto/post.sh @@ -1,4 +1,6 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### ## NCEP post driver script @@ -7,44 +9,25 @@ ############################################################### # Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} -export COMPONENT=${COMPONENT:-atmos} +export job="post" +export jobid="${job}.$$" -if [ $FHRGRP = 'anl' ]; then +if [ ${FHRGRP} = 'anl' ]; then fhrlst="anl" - restart_file=$ROTDIR/${CDUMP}.${PDY}/${cyc}/$COMPONENT/${CDUMP}.t${cyc}z.atm else - fhrlst=$(echo $FHRLST | sed -e 's/_/ /g; s/f/ /g; s/,/ /g') - restart_file=$ROTDIR/${CDUMP}.${PDY}/${cyc}/$COMPONENT/${CDUMP}.t${cyc}z.logf + fhrlst=$(echo ${FHRLST} | sed -e 's/_/ /g; s/f/ /g; s/,/ /g') fi - #--------------------------------------------------------------- -for fhr in $fhrlst; do - - if [ ! -f $restart_file${fhr}.nemsio -a ! -f $restart_file${fhr}.nc -a ! -f $restart_file${fhr}.txt ]; then - echo "Nothing to process for FHR = $fhr, cycle, wait for 5 minutes" - sleep 300 - fi - if [ ! -f $restart_file${fhr}.nemsio -a ! -f $restart_file${fhr}.nc -a ! -f $restart_file${fhr}.txt ]; then - echo "Nothing to process for FHR = $fhr, cycle, skip" - continue - fi - - #master=$ROTDIR/${CDUMP}.${PDY}/${cyc}/$COMPONENT/${CDUMP}.t${cyc}z.master.grb2f${fhr} - pgb0p25=$ROTDIR/${CDUMP}.${PDY}/${cyc}/$COMPONENT/${CDUMP}.t${cyc}z.pgrb2.0p25.f${fhr} -#JKH if [ ! -s $pgb0p25 ]; then - export post_times=$fhr - $HOMEgfs/jobs/JGLOBAL_ATMOS_NCEPPOST - status=$? - [[ $status -ne 0 ]] && exit $status -#JKH fi - +for fhr in ${fhrlst}; do + export post_times=${fhr} + ${HOMEgfs}/jobs/JGLOBAL_ATMOS_POST + status=$? + [[ ${status} -ne 0 ]] && exit ${status} done -############################################################### -# Exit out cleanly exit 0 diff --git a/jobs/rocoto/postsnd.sh b/jobs/rocoto/postsnd.sh index 5472a9206b..bc274361db 100755 --- a/jobs/rocoto/postsnd.sh +++ b/jobs/rocoto/postsnd.sh @@ -1,16 +1,22 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} +export job="postsnd" +export jobid="${job}.$$" ############################################################### # Execute the JJOB -$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +${HOMEgfs}/jobs/JGFS_ATMOS_POSTSND status=$? -exit $status + + +exit ${status} diff --git a/jobs/rocoto/prep.sh b/jobs/rocoto/prep.sh index c9ddcd8aa4..826dec5ae7 100755 --- a/jobs/rocoto/prep.sh +++ b/jobs/rocoto/prep.sh @@ -1,129 +1,137 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### # Source FV3GFS workflow modules -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. 
${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} ############################################################### -# Source relevant configs -configs="base prep prepbufr" -for config in $configs; do - . $EXPDIR/config.${config} - status=$? - [[ $status -ne 0 ]] && exit $status -done +export job="prep" +export jobid="${job}.$$" +source "${HOMEgfs}/ush/jjob_header.sh" -e "prep" -c "base prep" -############################################################### -# Source machine runtime environment -. $BASE_ENV/${machine}.env prep -status=$? -[[ $status -ne 0 ]] && exit $status +export CDUMP="${RUN/enkf}" ############################################################### # Set script and dependency variables -export COMPONENT=${COMPONENT:-atmos} +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}") +# shellcheck disable= +gPDY=${GDATE:0:8} +gcyc=${GDATE:8:2} +GDUMP="gdas" + export OPREFIX="${CDUMP}.t${cyc}z." -export COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" -[[ ! -d $COMOUT ]] && mkdir -p $COMOUT + +YMD=${PDY} HH=${cyc} DUMP=${CDUMP} generate_com -rx COM_OBS COM_OBSDMP + +RUN=${GDUMP} DUMP=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + COM_OBS_PREV:COM_OBS_TMPL \ + COM_OBSDMP_PREV:COM_OBSDMP_TMPL + +export MAKE_PREPBUFR=${MAKE_PREPBUFR:-"YES"} +if [[ ! -d "${COM_OBS}" ]]; then mkdir -p "${COM_OBS}"; fi ############################################################### # If ROTDIR_DUMP=YES, copy dump files to rotdir -if [ $ROTDIR_DUMP = "YES" ]; then - $HOMEgfs/ush/getdump.sh $CDATE $CDUMP $DMPDIR/${CDUMP}${DUMP_SUFFIX}.${PDY}/${cyc} $COMOUT +if [[ ${ROTDIR_DUMP} = "YES" ]]; then + "${HOMEgfs}/ush/getdump.sh" "${PDY}${cyc}" "${CDUMP}" "${COM_OBSDMP}" "${COM_OBS}" status=$? - [[ $status -ne 0 ]] && exit $status - -# Ensure previous cycle gdas dumps are available (used by cycle & downstream) - GDATE=$($NDATE -$assim_freq $CDATE) - gPDY=$(echo $GDATE | cut -c1-8) - gcyc=$(echo $GDATE | cut -c9-10) - GDUMP=gdas - gCOMOUT="$ROTDIR/$GDUMP.$gPDY/$gcyc/$COMPONENT" - if [ ! -s $gCOMOUT/$GDUMP.t${gcyc}z.updated.status.tm00.bufr_d ]; then - $HOMEgfs/ush/getdump.sh $GDATE $GDUMP $DMPDIR/${GDUMP}${DUMP_SUFFIX}.${gPDY}/${gcyc} $gCOMOUT + [[ ${status} -ne 0 ]] && exit ${status} + + # Ensure previous cycle gdas dumps are available (used by cycle & downstream) + if [[ ! -s "${COM_OBS_PREV}/${GDUMP}.t${gcyc}z.updated.status.tm00.bufr_d" ]]; then + "${HOMEgfs}/ush/getdump.sh" "${GDATE}" "${GDUMP}" "${COM_OBSDMP_PREV}" "${COM_OBS_PREV}" status=$? - [[ $status -ne 0 ]] && exit $status + [[ ${status} -ne 0 ]] && exit ${status} fi - + # exception handling to ensure no dead link + [[ $(find ${COM_OBS} -xtype l | wc -l) -ge 1 ]] && exit 9 + [[ $(find ${COM_OBS_PREV} -xtype l | wc -l) -ge 1 ]] && exit 9 fi + ############################################################### ############################################################### -# For running real-time parallels on WCOSS_C, execute tropcy_qc and +# For running real-time parallels, execute tropcy_qc and # copy files from operational syndata directory to a local directory. # Otherwise, copy existing tcvital data from globaldump. -if [ $PROCESS_TROPCY = "YES" ]; then +if [[ ${PROCESS_TROPCY} = "YES" ]]; then export COMINsyn=${COMINsyn:-$(compath.py gfs/prod/syndat)} - if [ $RUN_ENVIR != "nco" ]; then + if [[ ${RUN_ENVIR} != "nco" ]]; then export ARCHSYND=${ROTDIR}/syndat - if [ ! 
-d ${ARCHSYND} ]; then mkdir -p $ARCHSYND; fi - if [ ! -s $ARCHSYND/syndat_akavit ]; then + if [[ ! -d ${ARCHSYND} ]]; then mkdir -p ${ARCHSYND}; fi + if [[ ! -s ${ARCHSYND}/syndat_akavit ]]; then for file in syndat_akavit syndat_dateck syndat_stmcat.scr syndat_stmcat syndat_sthisto syndat_sthista ; do - cp $COMINsyn/$file $ARCHSYND/. + cp ${COMINsyn}/${file} ${ARCHSYND}/. done fi fi - [[ $ROTDIR_DUMP = "YES" ]] && rm $COMOUT${CDUMP}.t${cyc}z.syndata.tcvitals.tm00 + if [[ ${ROTDIR_DUMP} = "YES" ]]; then rm "${COM_OBS}/${CDUMP}.t${cyc}z.syndata.tcvitals.tm00"; fi - $HOMEgfs/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC + "${HOMEgfs}/jobs/JGLOBAL_ATMOS_TROPCY_QC_RELOC" status=$? - [[ $status -ne 0 ]] && exit $status + [[ ${status} -ne 0 ]] && exit ${status} else - [[ $ROTDIR_DUMP = "NO" ]] && cp $DMPDIR/${CDUMP}${DUMP_SUFFIX}.${PDY}/${cyc}/${CDUMP}.t${cyc}z.syndata.tcvitals.tm00 $COMOUT/ + if [[ ${ROTDIR_DUMP} = "NO" ]]; then cp "${COM_OBSDMP}/${CDUMP}.t${cyc}z.syndata.tcvitals.tm00" "${COM_OBS}/"; fi fi ############################################################### # Generate prepbufr files from dumps or copy from OPS -if [ $DO_MAKEPREPBUFR = "YES" ]; then - if [ $ROTDIR_DUMP = "YES" ]; then - rm $COMOUT/${OPREFIX}prepbufr - rm $COMOUT/${OPREFIX}prepbufr.acft_profiles - rm $COMOUT/${OPREFIX}nsstbufr +if [[ ${MAKE_PREPBUFR} = "YES" ]]; then + if [[ ${ROTDIR_DUMP} = "YES" ]]; then + rm -f "${COM_OBS}/${OPREFIX}prepbufr" + rm -f "${COM_OBS}/${OPREFIX}prepbufr.acft_profiles" + rm -f "${COM_OBS}/${OPREFIX}nsstbufr" fi export job="j${CDUMP}_prep_${cyc}" - export DATAROOT="$RUNDIR/$CDATE/$CDUMP/prepbufr" - #export COMIN=${COMIN:-$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT} - export COMIN=${COMIN:-$ROTDIR} - export COMINgdas=${COMINgdas:-$ROTDIR/gdas.$PDY/$cyc/$COMPONENT} - export COMINgfs=${COMINgfs:-$ROTDIR/gfs.$PDY/$cyc/$COMPONENT} - if [ $ROTDIR_DUMP = "NO" ]; then - COMIN_OBS=${COMIN_OBS:-$DMPDIR/${CDUMP}${DUMP_SUFFIX}.${PDY}/${cyc}} - export COMSP=${COMSP:-$COMIN_OBS/$CDUMP.t${cyc}z.} + export DATAROOT="${RUNDIR}/${CDATE}/${CDUMP}/prepbufr" + export COMIN=${COM_OBS} + export COMOUT=${COM_OBS} + RUN="gdas" YMD=${PDY} HH=${cyc} generate_com -rx COMINgdas:COM_ATMOS_HISTORY_TMPL + RUN="gfs" YMD=${PDY} HH=${cyc} generate_com -rx COMINgfs:COM_ATMOS_HISTORY_TMPL + if [[ ${ROTDIR_DUMP} = "NO" ]]; then + export COMSP=${COMSP:-"${COM_OBSDMP}/${CDUMP}.t${cyc}z."} else - export COMSP=${COMSP:-$ROTDIR/${CDUMP}.${PDY}/${cyc}/$COMPONENT/$CDUMP.t${cyc}z.} + export COMSP=${COMSP:-"${COM_OBS}/${CDUMP}.t${cyc}z."} fi + export COMSP=${COMSP:-${COMIN_OBS}/${CDUMP}.t${cyc}z.} # Disable creating NSSTBUFR if desired, copy from DMPDIR instead - if [[ ${DO_MAKE_NSSTBUFR:-"NO"} = "NO" ]]; then + if [[ ${MAKE_NSSTBUFR:-"NO"} = "NO" ]]; then export MAKE_NSSTBUFR="NO" fi - $HOMEobsproc_network/jobs/JGLOBAL_PREP + "${HOMEobsproc}/jobs/JOBSPROC_GLOBAL_PREP" status=$? - [[ $status -ne 0 ]] && exit $status + [[ ${status} -ne 0 ]] && exit ${status} # If creating NSSTBUFR was disabled, copy from DMPDIR if appropriate. 
- if [[ ${DO_MAKE_NSSTBUFR:-"NO"} = "NO" ]]; then - [[ $DONST = "YES" ]] && $NCP $DMPDIR/${CDUMP}${DUMP_SUFFIX}.${PDY}/${cyc}/${OPREFIX}nsstbufr $COMOUT/${OPREFIX}nsstbufr + if [[ ${MAKE_NSSTBUFR:-"NO"} = "NO" ]]; then + if [[ ${DONST} = "YES" ]]; then ${NCP} "${COM_OBSDMP}/${OPREFIX}nsstbufr" "${COM_OBS}/${OPREFIX}nsstbufr"; fi fi else - if [ $ROTDIR_DUMP = "NO" ]; then - $NCP $DMPDIR/${CDUMP}${DUMP_SUFFIX}.${PDY}/${cyc}/${OPREFIX}prepbufr $COMOUT/${OPREFIX}prepbufr - $NCP $DMPDIR/${CDUMP}${DUMP_SUFFIX}.${PDY}/${cyc}/${OPREFIX}prepbufr.acft_profiles $COMOUT/${OPREFIX}prepbufr.acft_profiles - [[ $DONST = "YES" ]] && $NCP $DMPDIR/${CDUMP}${DUMP_SUFFIX}.${PDY}/${cyc}/${OPREFIX}nsstbufr $COMOUT/${OPREFIX}nsstbufr + if [[ ${ROTDIR_DUMP} = "NO" ]]; then + ${NCP} "${COM_OBSDMP}/${OPREFIX}prepbufr" "${COM_OBS}/${OPREFIX}prepbufr" + ${NCP} "${COM_OBSDMP}/${OPREFIX}prepbufr.acft_profiles" "${COM_OBS}/${OPREFIX}prepbufr.acft_profiles" + if [[ ${DONST} = "YES" ]]; then ${NCP} "${COM_OBSDMP}/${OPREFIX}nsstbufr" "${COM_OBS}/${OPREFIX}nsstbufr"; fi fi fi ################################################################################ # Exit out cleanly + + exit 0 diff --git a/jobs/rocoto/preplandobs.sh b/jobs/rocoto/preplandobs.sh new file mode 100755 index 0000000000..6fcd659eae --- /dev/null +++ b/jobs/rocoto/preplandobs.sh @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source UFSDA workflow modules +. "${HOMEgfs}/ush/load_ufsda_modules.sh" +status=$? +[[ ${status} -ne 0 ]] && exit "${status}" + +export job="preplandobs" +export jobid="${job}.$$" + +############################################################### +# setup python path for workflow utilities and tasks +pygwPATH="${HOMEgfs}/ush/python:${HOMEgfs}/ush/python/pygw/src" +gdasappPATH="${HOMEgfs}/sorc/gdas.cd/iodaconv/src:${HOMEgfs}/sorc/gdas.cd/build/lib/python3.7/pyioda" +PYTHONPATH="${PYTHONPATH:+${PYTHONPATH}:}${pygwPATH}:${gdasappPATH}" +export PYTHONPATH + +############################################################### +# Execute the JJOB +"${HOMEgfs}/jobs/JGLOBAL_PREP_LAND_OBS" +status=$? +exit "${status}" diff --git a/jobs/rocoto/sfcanl.sh b/jobs/rocoto/sfcanl.sh new file mode 100755 index 0000000000..44f93ee0c3 --- /dev/null +++ b/jobs/rocoto/sfcanl.sh @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################################### +# Source FV3GFS workflow modules +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh +status=$? +[[ ${status} -ne 0 ]] && exit ${status} + +export job="sfcanl" +export jobid="${job}.$$" + +############################################################### +# Execute the JJOB +${HOMEgfs}/jobs/JGLOBAL_ATMOS_SFCANL +status=$? + + +exit ${status} diff --git a/jobs/rocoto/vrfy_emc.sh b/jobs/rocoto/vrfy_emc.sh index 9eee5c13c0..93d9f11c44 100755 --- a/jobs/rocoto/vrfy_emc.sh +++ b/jobs/rocoto/vrfy_emc.sh @@ -1,137 +1,74 @@ -#!/bin/bash -x +#! 
/usr/bin/env bash -############################################################### -## Abstract: -## Inline verification and diagnostics driver script -## RUN_ENVIR : runtime environment (emc | nco) -## HOMEgfs : /full/path/to/workflow -## EXPDIR : /full/path/to/config/files -## CDATE : current analysis date (YYYYMMDDHH) -## CDUMP : cycle name (gdas / gfs) -## PDY : current date (YYYYMMDD) -## cyc : current cycle (HH) -############################################################### +source "${HOMEgfs}/ush/preamble.sh" ############################################################### -echo -echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. $HOMEgfs/ush/load_fv3gfs_modules.sh +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" status=$? -[[ $status -ne 0 ]] && exit $status - +(( status != 0 )) && exit "${status}" -############################################################### -echo -echo "=============== START TO SOURCE RELEVANT CONFIGS ===============" -configs="base vrfy" -for config in $configs; do - . $EXPDIR/config.${config} - status=$? - [[ $status -ne 0 ]] && exit $status -done +export job="vrfy" +export jobid="${job}.$$" +# TODO (#235) - This job is calling multiple j-jobs and doing too much in general +# Also, this forces us to call the config files here instead of the j-job +source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" ############################################################### -echo -echo "=============== START TO SOURCE MACHINE RUNTIME ENVIRONMENT ===============" -. $BASE_ENV/${machine}.env vrfy -status=$? -[[ $status -ne 0 ]] && exit $status +export CDUMP="${RUN/enkf}" -############################################################### -export COMPONENT=${COMPONENT:-atmos} -export CDATEm1=$($NDATE -24 $CDATE) -export PDYm1=$(echo $CDATEm1 | cut -c1-8) +CDATEm1=$(${NDATE} -24 "${PDY}${cyc}") +export CDATEm1 +export PDYm1=${CDATEm1:0:8} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export COMIN="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" -export DATAROOT="$RUNDIR/$CDATE/$CDUMP/vrfy.${jobid}" -[[ -d $DATAROOT ]] && rm -rf $DATAROOT -mkdir -p $DATAROOT +CDATEm1c=$(${NDATE} -06 "${PDY}${cyc}") +PDYm1c=${CDATEm1c:0:8} +pcyc=${CDATEm1c:8:2} ############################################################### +# TODO: We can likely drop support for these dev-only grib1 precip files echo echo "=============== START TO GENERATE QUARTER DEGREE GRIB1 FILES ===============" -if [ $MKPGB4PRCP = "YES" -a $CDUMP = "gfs" ]; then - if [ ! -d $ARCDIR ]; then mkdir $ARCDIR ; fi +if [[ ${MKPGB4PRCP} = "YES" && ${CDUMP} == "gfs" ]]; then + YMD=${PDY} HH=${cyc} generate_com -x COM_ATMOS_MASTER + if [ ! 
-d ${ARCDIR} ]; then mkdir -p ${ARCDIR} ; fi nthreads_env=${OMP_NUM_THREADS:-1} # get threads set in env export OMP_NUM_THREADS=1 - cd $COMIN - fhmax=${vhr_rain:-$FHMAX_GFS} - fhr=0 - while [ $fhr -le $fhmax ]; do - fhr2=$(printf %02i $fhr) - fhr3=$(printf %03i $fhr) - fname=${CDUMP}.t${cyc}z.sfluxgrbf$fhr3.grib2 - fileout=$ARCDIR/pgbq${fhr2}.${CDUMP}.${CDATE}.grib2 - $WGRIB2 $fname -match "(:PRATE:surface:)|(:TMP:2 m above ground:)" -grib $fileout - (( fhr = $fhr + 6 )) + cd "${COM_ATMOS_MASTER}" || exit 9 + fhmax=${vhr_rain:-${FHMAX_GFS}} + for (( fhr=0; fhr <= fhmax; fhr+=6 )); do + fhr2=$(printf %02i "${fhr}") + fhr3=$(printf %03i "${fhr}") + fname=${RUN}.t${cyc}z.sfluxgrbf${fhr3}.grib2 + fileout=${ARCDIR}/pgbq${fhr2}.${RUN}.${PDY}${cyc}.grib2 + ${WGRIB2} "${fname}" -match "(:PRATE:surface:)|(:TMP:2 m above ground:)" -grib "${fileout}" done - export OMP_NUM_THREADS=$nthreads_env # revert to threads set in env + export OMP_NUM_THREADS=${nthreads_env} # revert to threads set in env fi ############################################################### echo echo "=============== START TO RUN MOS ===============" -if [ $RUNMOS = "YES" -a $CDUMP = "gfs" ]; then - $RUNGFSMOSSH $PDY$cyc -fi - - -############################################################### -echo -echo "=============== START TO RUN FIT2OBS VERIFICATION ===============" -if [ $VRFYFITS = "YES" -a $CDUMP = $CDFNL -a $CDATE != $SDATE ]; then - - export CDUMPFCST=$VDUMP - export TMPDIR="$RUNDIR/$CDATE/$CDUMP" - [[ ! -d $TMPDIR ]] && mkdir -p $TMPDIR - - xdate=$($NDATE -${VBACKUP_FITS} $CDATE) - - - export RUN_ENVIR_SAVE=$RUN_ENVIR - export RUN_ENVIR=$OUTPUT_FILE - - $PREPQFITSH $PSLOT $xdate $ROTDIR $ARCDIR $TMPDIR - - export RUN_ENVIR=$RUN_ENVIR_SAVE - +if [[ "${RUNMOS}" == "YES" && "${CDUMP}" == "gfs" ]]; then + ${RUNGFSMOSSH} "${PDY}${cyc}" fi -############################################################### -echo -echo "=============== START TO RUN VSDB STEP1, VERIFY PRCIP AND GRID2OBS ===============" -if [ $CDUMP = "gfs" ]; then - - if [ $VSDB_STEP1 = "YES" -o $VRFYPRCP = "YES" -o $VRFYG2OBS = "YES" ]; then - - xdate=$(echo $($NDATE -${BACKDATEVSDB} $CDATE) | cut -c1-8) - export ARCDIR1="$NOSCRUB/archive" - export rundir="$RUNDIR/$CDUMP/$CDATE/vrfy/vsdb_exp" - export COMROT="$ARCDIR1/dummy" - - $VSDBJOBSH $VSDBSH $xdate $vlength $cyc $PSLOT $CDATE $CDUMP $gfs_cyc $rain_bucket $machine - fi -fi - ############################################################### echo echo "=============== START TO RUN RADMON DATA EXTRACTION ===============" -if [ $VRFYRAD = "YES" -a $CDUMP = $CDFNL -a $CDATE != $SDATE ]; then - export EXP=$PSLOT - export COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" - export jlogfile="$ROTDIR/logs/$CDATE/${CDUMP}radmon.log" - export TANKverf_rad="$TANKverf/stats/$PSLOT/$CDUMP.$PDY" - export TANKverf_radM1="$TANKverf/stats/$PSLOT/$CDUMP.$PDYm1" - export MY_MACHINE=$machine +if [[ "${VRFYRAD}" == "YES" && "${CDUMP}" == "${CDFNL}" && "${PDY}${cyc}" != "${SDATE}" ]]; then - $VRFYRADSH + export EXP=${PSLOT} + export TANKverf_rad="${TANKverf}/stats/${PSLOT}/${RUN}.${PDY}/${cyc}" + export TANKverf_radM1="${TANKverf}/stats/${PSLOT}/${RUN}.${PDYm1c}/${pcyc}" + export MY_MACHINE=${machine} + + ${VRFYRADSH} fi @@ -139,16 +76,14 @@ fi ############################################################### echo echo "=============== START TO RUN OZMON DATA EXTRACTION ===============" -if [ $VRFYOZN = "YES" -a $CDUMP = $CDFNL -a $CDATE != $SDATE ]; then +if [[ "${VRFYOZN}" == "YES" && "${CDUMP}" == "${CDFNL}" && "${PDY}${cyc}" != 
"${SDATE}" ]]; then - export EXP=$PSLOT - export COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" - export jlogfile="$ROTDIR/logs/$CDATE/${CDUMP}oznmon.log" - export TANKverf_ozn="$TANKverf_ozn/stats/$PSLOT/$CDUMP.$PDY" - export TANKverf_oznM1="$TANKverf_ozn/stats/$PSLOT/$CDUMP.$PDYm1" - export MY_MACHINE=$machine + export EXP=${PSLOT} + export TANKverf_ozn="${TANKverf_ozn}/stats/${PSLOT}/${RUN}.${PDY}/${cyc}" + export TANKverf_oznM1="${TANKverf_ozn}/stats/${PSLOT}/${RUN}.${PDYm1c}/${pcyc}" + export MY_MACHINE=${machine} - $VRFYOZNSH + ${VRFYOZNSH} fi @@ -156,15 +91,13 @@ fi ############################################################### echo echo "=============== START TO RUN MINMON ===============" -if [ $VRFYMINMON = "YES" -a $CDATE != $SDATE ]; then +if [[ "${VRFYMINMON}" == "YES" && "${PDY}${cyc}" != "${SDATE}" ]]; then - export COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" - export jlogfile="$ROTDIR/logs/$CDATE/${CDUMP}minmon.log" - export M_TANKverfM0="$M_TANKverf/stats/$PSLOT/$CDUMP.$PDY" - export M_TANKverfM1="$M_TANKverf/stats/$PSLOT/$CDUMP.$PDYm1" - export MY_MACHINE=$machine + export M_TANKverfM0="${M_TANKverf}/stats/${PSLOT}/${RUN}.${PDY}/${cyc}" + export M_TANKverfM1="${M_TANKverf}/stats/${PSLOT}/${RUN}.${PDYm1c}/${pcyc}" + export MY_MACHINE=${machine} - $VRFYMINSH + ${VRFYMINSH} fi @@ -172,28 +105,35 @@ fi ################################################################################ echo echo "=============== START TO RUN CYCLONE TRACK VERIFICATION ===============" -if [ $VRFYTRAK = "YES" ]; then - $TRACKERSH +if [[ ${VRFYTRAK} = "YES" ]]; then + + COMINsyn=${COMINsyn:-$(compath.py "${envir}/com/gfs/${gfs_ver}")/syndat} + export COMINsyn + + ${TRACKERSH} fi ################################################################################ echo echo "=============== START TO RUN CYCLONE GENESIS VERIFICATION ===============" -if [ $VRFYGENESIS = "YES" -a $CDUMP = "gfs" ]; then - $GENESISSH +if [[ ${VRFYGENESIS} = "YES" && "${CDUMP}" = "gfs" ]]; then + ${GENESISSH} fi ################################################################################ echo echo "=============== START TO RUN CYCLONE GENESIS VERIFICATION (FSU) ===============" -if [ $VRFYFSU = "YES" -a $CDUMP = "gfs" ]; then - $GENESISFSU +if [[ ${VRFYFSU} = "YES" && "${CDUMP}" = "gfs" ]]; then + ${GENESISFSU} fi ############################################################### # Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi +cd "${DATAROOT}" +if [[ ${KEEPDATA:-"NO"} = "NO" ]] ; then rm -rf "${DATA}" ; fi + + exit 0 diff --git a/jobs/rocoto/vrfy_gsl.sh b/jobs/rocoto/vrfy_gsl.sh index 666fbfab5b..3efc09444d 100755 --- a/jobs/rocoto/vrfy_gsl.sh +++ b/jobs/rocoto/vrfy_gsl.sh @@ -1,137 +1,74 @@ -#!/bin/bash -x +#! /usr/bin/env bash -############################################################### -## Abstract: -## Inline verification and diagnostics driver script -## RUN_ENVIR : runtime environment (emc | nco) -## HOMEgfs : /full/path/to/workflow -## EXPDIR : /full/path/to/config/files -## CDATE : current analysis date (YYYYMMDDHH) -## CDUMP : cycle name (gdas / gfs) -## PDY : current date (YYYYMMDD) -## cyc : current cycle (HH) -############################################################### +source "${HOMEgfs}/ush/preamble.sh" ############################################################### -echo -echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. 
$HOMEgfs/ush/load_fv3gfs_modules.sh +# Source FV3GFS workflow modules +source "${HOMEgfs}/ush/load_fv3gfs_modules.sh" status=$? -[[ $status -ne 0 ]] && exit $status - +(( status != 0 )) && exit "${status}" -############################################################### -echo -echo "=============== START TO SOURCE RELEVANT CONFIGS ===============" -configs="base vrfy" -for config in $configs; do - . $EXPDIR/config.${config} - status=$? - [[ $status -ne 0 ]] && exit $status -done +export job="vrfy" +export jobid="${job}.$$" +# TODO (#235) - This job is calling multiple j-jobs and doing too much in general +# Also, this forces us to call the config files here instead of the j-job +source "${HOMEgfs}/ush/jjob_header.sh" -e "vrfy" -c "base vrfy" ############################################################### -echo -echo "=============== START TO SOURCE MACHINE RUNTIME ENVIRONMENT ===============" -. $BASE_ENV/${machine}.env vrfy -status=$? -[[ $status -ne 0 ]] && exit $status +export CDUMP="${RUN/enkf}" -############################################################### -export COMPONENT=${COMPONENT:-atmos} -export CDATEm1=$($NDATE -24 $CDATE) -export PDYm1=$(echo $CDATEm1 | cut -c1-8) +CDATEm1=$(${NDATE} -24 "${PDY}${cyc}") +export CDATEm1 +export PDYm1=${CDATEm1:0:8} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export COMIN="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" -export DATAROOT="$RUNDIR/$CDATE/$CDUMP/vrfy.${jobid}" -[[ -d $DATAROOT ]] && rm -rf $DATAROOT -mkdir -p $DATAROOT +CDATEm1c=$(${NDATE} -06 "${PDY}${cyc}") +PDYm1c=${CDATEm1c:0:8} +pcyc=${CDATEm1c:8:2} ############################################################### +# TODO: We can likely drop support for these dev-only grib1 precip files echo echo "=============== START TO GENERATE QUARTER DEGREE GRIB1 FILES ===============" -if [ $MKPGB4PRCP = "YES" -a $CDUMP = "gfs" ]; then - if [ ! -d $ARCDIR ]; then mkdir $ARCDIR ; fi +if [[ ${MKPGB4PRCP} = "YES" && ${CDUMP} == "gfs" ]]; then + YMD=${PDY} HH=${cyc} generate_com -x COM_ATMOS_MASTER + if [ ! 
-d ${ARCDIR} ]; then mkdir -p ${ARCDIR} ; fi nthreads_env=${OMP_NUM_THREADS:-1} # get threads set in env export OMP_NUM_THREADS=1 - cd $COMIN - fhmax=${vhr_rain:-$FHMAX_GFS} - fhr=0 - while [ $fhr -le $fhmax ]; do - fhr2=$(printf %02i $fhr) - fhr3=$(printf %03i $fhr) - fname=${CDUMP}.t${cyc}z.sfluxgrbf$fhr3.grib2 - fileout=$ARCDIR/pgbq${fhr2}.${CDUMP}.${CDATE}.grib2 - $WGRIB2 $fname -match "(:PRATE:surface:)|(:TMP:2 m above ground:)" -grib $fileout - (( fhr = $fhr + 6 )) + cd "${COM_ATMOS_MASTER}" || exit 9 + fhmax=${vhr_rain:-${FHMAX_GFS}} + for (( fhr=0; fhr <= fhmax; fhr+=6 )); do + fhr2=$(printf %02i "${fhr}") + fhr3=$(printf %03i "${fhr}") + fname=${RUN}.t${cyc}z.sfluxgrbf${fhr3}.grib2 + fileout=${ARCDIR}/pgbq${fhr2}.${RUN}.${PDY}${cyc}.grib2 + ${WGRIB2} "${fname}" -match "(:PRATE:surface:)|(:TMP:2 m above ground:)" -grib "${fileout}" done - export OMP_NUM_THREADS=$nthreads_env # revert to threads set in env + export OMP_NUM_THREADS=${nthreads_env} # revert to threads set in env fi ############################################################### echo echo "=============== START TO RUN MOS ===============" -if [ $RUNMOS = "YES" -a $CDUMP = "gfs" ]; then - $RUNGFSMOSSH $PDY$cyc +if [[ "${RUNMOS}" == "YES" && "${CDUMP}" == "gfs" ]]; then + ${RUNGFSMOSSH} "${PDY}${cyc}" fi -############################################################### -echo -echo "=============== START TO RUN FIT2OBS VERIFICATION ===============" -if [ $VRFYFITS = "YES" -a $CDUMP = $CDFNL -a $CDATE != $SDATE ]; then - - export CDUMPFCST=$VDUMP - export TMPDIR="$RUNDIR/$CDATE/$CDUMP" - [[ ! -d $TMPDIR ]] && mkdir -p $TMPDIR - - xdate=$($NDATE -${VBACKUP_FITS} $CDATE) - - - export RUN_ENVIR_SAVE=$RUN_ENVIR - export RUN_ENVIR=$OUTPUT_FILE - - $PREPQFITSH $PSLOT $xdate $ROTDIR $ARCDIR $TMPDIR - - export RUN_ENVIR=$RUN_ENVIR_SAVE - -fi - - -############################################################### -echo -echo "=============== START TO RUN VSDB STEP1, VERIFY PRCIP AND GRID2OBS ===============" -if [ $CDUMP = "gfs" ]; then - - if [ $VSDB_STEP1 = "YES" -o $VRFYPRCP = "YES" -o $VRFYG2OBS = "YES" ]; then - - xdate=$(echo $($NDATE -${BACKDATEVSDB} $CDATE) | cut -c1-8) - export ARCDIR1="$NOSCRUB/archive" - export rundir="$RUNDIR/$CDUMP/$CDATE/vrfy/vsdb_exp" - export COMROT="$ARCDIR1/dummy" - - $VSDBJOBSH $VSDBSH $xdate $vlength $cyc $PSLOT $CDATE $CDUMP $gfs_cyc $rain_bucket $machine - fi -fi - ############################################################### echo echo "=============== START TO RUN RADMON DATA EXTRACTION ===============" -if [ $VRFYRAD = "YES" -a $CDUMP = $CDFNL -a $CDATE != $SDATE ]; then - export EXP=$PSLOT - export COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" - export jlogfile="$ROTDIR/logs/$CDATE/${CDUMP}radmon.log" - export TANKverf_rad="$TANKverf/stats/$PSLOT/$CDUMP.$PDY" - export TANKverf_radM1="$TANKverf/stats/$PSLOT/$CDUMP.$PDYm1" - export MY_MACHINE=$machine +if [[ "${VRFYRAD}" == "YES" && "${CDUMP}" == "${CDFNL}" && "${PDY}${cyc}" != "${SDATE}" ]]; then + + export EXP=${PSLOT} + export TANKverf_rad="${TANKverf}/stats/${PSLOT}/${RUN}.${PDY}/${cyc}" + export TANKverf_radM1="${TANKverf}/stats/${PSLOT}/${RUN}.${PDYm1c}/${pcyc}" + export MY_MACHINE=${machine} - $VRFYRADSH + ${VRFYRADSH} fi @@ -139,16 +76,14 @@ fi ############################################################### echo echo "=============== START TO RUN OZMON DATA EXTRACTION ===============" -if [ $VRFYOZN = "YES" -a $CDUMP = $CDFNL -a $CDATE != $SDATE ]; then +if [[ "${VRFYOZN}" == "YES" && "${CDUMP}" == "${CDFNL}" && "${PDY}${cyc}" != 
"${SDATE}" ]]; then - export EXP=$PSLOT - export COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" - export jlogfile="$ROTDIR/logs/$CDATE/${CDUMP}oznmon.log" - export TANKverf_ozn="$TANKverf_ozn/stats/$PSLOT/$CDUMP.$PDY" - export TANKverf_oznM1="$TANKverf_ozn/stats/$PSLOT/$CDUMP.$PDYm1" - export MY_MACHINE=$machine + export EXP=${PSLOT} + export TANKverf_ozn="${TANKverf_ozn}/stats/${PSLOT}/${RUN}.${PDY}/${cyc}" + export TANKverf_oznM1="${TANKverf_ozn}/stats/${PSLOT}/${RUN}.${PDYm1c}/${pcyc}" + export MY_MACHINE=${machine} - $VRFYOZNSH + ${VRFYOZNSH} fi @@ -156,15 +91,13 @@ fi ############################################################### echo echo "=============== START TO RUN MINMON ===============" -if [ $VRFYMINMON = "YES" -a $CDATE != $SDATE ]; then +if [[ "${VRFYMINMON}" == "YES" && "${PDY}${cyc}" != "${SDATE}" ]]; then - export COMOUT="$ROTDIR/$CDUMP.$PDY/$cyc/$COMPONENT" - export jlogfile="$ROTDIR/logs/$CDATE/${CDUMP}minmon.log" - export M_TANKverfM0="$M_TANKverf/stats/$PSLOT/$CDUMP.$PDY" - export M_TANKverfM1="$M_TANKverf/stats/$PSLOT/$CDUMP.$PDYm1" - export MY_MACHINE=$machine + export M_TANKverfM0="${M_TANKverf}/stats/${PSLOT}/${RUN}.${PDY}/${cyc}" + export M_TANKverfM1="${M_TANKverf}/stats/${PSLOT}/${RUN}.${PDYm1c}/${pcyc}" + export MY_MACHINE=${machine} - $VRFYMINSH + ${VRFYMINSH} fi @@ -172,39 +105,48 @@ fi ################################################################################ echo echo "=============== START TO RUN CYCLONE TRACK VERIFICATION ===============" -if [ $VRFYTRAK = "YES" ]; then - $TRACKERSH +if [[ ${VRFYTRAK} = "YES" ]]; then + + COMINsyn=${COMINsyn:-$(compath.py "${envir}/com/gfs/${gfs_ver}")/syndat} + export COMINsyn + + ${TRACKERSH} fi # GSL - rename tracker file and change AVNO to $ACTFNAME ## JKH export TRACKDIR="${ROTDIR}/../../tracks" if [ ! -d $TRACKDIR ]; then mkdir $TRACKDIR ; fi typeset -u ucatcf=$ATCFNAME -if [ -f $COMIN/avnop.t${cyc}z.cyclone.trackatcfunix ]; then - cat $COMIN/avnop.t${cyc}z.cyclone.trackatcfunix | sed s:AVNO:${ucatcf}:g > $TRACKDIR/tctrk.atcf.${CDATE}.${ATCFNAME}.txt - cp -p $TRACKDIR/tctrk.atcf.${CDATE}.${ATCFNAME}.txt $COMIN/tctrk.atcf.${CDATE}.${ATCFNAME}.txt - rm -f $COMIN/avnop.t${cyc}z.cyclone.trackatcfunix $COMIN/avno.t${cyc}z.cyclone.trackatcfunix - echo "$COMIN/avno*.t${cyc}z.cyclone.trackatcfunix deleted...." +YMD=${PDY} HH=${cyc} generate_com -x COM_ATMOS_TRACK +if [ -f ${COM_ATMOS_TRACK}/avnop.t${cyc}z.cyclone.trackatcfunix ]; then + cat $COM_ATMOS_TRACK/avnop.t${cyc}z.cyclone.trackatcfunix | sed s:AVNO:${ucatcf}:g > $TRACKDIR/tctrk.atcf.${CDATE}.${ATCFNAME}.txt + cp -p $TRACKDIR/tctrk.atcf.${CDATE}.${ATCFNAME}.txt $COM_ATMOS_TRACK/tctrk.atcf.${CDATE}.${ATCFNAME}.txt + rm -f $COM_ATMOS_TRACK/avnop.t${cyc}z.cyclone.trackatcfunix $COM_ATMOS_TRACK/avno.t${cyc}z.cyclone.trackatcfunix + echo "$COM_ATMOS_TRACK/avno*.t${cyc}z.cyclone.trackatcfunix deleted...." else echo "no track file created...." 
fi + ################################################################################ echo echo "=============== START TO RUN CYCLONE GENESIS VERIFICATION ===============" -if [ $VRFYGENESIS = "YES" -a $CDUMP = "gfs" ]; then - $GENESISSH +if [[ ${VRFYGENESIS} = "YES" && "${CDUMP}" = "gfs" ]]; then + ${GENESISSH} fi ################################################################################ echo echo "=============== START TO RUN CYCLONE GENESIS VERIFICATION (FSU) ===============" -if [ $VRFYFSU = "YES" -a $CDUMP = "gfs" ]; then - $GENESISFSU +if [[ ${VRFYFSU} = "YES" && "${CDUMP}" = "gfs" ]]; then + ${GENESISFSU} fi ############################################################### # Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi +cd "${DATAROOT}" +if [[ ${KEEPDATA:-"NO"} = "NO" ]] ; then rm -rf "${DATA}" ; fi + + exit 0 diff --git a/jobs/rocoto/wafs.sh b/jobs/rocoto/wafs.sh index f50f1b6801..59d1ede139 100755 --- a/jobs/rocoto/wafs.sh +++ b/jobs/rocoto/wafs.sh @@ -1,55 +1,46 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} + +export job="wafs" +export jobid="${job}.$$" ############################################################### +############################################################### +# TODO: sourcing configs should be in the j-job echo "=============== BEGIN TO SOURCE RELEVANT CONFIGS ===============" configs="base wafs" -for config in $configs; do - . $EXPDIR/config.${config} +for config in ${configs}; do + . ${EXPDIR}/config.${config} status=$? - [[ $status -ne 0 ]] && exit $status + [[ ${status} -ne 0 ]] && exit ${status} done ############################################################### -export pid=${pid:-$$} -export jobid=${job}.${pid} -export DATAROOT="$RUNDIR/$CDATE/$CDUMP/wafs.$jobid" -[[ -d $DATAROOT ]] && rm -rf $DATAROOT -mkdir -p $DATAROOT - -export DATA="${DATAROOT}/$job" - -############################################################### echo echo "=============== START TO RUN WAFS ===============" # Loop through fcsthrs hr=0 -while [ $hr -le 120 ]; do +while [ ${hr} -le 120 ]; do - if [ $hr -le 100 ]; then - export fcsthrs="$(printf "%02d" $(( 10#$hr )) )" - else - export fcsthrs=$hr - fi + export fcsthrs=$(printf "%03d" ${hr}) # Execute the JJOB - $HOMEgfs/jobs/JGFS_ATMOS_WAFS + ${HOMEgfs}/jobs/JGFS_ATMOS_WAFS status=$? - [[ $status -ne 0 ]] && exit $status + [[ ${status} -ne 0 ]] && exit ${status} - hr=$(expr $hr + 6) + hr=$(expr ${hr} + 6) done -############################################################### -# Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi exit 0 diff --git a/jobs/rocoto/wafsblending.sh b/jobs/rocoto/wafsblending.sh index bbdb9f8205..e16e8fa2b3 100755 --- a/jobs/rocoto/wafsblending.sh +++ b/jobs/rocoto/wafsblending.sh @@ -1,40 +1,38 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? 
-[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} + +export job="wafsblending" +export jobid="${job}.$$" ############################################################### +# TODO: sourcing configs should be in the j-job echo "=============== BEGIN TO SOURCE RELEVANT CONFIGS ===============" configs="base wafsblending" -for config in $configs; do - . $EXPDIR/config.${config} +for config in ${configs}; do + . ${EXPDIR}/config.${config} status=$? - [[ $status -ne 0 ]] && exit $status + [[ ${status} -ne 0 ]] && exit ${status} done -############################################################### - -export DATAROOT="$RUNDIR/$CDATE/$CDUMP/wafsblending" -[[ -d $DATAROOT ]] && rm -rf $DATAROOT -mkdir -p $DATAROOT - -export pid=${pid:-$$} -export jobid=${job}.${pid} -export DATA="${DATAROOT}/$job" +# TODO: Missing source machine runtime environment ############################################################### + echo echo "=============== START TO RUN WAFSBLENDING ===============" # Execute the JJOB -$HOMEgfs/jobs/JGFS_ATMOS_WAFS_BLENDING +${HOMEgfs}/jobs/JGFS_ATMOS_WAFS_BLENDING status=$? -exit $status +[[ ${status} -ne 0 ]] && exit ${status} ############################################################### -# Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi + exit 0 diff --git a/jobs/rocoto/wafsblending0p25.sh b/jobs/rocoto/wafsblending0p25.sh index 70f7c69005..11788baf4d 100755 --- a/jobs/rocoto/wafsblending0p25.sh +++ b/jobs/rocoto/wafsblending0p25.sh @@ -1,40 +1,38 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +export job="wafsblending0p25" +export jobid="${job}.$$" ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} ############################################################### +# TODO: sourcing configs should be in the j-job echo "=============== BEGIN TO SOURCE RELEVANT CONFIGS ===============" configs="base wafsblending0p25" -for config in $configs; do - . $EXPDIR/config.${config} +for config in ${configs}; do + . ${EXPDIR}/config.${config} status=$? - [[ $status -ne 0 ]] && exit $status + [[ ${status} -ne 0 ]] && exit ${status} done -############################################################### - -export DATAROOT="$RUNDIR/$CDATE/$CDUMP/wafsblending0p25" -[[ -d $DATAROOT ]] && rm -rf $DATAROOT -mkdir -p $DATAROOT - -export pid=${pid:-$$} -export jobid=${job}.${pid} -export DATA="${DATAROOT}/$job" +# TODO: Missing source machine runtime environment ############################################################### echo echo "=============== START TO RUN WAFSBLENDING0P25 ===============" # Execute the JJOB -$HOMEgfs/jobs/JGFS_ATMOS_WAFS_BLENDING_0P25 +${HOMEgfs}/jobs/JGFS_ATMOS_WAFS_BLENDING_0P25 status=$? -exit $status +[[ ${status} -ne 0 ]] && exit ${status} ############################################################### -# Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi + exit 0 diff --git a/jobs/rocoto/wafsgcip.sh b/jobs/rocoto/wafsgcip.sh index 8ecc6fb922..36b2b491d7 100755 --- a/jobs/rocoto/wafsgcip.sh +++ b/jobs/rocoto/wafsgcip.sh @@ -1,40 +1,45 @@ -#!/bin/bash -x +#!
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} + +export job="wafsgcip" +export jobid="${job}.$$" +# ############################################################### +# TODO: sourcing configs should be in the j-job echo "=============== BEGIN TO SOURCE RELEVANT CONFIGS ===============" configs="base wafsgcip" -for config in $configs; do - . $EXPDIR/config.${config} +for config in ${configs}; do + . ${EXPDIR}/config.${config} status=$? - [[ $status -ne 0 ]] && exit $status + [[ ${status} -ne 0 ]] && exit ${status} done -############################################################### - -export DATAROOT="$RUNDIR/$CDATE/$CDUMP/wafsgcip" -[[ -d $DATAROOT ]] && rm -rf $DATAROOT -mkdir -p $DATAROOT +########################################## +# Source machine runtime environment +########################################## +. ${HOMEgfs}/env/${machine}.env wafsgcip +status=$? +[[ ${status} -ne 0 ]] && exit ${status} -export pid=${pid:-$$} -export jobid=${job}.${pid} -export DATA="${DATAROOT}/$job" +############################################################### ############################################################### echo echo "=============== START TO RUN WAFSGCIP ===============" # Execute the JJOB -$HOMEgfs/jobs/JGFS_ATMOS_WAFS_GCIP +${HOMEgfs}/jobs/JGFS_ATMOS_WAFS_GCIP status=$? -exit $status +[[ ${status} -ne 0 ]] && exit ${status} ############################################################### -# Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi + exit 0 diff --git a/jobs/rocoto/wafsgrib2.sh b/jobs/rocoto/wafsgrib2.sh index d6c379db8e..a2903e5aa2 100755 --- a/jobs/rocoto/wafsgrib2.sh +++ b/jobs/rocoto/wafsgrib2.sh @@ -1,40 +1,38 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} + +export job="wafsgrib2" +export jobid=${job}.$$ ############################################################### +# TODO: Sourcing configs should be done in the j-job echo "=============== BEGIN TO SOURCE RELEVANT CONFIGS ===============" configs="base wafsgrib2" -for config in $configs; do - . $EXPDIR/config.${config} +for config in ${configs}; do + . ${EXPDIR}/config.${config} status=$? - [[ $status -ne 0 ]] && exit $status + [[ ${status} -ne 0 ]] && exit ${status} done -############################################################### - -export DATAROOT="$RUNDIR/$CDATE/$CDUMP/wafsgrib2" -[[ -d $DATAROOT ]] && rm -rf $DATAROOT -mkdir -p $DATAROOT - -export pid=${pid:-$$} -export jobid=${job}.${pid} -export DATA="${DATAROOT}/$job" +# TODO: Missing sourcing of $MACHINE.env ############################################################### + echo echo "=============== START TO RUN WAFSGRIB2 ===============" # Execute the JJOB -$HOMEgfs/jobs/JGFS_ATMOS_WAFS_GRIB2 +${HOMEgfs}/jobs/JGFS_ATMOS_WAFS_GRIB2 status=$? 
-exit $status +[[ ${status} -ne 0 ]] && exit ${status} ############################################################### -# Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi + exit 0 diff --git a/jobs/rocoto/wafsgrib20p25.sh b/jobs/rocoto/wafsgrib20p25.sh index ef95f47867..585ca23524 100755 --- a/jobs/rocoto/wafsgrib20p25.sh +++ b/jobs/rocoto/wafsgrib20p25.sh @@ -1,40 +1,37 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} + +export job="wafsgrib20p25" +export jobid="${job}.$$" ############################################################### +# TODO: sourcing configs should be in the j-job echo "=============== BEGIN TO SOURCE RELEVANT CONFIGS ===============" configs="base wafsgrib20p25" -for config in $configs; do - . $EXPDIR/config.${config} +for config in ${configs}; do + . ${EXPDIR}/config.${config} status=$? - [[ $status -ne 0 ]] && exit $status + [[ ${status} -ne 0 ]] && exit ${status} done -############################################################### - -export DATAROOT="$RUNDIR/$CDATE/$CDUMP/wafsgrib20p25" -[[ -d $DATAROOT ]] && rm -rf $DATAROOT -mkdir -p $DATAROOT - -export pid=${pid:-$$} -export jobid=${job}.${pid} -export DATA="${DATAROOT}/$job" +# TODO: missing sourcing $MACHINE.env ############################################################### echo echo "=============== START TO RUN WAFSGRIB20p25 ===============" # Execute the JJOB -$HOMEgfs/jobs/JGFS_ATMOS_WAFS_GRIB2_0P25 +${HOMEgfs}/jobs/JGFS_ATMOS_WAFS_GRIB2_0P25 status=$? -exit $status +[[ ${status} -ne 0 ]] && exit ${status} ############################################################### -# Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi + exit 0 diff --git a/jobs/rocoto/waveawipsbulls.sh b/jobs/rocoto/waveawipsbulls.sh index 6461a7aac9..4b6d6e1e82 100755 --- a/jobs/rocoto/waveawipsbulls.sh +++ b/jobs/rocoto/waveawipsbulls.sh @@ -1,36 +1,20 @@ -#!/bin/bash -x +#! /usr/bin/env bash -############################################################### -echo -echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. $HOMEgfs/ush/load_fv3gfs_modules.sh -status=$? -[[ $status -ne 0 ]] && exit $status - -############################################################### -echo -echo "=============== BEGIN TO SOURCE RELEVANT CONFIGS ===============" -configs="base waveawipsbulls" -for config in $configs; do - . $EXPDIR/config.${config} - status=$? - [[ $status -ne 0 ]] && exit $status -done +source "$HOMEgfs/ush/preamble.sh" ############################################################### -echo -echo "=============== BEGIN TO SOURCE MACHINE RUNTIME ENVIRONMENT ===============" -. $BASE_ENV/${machine}.env waveawipsbulls +# Source FV3GFS workflow modules +source ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} -export DBNROOT=/dev/null +export job="waveawipsbulls" +export jobid="${job}.$$" ############################################################### -echo -echo "=============== START TO RUN WAVE PRDGEN BULLS ===============" # Execute the JJOB $HOMEgfs/jobs/JGLOBAL_WAVE_PRDGEN_BULLS status=$? 
-exit $status + +exit $status diff --git a/jobs/rocoto/waveawipsgridded.sh b/jobs/rocoto/waveawipsgridded.sh index 70e85dc2c7..c10f2f39fd 100755 --- a/jobs/rocoto/waveawipsgridded.sh +++ b/jobs/rocoto/waveawipsgridded.sh @@ -1,35 +1,20 @@ -#!/bin/bash -x +#! /usr/bin/env bash -############################################################### -echo -echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. $HOMEgfs/ush/load_fv3gfs_modules.sh -status=$? -[[ $status -ne 0 ]] && exit $status - -############################################################### -echo -echo "=============== BEGIN TO SOURCE RELEVANT CONFIGS ===============" -configs="base waveawipsgridded" -for config in $configs; do - . $EXPDIR/config.${config} - status=$? - [[ $status -ne 0 ]] && exit $status -done +source "$HOMEgfs/ush/preamble.sh" ############################################################### -echo -echo "=============== BEGIN TO SOURCE MACHINE RUNTIME ENVIRONMENT ===============" -. $BASE_ENV/${machine}.env waveawipsgridded +# Source FV3GFS workflow modules +source ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} -export DBNROOT=/dev/null +export job="waveawipsgridded" +export jobid="${job}.$$" ############################################################### -echo -echo "=============== START TO RUN WAVE PRDGEN GRIDDED ===============" # Execute the JJOB -$HOMEgfs/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED +${HOMEgfs}/jobs/JGLOBAL_WAVE_PRDGEN_GRIDDED status=$? -exit $status + + +exit ${status} diff --git a/jobs/rocoto/wavegempak.sh b/jobs/rocoto/wavegempak.sh index 870f6281b4..58fbcdcc5b 100755 --- a/jobs/rocoto/wavegempak.sh +++ b/jobs/rocoto/wavegempak.sh @@ -1,33 +1,18 @@ -#!/bin/bash -x +#! /usr/bin/env bash -############################################################### -echo -echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. $HOMEgfs/ush/load_fv3gfs_modules.sh -status=$? -[[ $status -ne 0 ]] && exit $status +source "$HOMEgfs/ush/preamble.sh" ############################################################### -echo -echo "=============== BEGIN TO SOURCE RELEVANT CONFIGS ===============" -configs="base wavegempak" -for config in $configs; do - . $EXPDIR/config.${config} - status=$? - [[ $status -ne 0 ]] && exit $status -done - -############################################################### -echo -echo "=============== BEGIN TO SOURCE MACHINE RUNTIME ENVIRONMENT ===============" -. $BASE_ENV/${machine}.env wavegempak +source $HOMEgfs/ush/load_fv3gfs_modules.sh status=$? [[ $status -ne 0 ]] && exit $status +export job="wavegempak" +export jobid="${job}.$$" + ############################################################### -echo -echo "=============== START TO RUN WAVE GEMPAK ===============" # Execute the JJOB $HOMEgfs/jobs/JGLOBAL_WAVE_GEMPAK status=$? + exit $status diff --git a/jobs/rocoto/waveinit.sh b/jobs/rocoto/waveinit.sh index 14a6a5cdeb..d0c3f49929 100755 --- a/jobs/rocoto/waveinit.sh +++ b/jobs/rocoto/waveinit.sh @@ -1,21 +1,23 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$?
-[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} + +export job="waveinit" +export jobid="${job}.$$" ############################################################### echo echo "=============== START TO RUN WAVE INIT ===============" # Execute the JJOB -$HOMEgfs/jobs/JGLOBAL_WAVE_INIT +${HOMEgfs}/jobs/JGLOBAL_WAVE_INIT status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} -############################################################### -# Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi exit 0 diff --git a/jobs/rocoto/wavepostbndpnt.sh b/jobs/rocoto/wavepostbndpnt.sh index 52c5d9fafe..5d26498356 100755 --- a/jobs/rocoto/wavepostbndpnt.sh +++ b/jobs/rocoto/wavepostbndpnt.sh @@ -1,21 +1,23 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} + +export job="wavepostbndpnt" +export jobid="${job}.$$" ############################################################### echo echo "=============== START TO RUN WAVE_POST_BNDPNT ===============" # Execute the JJOB -$HOMEgfs/jobs/JGLOBAL_WAVE_POST_BNDPNT +${HOMEgfs}/jobs/JGLOBAL_WAVE_POST_BNDPNT status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} -############################################################### -# Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi exit 0 diff --git a/jobs/rocoto/wavepostbndpntbll.sh b/jobs/rocoto/wavepostbndpntbll.sh index 72fb77bd1e..ce4f9e6b2d 100755 --- a/jobs/rocoto/wavepostbndpntbll.sh +++ b/jobs/rocoto/wavepostbndpntbll.sh @@ -1,21 +1,23 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} + +export job="wavepostbndpntbll" +export jobid="${job}.$$" ############################################################### echo echo "=============== START TO RUN WAVE_POST_BNDPNT ===============" # Execute the JJOB -$HOMEgfs/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL +${HOMEgfs}/jobs/JGLOBAL_WAVE_POST_BNDPNTBLL status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} -############################################################### -# Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi exit 0 diff --git a/jobs/rocoto/wavepostpnt.sh b/jobs/rocoto/wavepostpnt.sh index f00c09550b..9efb755dec 100755 --- a/jobs/rocoto/wavepostpnt.sh +++ b/jobs/rocoto/wavepostpnt.sh @@ -1,21 +1,23 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? 
-[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} + +export job="wavepostpnt" +export jobid="${job}.$$" ############################################################### echo echo "=============== START TO RUN WAVE_POST_PNT ===============" # Execute the JJOB -$HOMEgfs/jobs/JGLOBAL_WAVE_POST_PNT +${HOMEgfs}/jobs/JGLOBAL_WAVE_POST_PNT status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} -############################################################### -# Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi exit 0 diff --git a/jobs/rocoto/wavepostsbs.sh b/jobs/rocoto/wavepostsbs.sh index d8ec7cc268..e4bea0bc34 100755 --- a/jobs/rocoto/wavepostsbs.sh +++ b/jobs/rocoto/wavepostsbs.sh @@ -1,21 +1,20 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### -echo -echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. $HOMEgfs/ush/load_fv3gfs_modules.sh +# Source FV3GFS workflow modules +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} + +export job="wavepostsbs" +export jobid="${job}.$$" ############################################################### -echo -echo "=============== START TO RUN WAVE POST_SBS ===============" # Execute the JJOB -$HOMEgfs/jobs/JGLOBAL_WAVE_POST_SBS +${HOMEgfs}/jobs/JGLOBAL_WAVE_POST_SBS status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} -############################################################### -# Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi exit 0 diff --git a/jobs/rocoto/waveprep.sh b/jobs/rocoto/waveprep.sh index 9705fbc31a..0cbafde87e 100755 --- a/jobs/rocoto/waveprep.sh +++ b/jobs/rocoto/waveprep.sh @@ -1,21 +1,23 @@ -#!/bin/bash -x +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" ############################################################### echo echo "=============== START TO SOURCE FV3GFS WORKFLOW MODULES ===============" -. $HOMEgfs/ush/load_fv3gfs_modules.sh +. ${HOMEgfs}/ush/load_fv3gfs_modules.sh status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} + +export job="waveprep" +export jobid="${job}.$$" ############################################################### echo echo "=============== START TO RUN WAVE PREP ===============" # Execute the JJOB -$HOMEgfs/jobs/JGLOBAL_WAVE_PREP +${HOMEgfs}/jobs/JGLOBAL_WAVE_PREP status=$? -[[ $status -ne 0 ]] && exit $status +[[ ${status} -ne 0 ]] && exit ${status} -############################################################### -# Force Exit out cleanly -if [ ${KEEPDATA:-"NO"} = "NO" ] ; then rm -rf $DATAROOT ; fi exit 0 diff --git a/modulefiles/module-setup.csh.inc b/modulefiles/module-setup.csh.inc index aafe5c7fc5..a3cd672300 100644 --- a/modulefiles/module-setup.csh.inc +++ b/modulefiles/module-setup.csh.inc @@ -2,9 +2,16 @@ set __ms_shell=csh eval "if ( -d / ) set __ms_shell=tcsh" -if ( { test -d /lfs3 } ) then +if ( { test -d /lfs/f1 } ) then + # We are on NOAA Cactus or Dogwood if ( ! { module help >& /dev/null } ) then - source /apps/lmod/lmod/init/$__ms_shell + source /usr/share/lmod/lmod/init/$__ms_shell + fi + module reset +else if ( { test -d /mnt/lfs1 } ) then + # We are on NOAA Jet + if ( ! 
{ module help >& /dev/null } ) then + source /apps/lmod/lmod/init/$__ms_shell endif module purge else if ( { test -d /scratch1 } ) then @@ -19,45 +26,12 @@ else if ( { test -d /work } ) then source /apps/lmod/init/$__ms_shell endif module purge -else if ( { test -d /scratch1 } ) then - # We are on NOAA Hera - if ( ! { module help >& /dev/null } ) then - source /apps/lmod/lmod/init/$__ms_shell - endif - module purge -else if ( { test -d /jetmon } ) then - # We are on NOAA Jet - if ( ! { module help >& /dev/null } ) then - source /apps/lmod/lmod/init/$__ms_shell - endif - module purge -else if ( { test -d /gpfs/hps -a -e /etc/SuSE-release } ) then - # We are on NOAA Luna or Surge - if ( ! { module help >& /dev/null } ) then - source /opt/modules/default/init/$__ms_shell - endif - module purge - module purge - unset _LMFILES_ - unset LOADEDMODULES - module use /opt/modulefiles - module use /opt/cray/ari/modulefiles - module use /opt/cray/craype/default/alt-modulefiles - module use /opt/cray/alt-modulefiles - module use /gpfs/hps/nco/ops/nwprod/modulefiles - module use /gpfs/hps/nco/ops/nwprod/lib/modulefiles - module use /usrx/local/prod/modulefiles -else if ( { test -d /dcom -a -d /hwrf } ) then - # We are on NOAA Tide or Gyre - if ( ! { module help >& /dev/null } ) then - source /usrx/local/Modules/default/init/$__ms_shell - endif - module purge -else if ( { test -L /usrx && sh -c "readlink /usrx 2> /dev/null | grep dell" } ) then - # We are on WCOSS Mars or Venus +else if ( { test -d /data/prod } ) then + # We are on SSEC S4 if ( ! { module help >& /dev/null } ) then - source /usrx/local/prod/lmod/lmod/init/$__ms_shell + source /usr/share/lmod/lmod/init/$__ms_shell endif + source /etc/profile module purge else if ( { test -d /glade } ) then # We are on NCAR Yellowstone @@ -66,7 +40,7 @@ else if ( { test -d /glade } ) then endif module purge else if ( { test -d /lustre -a -d /ncrc } ) then - # We are on GAEA. + # We are on GAEA. if ( ! { module help >& /dev/null } ) then # We cannot simply load the module command. The GAEA # /etc/csh.login modifies a number of module-related variables diff --git a/modulefiles/module-setup.sh.inc b/modulefiles/module-setup.sh.inc index 75c7249ea4..e5322cbb2c 100644 --- a/modulefiles/module-setup.sh.inc +++ b/modulefiles/module-setup.sh.inc @@ -16,7 +16,13 @@ else __ms_shell=sh fi -if [[ -d /lfs3 ]] ; then +if [[ -d /lfs/f1 ]] ; then + # We are on NOAA Cactus or Dogwood + if ( ! eval module help > /dev/null 2>&1 ) ; then + source /usr/share/lmod/lmod/init/$__ms_shell + fi + module reset +elif [[ -d /mnt/lfs1 ]] ; then # We are on NOAA Jet if ( ! eval module help > /dev/null 2>&1 ) ; then source /apps/lmod/lmod/init/$__ms_shell @@ -34,35 +40,6 @@ elif [[ -d /work ]] ; then source /apps/lmod/lmod/init/$__ms_shell fi module purge -elif [[ -d /gpfs/hps && -e /etc/SuSE-release ]] ; then - # We are on NOAA Luna or Surge - if ( ! eval module help > /dev/null 2>&1 ) ; then - source /opt/modules/default/init/$__ms_shell - fi - module purge - module purge - # Workaround until module issues are fixed: - unset _LMFILES_ - unset LOADEDMODULES - module use /opt/modulefiles - module use /opt/cray/ari/modulefiles - module use /opt/cray/craype/default/alt-modulefiles - module use /opt/cray/alt-modulefiles - module use /gpfs/hps/nco/ops/nwprod/modulefiles - module use /gpfs/hps/nco/ops/nwprod/lib/modulefiles - module use /usrx/local/prod/modulefiles -elif [[ -d /dcom && -d /hwrf ]] ; then - # We are on NOAA Tide or Gyre - if ( ! 
eval module help > /dev/null 2>&1 ) ; then - source /usrx/local/Modules/default/init/$__ms_shell - fi - module purge -elif [[ -L /usrx && "$( readlink /usrx 2> /dev/null )" =~ dell ]] ; then - # We are on NOAA Mars or Venus - if ( ! eval module help > /dev/null 2>&1 ) ; then - source /usrx/local/prod/lmod/lmod/init/$__ms_shell - fi - module purge elif [[ -d /glade ]] ; then # We are on NCAR Yellowstone if ( ! eval module help > /dev/null 2>&1 ) ; then @@ -70,7 +47,7 @@ elif [[ -d /glade ]] ; then fi module purge elif [[ -d /lustre && -d /ncrc ]] ; then - # We are on GAEA. + # We are on GAEA. if ( ! eval module help > /dev/null 2>&1 ) ; then # We cannot simply load the module command. The GAEA # /etc/profile modifies a number of module-related variables @@ -83,7 +60,7 @@ elif [[ -d /lustre && -d /ncrc ]] ; then __ms_source_etc_profile=no fi module purge - # clean up after purge + # clean up after purge unset _LMFILES_ unset _LMFILES_000 unset _LMFILES_001 @@ -109,6 +86,12 @@ elif [[ -d /lustre && -d /ncrc ]] ; then source /etc/profile unset __ms_source_etc_profile fi +elif [[ -d /data/prod ]] ; then + # We are on SSEC's S4 + if ( ! eval module help > /dev/null 2>&1 ) ; then + source /usr/share/lmod/lmod/init/$__ms_shell + fi + module purge else echo WARNING: UNKNOWN PLATFORM 1>&2 fi diff --git a/modulefiles/module_base.hera.lua b/modulefiles/module_base.hera.lua index e9fe2088a9..9634202bb5 100644 --- a/modulefiles/module_base.hera.lua +++ b/modulefiles/module_base.hera.lua @@ -9,41 +9,30 @@ load(pathJoin("hpc-intel", "18.0.5.274")) load(pathJoin("hpc-impi", "2018.0.4")) load(pathJoin("hpss", "hpss")) -load(pathJoin("nco", "4.9.1")) load(pathJoin("gempak", "7.4.2")) load(pathJoin("ncl", "6.6.2")) - -load(pathJoin("prod_util", "1.2.2")) -load(pathJoin("grib_util", "1.2.2")) - -load(pathJoin("crtm", "2.3.0")) -setenv("CRTM_FIX","/scratch2/NCEPDEV/nwprod/NCEPLIBS/fix/crtm_v2.3.0") - load(pathJoin("jasper", "2.0.25")) -load(pathJoin("zlib", "1.2.11")) load(pathJoin("png", "1.6.35")) +load(pathJoin("cdo", "1.9.5")) +load(pathJoin("R", "3.5.0")) load(pathJoin("hdf5", "1.10.6")) load(pathJoin("netcdf", "4.7.4")) -load(pathJoin("pio", "2.5.2")) -load(pathJoin("esmf", "8.2.1b04")) -load(pathJoin("fms", "2021.03")) -load(pathJoin("bacio", "2.4.1")) -load(pathJoin("g2", "3.4.2")) +load(pathJoin("nco", "4.9.1")) +load(pathJoin("prod_util", "1.2.2")) +load(pathJoin("grib_util", "1.2.2")) load(pathJoin("g2tmpl", "1.10.0")) -load(pathJoin("ip", "3.3.3")) -load(pathJoin("nemsio", "2.5.2")) -load(pathJoin("sp", "2.3.3")) -load(pathJoin("w3emc", "2.7.3")) -load(pathJoin("w3nco", "2.4.1")) - +load(pathJoin("ncdiag", "1.0.0")) +load(pathJoin("crtm", "2.4.0")) load(pathJoin("wgrib2", "2.0.8")) setenv("WGRIB2","wgrib2") -load(pathJoin("cdo", "1.9.5")) +prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles")) +load(pathJoin("prepobs", "1.0.1")) -load(pathJoin("R", "3.5.0")) +prepend_path("MODULEPATH", pathJoin("/scratch1/NCEPDEV/global/glopara/git/Fit2Obs/v1.0.0/modulefiles")) +load(pathJoin("fit2obs", "1.0.0")) -- Temporary until official hpc-stack is updated prepend_path("MODULEPATH", "/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/hpc-stack/modulefiles/stack") diff --git a/modulefiles/module_base.jet.lua b/modulefiles/module_base.jet.lua index cb8e632086..dbca3c4500 100644 --- a/modulefiles/module_base.jet.lua +++ b/modulefiles/module_base.jet.lua @@ -2,35 +2,42 @@ help([[ Load environment to run GFS on Jet ]]) 
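The module-setup changes above keep the long-standing platform-detection idiom while adding WCOSS2 (/lfs/f1) and SSEC S4 (/data/prod): probe for a path that exists only on one machine, source that machine's Lmod init file if the module command is not yet defined, then clear any inherited modules. A condensed bash sketch of the idiom, showing only the two probes added in this change:

# Identify the platform by a directory unique to it, then initialize Lmod if needed
if [[ -d /lfs/f1 ]]; then            # NOAA Cactus or Dogwood (WCOSS2)
    if ! eval module help > /dev/null 2>&1; then
        source /usr/share/lmod/lmod/init/bash
    fi
    module reset
elif [[ -d /data/prod ]]; then       # SSEC S4
    if ! eval module help > /dev/null 2>&1; then
        source /usr/share/lmod/lmod/init/bash
    fi
    module purge
else
    echo "WARNING: UNKNOWN PLATFORM" 1>&2
fi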
-prepend_path("MODULEPATH", "/lfs4/HFIP/hfv3gfs/nwprod/hpc-stack/libs/modulefiles/stack") +prepend_path("MODULEPATH", "/lfs4/HFIP/hfv3gfs/role.epic/hpc-stack/libs/intel-18.0.5.274/modulefiles/stack") -load(pathJoin("hpc", "1.1.0")) +load(pathJoin("hpc", "1.2.0")) load(pathJoin("hpc-intel", "18.0.5.274")) load(pathJoin("hpc-impi", "2018.4.274")) +load(pathJoin("cmake", "3.20.1")) load("hpss") -load(pathJoin("nco", "4.9.1")) load(pathJoin("gempak", "7.4.2")) load(pathJoin("ncl", "6.6.2")) - -load(pathJoin("prod_util", "1.2.2")) -load(pathJoin("grib_util", "1.2.2")) - -load(pathJoin("crtm", "2.3.0")) -setenv("CRTM_FIX","/lfs4/HFIP/hfv3gfs/nwprod/NCEPLIBS/fix/crtm_v2.3.0") +load(pathJoin("jasper", "2.0.25")) +load(pathJoin("libpng", "1.6.35")) +load(pathJoin("cdo", "1.9.5")) +load(pathJoin("R", "4.0.2")) load(pathJoin("hdf5", "1.10.6")) load(pathJoin("netcdf", "4.7.4")) -load(pathJoin("esmf", "8_2_0_beta_snapshot_14")) -load(pathJoin("fms", "2021.03")) +load(pathJoin("nco", "4.9.1")) +load(pathJoin("prod_util", "1.2.2")) +load(pathJoin("grib_util", "1.2.2")) load(pathJoin("g2tmpl", "1.10.0")) - +load(pathJoin("ncdiag", "1.0.0")) +load(pathJoin("crtm", "2.4.0")) load(pathJoin("wgrib2", "2.0.8")) setenv("WGRIB2","wgrib2") -load(pathJoin("cdo", "1.9.5")) +prepend_path("MODULEPATH", pathJoin("/lfs4/HFIP/hfv3gfs/glopara/git/prepobs/v1.0.1/modulefiles")) +load(pathJoin("prepobs", "1.0.1")) + +prepend_path("MODULEPATH", "/contrib/anaconda/modulefiles") +load(pathJoin("anaconda", "5.3.1")) -load("rocoto") +prepend_path("MODULEPATH", pathJoin("/lfs4/HFIP/hfv3gfs/glopara/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles")) +load(pathJoin("prepobs", "1.0.1")) +prepend_path("MODULEPATH", pathJoin("/lfs4/HFIP/hfv3gfs/glopara/git/Fit2Obs/v1.0.0/modulefiles")) +load(pathJoin("fit2obs", "1.0.0")) whatis("Description: GFS run environment") diff --git a/modulefiles/module_base.orion.lua b/modulefiles/module_base.orion.lua old mode 100755 new mode 100644 index 45fa6c07f0..0c9033c07f --- a/modulefiles/module_base.orion.lua +++ b/modulefiles/module_base.orion.lua @@ -2,49 +2,36 @@ help([[ Load environment to run GFS on Orion ]]) -prepend_path("MODULEPATH", "/apps/contrib/NCEP/libs/hpc-stack/modulefiles/stack") +prepend_path("MODULEPATH", "/apps/contrib/NCEP/hpc-stack/libs/hpc-stack/modulefiles/stack") load(pathJoin("hpc", "1.1.0")) load(pathJoin("hpc-intel", "2018.4")) load(pathJoin("hpc-impi", "2018.4")) -load(pathJoin("nco", "4.8.1")) load(pathJoin("gempak", "7.5.1")) load(pathJoin("ncl", "6.6.2")) - -load(pathJoin("prod_util", "1.2.2")) -load(pathJoin("grib_util", "1.2.2")) - -load(pathJoin("crtm", "2.3.0")) -setenv("CRTM_FIX","/apps/contrib/NCEPLIBS/orion/fix/crtm_v2.3.0") - load(pathJoin("jasper", "2.0.25")) load(pathJoin("zlib", "1.2.11")) load(pathJoin("png", "1.6.35")) +load(pathJoin("cdo", "1.9.5")) load(pathJoin("hdf5", "1.10.6")) load(pathJoin("netcdf", "4.7.4")) -load(pathJoin("pio", "2.5.2")) -load(pathJoin("esmf", "8.2.1b04")) -load(pathJoin("fms", "2021.03")) -load(pathJoin("bacio", "2.4.1")) -load(pathJoin("g2", "3.4.2")) +load(pathJoin("nco", "4.8.1")) +load(pathJoin("prod_util", "1.2.2")) +load(pathJoin("grib_util", "1.2.2")) load(pathJoin("g2tmpl", "1.10.0")) -load(pathJoin("ip", "3.3.3")) -load(pathJoin("nemsio", "2.5.2")) -load(pathJoin("sp", "2.3.3")) -load(pathJoin("w3emc", "2.7.3")) -load(pathJoin("w3nco", "2.4.1")) - +load(pathJoin("ncdiag", "1.0.0")) +load(pathJoin("crtm", "2.4.0")) load(pathJoin("wgrib2", "2.0.8")) setenv("WGRIB2","wgrib2") -load("contrib") 
-load(pathJoin("rocoto", "1.3.3")) -load(pathJoin("slurm", "19.05.3-2")) +prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles")) +load(pathJoin("prepobs", "1.0.1")) -load(pathJoin("cdo", "1.9.5")) +prepend_path("MODULEPATH", pathJoin("/work/noaa/global/glopara/git/Fit2Obs/v1.0.0/modulefiles")) +load(pathJoin("fit2obs", "1.0.0")) -- Temporary until official hpc-stack is updated prepend_path("MODULEPATH", "/work2/noaa/global/wkolczyn/save/hpc-stack/modulefiles/stack") diff --git a/modulefiles/module_base.s4.lua b/modulefiles/module_base.s4.lua new file mode 100644 index 0000000000..5bd0f1d6fb --- /dev/null +++ b/modulefiles/module_base.s4.lua @@ -0,0 +1,37 @@ +help([[ +Load environment to run GFS on S4 +]]) + +load("license_intel") +prepend_path("MODULEPATH", "/data/prod/hpc-stack/modulefiles/stack") + +load(pathJoin("hpc", "1.1.0")) +load(pathJoin("hpc-intel", "18.0.4")) +load(pathJoin("hpc-impi", "18.0.4")) + +load(pathJoin("miniconda", "3.8-s4")) +load(pathJoin("ncl", "6.4.0-precompiled")) +load(pathJoin("cdo", "1.9.8")) +load(pathJoin("jasper", "2.0.25")) +load(pathJoin("zlib", "1.2.11")) +load(pathJoin("png", "1.6.35")) + +load(pathJoin("hdf5", "1.10.6")) +load(pathJoin("netcdf", "4.7.4")) + +load(pathJoin("nco", "4.9.3")) +load(pathJoin("prod_util", "1.2.2")) +load(pathJoin("grib_util", "1.2.2")) +load(pathJoin("g2tmpl", "1.10.0")) +load(pathJoin("ncdiag", "1.0.0")) +load(pathJoin("crtm", "2.4.0")) +load(pathJoin("wgrib2", "2.0.8")) +setenv("WGRIB2","wgrib2") + +prepend_path("MODULEPATH", pathJoin("/data/prod/glopara/git/prepobs/feature-GFSv17_com_reorg/modulefiles")) +load(pathJoin("prepobs", "1.0.1")) + +prepend_path("MODULEPATH", pathJoin("/data/prod/glopara/git/Fit2Obs/v1.0.0/modulefiles")) +load(pathJoin("fit2obs", "1.0.0")) + +whatis("Description: GFS run environment") diff --git a/modulefiles/module_base.wcoss2.lua b/modulefiles/module_base.wcoss2.lua new file mode 100644 index 0000000000..c27d69901a --- /dev/null +++ b/modulefiles/module_base.wcoss2.lua @@ -0,0 +1,40 @@ +help([[ +Load environment to run GFS on WCOSS2 +]]) + +load(pathJoin("PrgEnv-intel", "8.1.0")) +load(pathJoin("craype", "2.7.13")) +load(pathJoin("intel", "19.1.3.304")) +load(pathJoin("cray-mpich", "8.1.9")) +load(pathJoin("cray-pals", "1.0.17")) +load(pathJoin("cfp", "2.0.4")) +setenv("USE_CFP","YES") + +load(pathJoin("python", "3.8.6")) +load(pathJoin("gempak", "7.14.1")) +load(pathJoin("perl", "5.32.0")) +load(pathJoin("libjpeg", "9c")) +load(pathJoin("udunits", "2.2.28")) +load(pathJoin("gsl", "2.7")) +load(pathJoin("cdo", "1.9.8")) + +load(pathJoin("hdf5", "1.10.6")) +load(pathJoin("netcdf", "4.7.4")) + +load(pathJoin("nco", "4.7.9")) +load(pathJoin("prod_util", "2.0.9")) +load(pathJoin("grib_util", "1.2.3")) +load(pathJoin("bufr_dump", "1.0.0")) +load(pathJoin("util_shared", "1.4.0")) +load(pathJoin("g2tmpl", "1.9.1")) +load(pathJoin("ncdiag", "1.0.0")) +load(pathJoin("crtm", "2.4.0")) +load(pathJoin("wgrib2", "2.0.7")) + +prepend_path("MODULEPATH", pathJoin("/lfs/h2/emc/global/save/emc.global/git/prepobs/feature-GFSv17_com_reorg_log_update/modulefiles")) +load(pathJoin("prepobs", "1.0.1")) + +prepend_path("MODULEPATH", pathJoin("/lfs/h2/emc/global/save/emc.global/git/Fit2Obs/v1.0.0/modulefiles")) +load(pathJoin("fit2obs", "1.0.0")) + +whatis("Description: GFS run environment") diff --git a/modulefiles/module_gwci.hera.lua b/modulefiles/module_gwci.hera.lua new file mode 100644 index 0000000000..f4b62a5fd2 --- /dev/null +++ 
b/modulefiles/module_gwci.hera.lua @@ -0,0 +1,15 @@ +help([[ +Load environment to run GFS workflow setup scripts on Hera +]]) + +prepend_path("MODULEPATH", "/scratch2/NCEPDEV/nwprod/hpc-stack/libs/hpc-stack/modulefiles/stack") + +load(pathJoin("hpc", "1.1.0")) +load(pathJoin("hpc-intel", "18.0.5.274")) +load(pathJoin("hpc-impi", "2018.0.4")) + +load(pathJoin("netcdf","4.7.4")) +load(pathJoin("nccmp","1.8.7.0")) +load(pathJoin("wgrib2", "2.0.8")) + +whatis("Description: GFS run setup CI environment") diff --git a/modulefiles/module_gwci.orion.lua b/modulefiles/module_gwci.orion.lua new file mode 100644 index 0000000000..971ba01c65 --- /dev/null +++ b/modulefiles/module_gwci.orion.lua @@ -0,0 +1,22 @@ +help([[ +Load environment to run GFS workflow ci scripts on Orion +]]) + +prepend_path("MODULEPATH", "/apps/contrib/NCEP/hpc-stack/libs/hpc-stack/modulefiles/stack") + +load(pathJoin("hpc", "1.1.0")) +load(pathJoin("hpc-intel", "2018.4")) +load(pathJoin("hpc-impi", "2018.4")) +load(pathJoin("netcdf","4.7.4")) +load(pathJoin("netcdf","4.7.4-parallel")) +load(pathJoin("nccmp"," 1.8.7.0")) +load(pathJoin("contrib","0.1")) +load(pathJoin("wgrib2","3.0.2")) + +prepend_path("MODULEPATH", "/work2/noaa/global/wkolczyn/save/hpc-stack/modulefiles/stack") +load(pathJoin("hpc", "1.2.0")) +load(pathJoin("hpc-intel", "2018.4")) +load(pathJoin("hpc-miniconda3", "4.6.14")) +load(pathJoin("gfs_workflow", "1.0.0")) + +whatis("Description: GFS run ci top-level sripts environment") diff --git a/modulefiles/module_gwsetup.hera.lua b/modulefiles/module_gwsetup.hera.lua new file mode 100644 index 0000000000..a07b32b6a6 --- /dev/null +++ b/modulefiles/module_gwsetup.hera.lua @@ -0,0 +1,13 @@ +help([[ +Load environment to run GFS workflow setup scripts on Hera +]]) + +load(pathJoin("rocoto")) + +-- Temporary until official hpc-stack is updated +prepend_path("MODULEPATH", "/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/hpc-stack/modulefiles/stack") +load(pathJoin("hpc", "1.2.0")) +load(pathJoin("hpc-miniconda3", "4.6.14")) +load(pathJoin("gfs_workflow", "1.0.0")) + +whatis("Description: GFS run setup environment") diff --git a/modulefiles/module_gwsetup.orion.lua b/modulefiles/module_gwsetup.orion.lua new file mode 100644 index 0000000000..37f3187fb4 --- /dev/null +++ b/modulefiles/module_gwsetup.orion.lua @@ -0,0 +1,17 @@ +help([[ +Load environment to run GFS workflow ci scripts on Orion +]]) + +-- Temporary until official hpc-stack is updated + +prepend_path("MODULEPATH", "/apps/modulefiles/core") +load(pathJoin("contrib","0.1")) +load(pathJoin("rocoto","1.3.3")) +load(pathJoin("git","2.28.0")) + +prepend_path("MODULEPATH", "/work2/noaa/global/wkolczyn/save/hpc-stack/modulefiles/stack") +load(pathJoin("hpc", "1.2.0")) +load(pathJoin("hpc-miniconda3", "4.6.14")) +load(pathJoin("gfs_workflow", "1.0.0")) + +whatis("Description: GFS run ci top-level sripts environment") diff --git a/parm/chem/AERO_HISTORY.rc b/parm/chem/AERO_HISTORY.rc index e71d970f16..19f572bb8c 100644 --- a/parm/chem/AERO_HISTORY.rc +++ b/parm/chem/AERO_HISTORY.rc @@ -385,8 +385,8 @@ PC720x361-DC.LM: 72 inst_aod.duration: 010000, inst_aod.ref_time: 000000, inst_aod.grid_label: PC720x361-DC - inst_aod.fields: 'CAEXTTAUCA.bc' , 'CA.bc' , 'AOD_BC', - 'CAEXTTAUCA.oc' , 'CA.oc' , 'AOD_OC', + inst_aod.fields: 'CA.bcEXTTAU' , 'CA.bc' , 'AOD_BC', + 'CA.ocEXTTAU' , 'CA.oc' , 'AOD_OC', 'DUEXTTAU' , 'DU' , 'AOD_DU', 'NIEXTTAU' , 'NI' , 'AOD_NI', 'SSEXTTAU' , 'SS' , 'AOD_SS', @@ -403,8 +403,8 @@ PC720x361-DC.LM: 72 tavg_2d_rad.duration: 120000, 
tavg_2d_rad.ref_time: 000000, tavg_2d_rad.grid_label: PC720x361-DC - tavg_2d_rad.fields: 'CAEXTTAUCA.bc' , 'CA.bc' , - 'CAEXTTAUCA.oc' , 'CA.oc' , + tavg_2d_rad.fields: 'CA.bcEXTTAU' , 'CA.bc' , + 'CA.ocEXTTAU' , 'CA.oc' , 'CASCATAUCA.bc' , 'CA.bc' , 'CASCATAUCA.oc' , 'CA.oc' , 'DUEXTTAU' , 'DU' , diff --git a/parm/chem/CAP.rc b/parm/chem/CAP.rc index d8b0352e55..64425b1bb4 100644 --- a/parm/chem/CAP.rc +++ b/parm/chem/CAP.rc @@ -58,10 +58,10 @@ CAP_EXPORTS: MSA,SU msa SO2,SU so2 SO4,SU so4 - CAphobicCA.bc,CA.bc bc1 - CAphilicCA.bc,CA.bc bc2 - CAphobicCA.oc,CA.oc oc1 - CAphilicCA.oc,CA.oc oc2 + CA.bcphobic,CA.bc bc1 + CA.bcphilic,CA.bc bc2 + CA.ocphobic,CA.oc oc1 + CA.ocphilic,CA.oc oc2 NH3,NI nh3 NH4a,NI nh4a NO3an1,NI no3an1 diff --git a/parm/chem/DU2G_instance_DU.rc b/parm/chem/DU2G_instance_DU.rc index 1afed80ce1..c701efb128 100644 --- a/parm/chem/DU2G_instance_DU.rc +++ b/parm/chem/DU2G_instance_DU.rc @@ -41,6 +41,6 @@ pressure_lid_in_hPa: 0.01 emission_scheme: fengsha # choose among: fengsha, ginoux, k14 # FENGSHA settings -alpha: 2.0 -gamma: 0.8 +alpha: 0.04 +gamma: 1.0 vertical_to_horizontal_flux_ratio_limit: 2.e-04 diff --git a/parm/chem/ExtData.other b/parm/chem/ExtData.other index 56af1c7d9a..5eb1e1dd0b 100644 --- a/parm/chem/ExtData.other +++ b/parm/chem/ExtData.other @@ -10,13 +10,13 @@ TROPP 'Pa' Y N - 0.0 1.0 # Ginoux input files DU_SRC NA N Y - none none du_src ExtData/Dust/gocart.dust_source.v5a.x1152_y721.nc -# FENGSHA input files. Note: regridding should be N or E -DU_CLAY '1' Y E - none none clayfrac ExtData/Dust/FENGSHA_SOILGRIDS2017_GEFSv12_v1.0.nc -DU_SAND '1' Y E - none none sandfrac ExtData/Dust/FENGSHA_SOILGRIDS2017_GEFSv12_v1.0.nc -DU_SILT '1' Y E - none none siltfrac /dev/null -DU_SSM '1' Y E %y4-%m2-%d2T12:00:00 none none ssm ExtData/Dust/FENGSHA_SOILGRIDS2017_GEFSv12_v1.0.nc -DU_RDRAG '1' Y E - none none drag_part ExtData/Dust/FENGSHA_SOILGRIDS2017_GEFSv12_v1.0.nc -DU_UTHRES '1' Y E - none none uthres ExtData/Dust/FENGSHA_SOILGRIDS2017_GEFSv12_v1.0.nc +# FENGSHA input files. Note: regridding should be N or E - Use files with _FillValue != NaN +DU_CLAY '1' Y E - none none clayfrac ExtData/Dust/FENGSHA_p81_10km_inputs.nc +DU_SAND '1' Y E - none none sandfrac ExtData/Dust/FENGSHA_p81_10km_inputs.nc +DU_SILT '1' Y E - none none siltfrac /dev/null +DU_SSM '1' Y E - none none ssm /dev/null:1.0 +DU_RDRAG '1' Y E %y4-%m2-%d2t12:00:00 none none albedo_drag ExtData/Dust/FENGSHA_p81_10km_inputs.nc +DU_UTHRES '1' Y E - none none uthres ExtData/Dust/FENGSHA_p81_10km_inputs.nc #====== Sulfate Sources ================================================= # Anthropogenic (BF & FF) emissions -- allowed to input as two layers diff --git a/parm/config/config.anal b/parm/config/config.anal deleted file mode 100755 index 57c91ee5fd..0000000000 --- a/parm/config/config.anal +++ /dev/null @@ -1,177 +0,0 @@ -#!/bin/ksh -x - -########## config.anal ########## -# Analysis specific - -echo "BEGIN: config.anal" - -# Get task specific resources -. $EXPDIR/config.resources anal - -if [ $DONST = "YES" ]; then - . $EXPDIR/config.nsst -fi - -if [[ "$CDATE" = "$FDATE" && $EXP_WARM_START = ".false." ]]; then # Cold starting - export USE_RADSTAT="NO" -fi - -if [[ "$CDUMP" = "gfs" ]] ; then - export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. 
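The config.anal file being deleted here selects observation info files by cycle date. Because $CDATE is a ten-digit YYYYMMDDHH string, a plain numeric comparison orders cycles chronologically, which is all the date windows below rely on. A small worked check, using the COSMIC-2 window from the removed file:

# 2020052612 falls inside [2020052612, 2020082412), so this window matches
CDATE=2020052612
if [[ "${CDATE}" -ge "2020052612" && "${CDATE}" -lt "2020082412" ]]; then
    echo "use the 2020052612 convinfo"
fi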
- export GENDIAG="NO" - export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' - export DIAG_TARBALL="NO" -fi - -export npe_gsi=$npe_anal - -if [[ "$CDUMP" == "gfs" ]] ; then - export npe_gsi=$npe_anal_gfs - export nth_anal=$nth_anal_gfs -fi - -# Set parameters specific to L127 -if [ $LEVS = "128" ]; then - export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," - export SETUP="gpstop=55,nsig_ext=56,$SETUP" -fi - -# Set namelist option for LETKF -export lobsdiag_forenkf=".false." # anal does not need to write out jacobians - # set to .true. in config.eobs and config.eupd - -if [ $OUTPUT_FILE = "nemsio" ]; then - export DO_CALC_INCREMENT="YES" - export DO_CALC_ANALYSIS="NO" -fi - -# Do not process the following datasets -export GSNDBF=${GSNDBF:-/dev/null} -export AMSREBF=${AMSREBF:-/dev/null} -export SSMITBF=${SSMITBF:-/dev/null} -export AMSR2BF=${AMSR2BF:-/dev/null} - - -# Set default values for info files and observation error -# NOTE: Remember to set PRVT in config.prep as OBERROR is set below -export CONVINFO=$FIXgsi/global_convinfo.txt -export OZINFO=$FIXgsi/global_ozinfo.txt -export SATINFO=$FIXgsi/global_satinfo.txt -export OBERROR=$FIXgsi/prepobs_errtable.global - - -# Use experimental dumps in EMC GFS v16 parallels -if [[ $RUN_ENVIR == "emc" ]]; then - export ABIBF="/dev/null" - if [[ "$CDATE" -ge "2019022800" ]] ; then - export ABIBF="$DMPDIR/${CDUMP}x.${PDY}/${cyc}/${CDUMP}.t${cyc}z.gsrcsr.tm00.bufr_d" - if [[ "$CDATE" -ge "2019111000" && "$CDATE" -le "2020052612" ]]; then - export ABIBF="$DMPDIR/${CDUMP}y.${PDY}/${cyc}/${CDUMP}.t${cyc}z.gsrcsr.tm00.bufr_d" - fi - fi - - export AHIBF="/dev/null" - if [[ "$CDATE" -ge "2019042300" ]]; then - export AHIBF="$DMPDIR/${CDUMP}x.${PDY}/${cyc}/${CDUMP}.t${cyc}z.ahicsr.tm00.bufr_d" - fi - - export HDOB=$DMPDIR/${CDUMP}x.${PDY}/${cyc}/${CDUMP}.t${cyc}z.hdob.tm00.bufr_d - - # Use dumps from NCO GFS v16 parallel - if [[ "$CDATE" -ge "2020103012" ]]; then - export ABIBF="" - export AHIBF="" - export HDOB="" - fi - - # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels - if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then - export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2019021900 - export OBERROR=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 - fi - - # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps - if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then - export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2019110706 - export OBERROR=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 - fi - - # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations - if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "2020052612" ]]; then - export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2020040718 - export OBERROR=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2020040718 - fi - - # Assimilate COSMIC-2 - if [[ "$CDATE" -ge "2020052612" && "$CDATE" -lt "2020082412" ]]; then - export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2020052612 - export OBERROR=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2020040718 - fi - - # Assimilate HDOB - if [[ "$CDATE" -ge "2020082412" && "$CDATE" -lt "2020091612" ]]; then - export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2020082412 - fi - - # Assimilate Metop-C GNSSRO - if [[ "$CDATE" -ge "2020091612" && "$CDATE" -lt "2021031712" ]]; then - export 
CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2020091612 - fi - - # Assimilate DO-2 GeoOptics - if [[ "$CDATE" -ge "2021031712" && "$CDATE" -lt "2021091612" ]]; then - export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021031712 - fi - - # NOTE: - # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is - # identical to ../global_convinfo.txt. Thus, the logic below is not - # needed at this time. - # Assimilate COSMIC-2 GPS - # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then - # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 - # fi - - # Turn off assmilation of OMPS during period of bad data - if [[ "$CDATE" -ge "2020011600" && "$CDATE" -lt "2020011806" ]]; then - export OZINFO=$FIXgsi/gfsv16_historical/global_ozinfo.txt.2020011600 - fi - - - # Set satinfo for start of GFS v16 parallels - if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then - export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2019021900 - fi - - # Turn on assimilation of Metop-C AMSUA and MHS - if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020022012" ]]; then - export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2019110706 - fi - - # Turn off assimilation of Metop-A MHS - if [[ "$CDATE" -ge "2020022012" && "$CDATE" -lt "2021052118" ]]; then - export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2020022012 - fi - - # Turn off assimilation of S-NPP CrIS - if [[ "$CDATE" -ge "2021052118" && "$CDATE" -lt "2021092206" ]]; then - export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021052118 - fi - - # Turn off assimilation of MetOp-A IASI - if [[ "$CDATE" -ge "2021092206" && "$CDATE" -lt "2021102612" ]]; then - export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021092206 - fi - - # NOTE: - # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is - # identical to ../global_satinfo.txt. Thus, the logic below is not - # needed at this time - # - # Turn off assmilation of all Metop-A MHS - # if [[ "$CDATE" -ge "2021110312" && "$CDATE" -lt "YYYYMMDDHH" ]]; then - # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 - # fi -fi - -echo "END: config.anal" diff --git a/parm/config/config.analcalc b/parm/config/config.analcalc deleted file mode 100755 index 5866ce5ac6..0000000000 --- a/parm/config/config.analcalc +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/ksh -x - -########## config.analcalc ########## -# GFS post-anal specific (non-diag) - -echo "BEGIN: config.analcalc" - -# Get task specific resources -. $EXPDIR/config.resources analcalc - -echo "END: config.analcalc" diff --git a/parm/config/config.analdiag b/parm/config/config.analdiag deleted file mode 100755 index 285e614d37..0000000000 --- a/parm/config/config.analdiag +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/ksh -x - -########## config.analdiag ########## -# GFS post-anal specific (diag) - -echo "BEGIN: config.analdiag" - -# Get task specific resources -. $EXPDIR/config.resources analdiag - -echo "END: config.analdiag" diff --git a/parm/config/config.arch b/parm/config/config.arch deleted file mode 100755 index fca519c414..0000000000 --- a/parm/config/config.arch +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/ksh -x - -########## config.arch ########## -# Archive specific - -echo "BEGIN: config.arch" - -# Get task specific resources -. 
$EXPDIR/config.resources arch - -export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} -export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} -export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} - -#--online archive of nemsio files for fit2obs verification -export FITSARC="YES" -export FHMAX_FITS=132 -[[ "$FHMAX_FITS" -gt "$FHMAX_GFS" ]] && export FHMAX_FITS=$FHMAX_GFS - -#--starting and ending hours of previous cycles to be removed from rotating directory -export RMOLDSTD=144 -export RMOLDEND=24 - -#--keep forcing data for running gldas step -if [[ "$DO_GLDAS" == "YES" && "$CDUMP" == "gdas" ]]; then - [[ $RMOLDSTD -lt 144 ]] && export RMOLDSTD=144 -fi - -echo "END: config.arch" diff --git a/parm/config/config.awips b/parm/config/config.awips deleted file mode 100755 index 6167b91f7e..0000000000 --- a/parm/config/config.awips +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/ksh -x - -########## config.awips ########## -# GFS awips step specific - -echo "BEGIN: config.awips" - -# Get task specific resources -. $EXPDIR/config.resources awips - -export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG -export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 - -# No. of concurrent awips jobs -export NAWIPSGRP=42 - -echo "END: config.awips" diff --git a/parm/config/config.base.emc.dyn b/parm/config/config.base.emc.dyn deleted file mode 120000 index 6e9cfcec1a..0000000000 --- a/parm/config/config.base.emc.dyn +++ /dev/null @@ -1 +0,0 @@ -config.base.emc.dyn_hera \ No newline at end of file diff --git a/parm/config/config.earc b/parm/config/config.earc deleted file mode 100755 index 7cb1de235f..0000000000 --- a/parm/config/config.earc +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/ksh -x - -########## config.earc ########## -# Ensemble archive specific - -echo "BEGIN: config.earc" - -# Get task specific resources -. $EXPDIR/config.resources earc - -export NMEM_EARCGRP=10 - -#--starting and ending hours of previous cycles to be removed from rotating directory -export RMOLDSTD_ENKF=144 -export RMOLDEND_ENKF=24 - -echo "END: config.earc" diff --git a/parm/config/config.ecen b/parm/config/config.ecen deleted file mode 100755 index c9609e3ff8..0000000000 --- a/parm/config/config.ecen +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/ksh -x - -########## config.ecen ########## -# Ensemble recentering specific - -echo "BEGIN: config.ecen" - -# Get task specific resources -. $EXPDIR/config.resources ecen - -# Number of concurrent ecen jobs [1 implies sequential] -# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting -# below queries IAUFHRS_ENKF to determine NECENGRP -export NECENGRP=1 -if [ $DOIAU_ENKF = "YES" ]; then - ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) - ((ngrps++)) - export NECENGRP=$ngrps -fi - -echo "END: config.ecen" diff --git a/parm/config/config.ediag b/parm/config/config.ediag deleted file mode 100755 index 192b5d0b48..0000000000 --- a/parm/config/config.ediag +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/ksh -x - -########## config.ediag ########## -# GFS ensemble post-eobs specific - -echo "BEGIN: config.ediag" - -# Get task specific resources -. $EXPDIR/config.resources ediag - -echo "END: config.ediag" diff --git a/parm/config/config.efcs b/parm/config/config.efcs deleted file mode 100755 index 89b7684fc3..0000000000 --- a/parm/config/config.efcs +++ /dev/null @@ -1,87 +0,0 @@ -#!/bin/ksh -x - -########## config.efcs ########## -# Ensemble forecast specific, dependency: config.fcst - -echo "BEGIN: config.efcs" - -# Source model specific information that is resolution dependent -. 
$EXPDIR/config.fv3 $CASE_ENKF - -# Get task specific resources -. $EXPDIR/config.resources efcs - -export npe_fv3=$npe_efcs - -if [ $QUILTING = ".true." ]; then - export npe_fv3=$(echo " $npe_fv3 + $WRITE_GROUP * $WRTTASK_PER_GROUP" | bc) - export npe_efcs=$npe_fv3 -fi - -# Only use serial I/O for ensemble on Hera and Orion (lustre?) -case $machine in - "HERA" | "ORION") - export OUTPUT_FILETYPE_ATM="netcdf" - export OUTPUT_FILETYPE_SFC="netcdf" - ;; -esac - -# Number of enkf members per fcst job -export NMEM_EFCSGRP=2 -export RERUN_EFCSGRP="NO" - -# Turn off inline UPP for EnKF forecast -export WRITE_DOPOST=".false." - -# Stochastic physics parameters (only for ensemble forecasts) -export DO_SKEB="YES" -export SKEB=0.3 -export SKEB_TAU=21600. -export SKEB_LSCALE=250000. -export SKEBNORM=0 -export SKEB_NPASS=30 -export SKEB_VDOF=5 -export DO_SHUM="YES" -export SHUM=0.005 -export SHUM_TAU=21600. -export SHUM_LSCALE=500000. -export DO_SPPT="YES" -export SPPT=0.5 -export SPPT_TAU=21600. -export SPPT_LSCALE=500000. -export SPPT_LOGIT=".true." -export SPPT_SFCLIMIT=".true." - -if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then - if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then - export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" - else - export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" - fi -else - export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" -fi - -# FV3 model namelist parameters to over-ride -export restart_interval=${restart_interval:-6} - -# For IAU, write restarts at beginning of window also -if [ $DOIAU_ENKF = "YES" ]; then - export restart_interval="3 -1" -fi - -# wave model -export cplwav=.false. - -# ocean model resolution -case "$CASE_ENKF" in - "C48") export OCNRES=400;; - "C96") export OCNRES=100;; - "C192") export OCNRES=050;; - "C384") export OCNRES=025;; - "C768") export OCNRES=025;; - *) export OCNRES=025;; -esac -export ICERES=$OCNRES - -echo "END: config.efcs" diff --git a/parm/config/config.eobs b/parm/config/config.eobs deleted file mode 100755 index e46dde2f34..0000000000 --- a/parm/config/config.eobs +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/ksh -x - -########## config.eobs config.eomg ########## -# Ensemble innovation specific, dependency config.anal - -echo "BEGIN: config.eobs" - -# Get task specific resources -. $EXPDIR/config.resources eobs - -# Number of enkf members per innovation job -export NMEM_EOMGGRP=8 -export RERUN_EOMGGRP="YES" -export npe_gsi=$npe_eobs - -# GSI namelist options related to observer for EnKF -export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" -export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" -if [ $LEVS = "128" ]; then - export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," - export SETUP_INVOBS="gpstop=55,nsig_ext=56," -fi - - -export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. -export GENDIAG="YES" # Diagnostic files must be created for EnKF - -export lobsdiag_forenkf=".true." # write out jacobians from eobs - # need to specify .true. setting since config.anal sets to .false. 
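One detail of the removed config.ecen above that is easy to misread: the number of recentering groups is obtained by counting the commas in IAUFHRS_ENKF and adding one. A quick worked check with the usual three IAU hours:

IAUFHRS_ENKF="3,6,9"
ngrps=$(grep -o ',' <<<"${IAUFHRS_ENKF}" | grep -c .)   # two commas found -> 2
((ngrps++))                                             # 2 + 1 = 3
echo "NECENGRP=${ngrps}"                                # prints NECENGRP=3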
- -echo "END: config.eobs" diff --git a/parm/config/config.epos b/parm/config/config.epos deleted file mode 100755 index 441a1ff995..0000000000 --- a/parm/config/config.epos +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/ksh -x - -########## config.epos ########## -# Ensemble post processing specific - -echo "BEGIN: config.epos" - -# Get task specific resources -. $EXPDIR/config.resources epos - -# No. of concurrent epos jobs [1 implies sequential] -export NEPOSGRP=7 -if [ $l4densvar = ".false." ]; then - export NEPOSGRP=3 -fi - -# Generate ensemble spread files -export ENKF_SPREAD="YES" - -echo "END: config.epos" diff --git a/parm/config/config.esfc b/parm/config/config.esfc deleted file mode 100755 index 53cbb09175..0000000000 --- a/parm/config/config.esfc +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/ksh -x - -########## config.esfc ########## -# Ensemble surface specific - -echo "BEGIN: config.esfc" - -# Get task specific resources -. $EXPDIR/config.resources esfc - -# With IAU only need surface analysis at start of IAU window. -# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at -# center of analysis window. - -if [ $DOIAU_ENKF = "YES" ]; then - export DOSFCANL_ENKF="NO" -fi - -echo "END: config.esfc" diff --git a/parm/config/config.eupd b/parm/config/config.eupd deleted file mode 100755 index 0e9d42e093..0000000000 --- a/parm/config/config.eupd +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/ksh -x - -########## config.eupd ########## -# Ensemble update specific, dependency config.anal - -echo "BEGIN: config.eupd" - -# Get task specific resources -. $EXPDIR/config.resources eupd - -export npe_enkf=$npe_eupd - -# Use NAM_ENKF below for serial EnKF -##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" - -# LETKF specific settings with model space localization -export modelspace_vloc=".true." # model space localization -export letkf_flag=".true." # use LETKF instead of serial filter -export getkf=".true." # Gain form of LETKF (needed for model-space localization) -export denkf=".true." # EnKF approximation (beneficial since less spread removed by analysis) -export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can - # be reduced to speed up execution time. -export analpertwt=0.85 # relaxation to prior spread inflation factor -export readin_localization_enkf=".false." # Don’t read in localization scales from file (doesn’t make - # sense for LETKF if model space localization on and nobsl_max>0) -export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) -export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. - -export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." - # need to specify .true. setting since config.anal sets to .false. - -export NAM_ENKF="smoothparm=35," - -echo "END: config.eupd" diff --git a/parm/config/config.fcst b/parm/config/config.fcst deleted file mode 100755 index 2d94fac6a4..0000000000 --- a/parm/config/config.fcst +++ /dev/null @@ -1,488 +0,0 @@ -#!/bin/ksh -x - -########## config.fcst ########## -# Forecast specific - -echo "BEGIN: config.fcst" - -# set -eu - -# Source model specific information that is resolution dependent -. 
$EXPDIR/config.fv3 $CASE - -# Turn off waves if not used for this CDUMP -case $WAVE_CDUMP in - both | $CDUMP ) ;; # Don't change - *) DO_WAVE="NO" ;; # Turn waves off -esac - -# Source component configs if necessary -for component in WAVE OCN ICE AERO; do - control="DO_${component}" - if [[ $(eval echo \$$control) == "YES" ]]; then - . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') - fi -done - -# Get task specific resources -. $EXPDIR/config.resources fcst -export domains_stack_size="16000000" - - -if [ $DONST = "YES" ]; then - . $EXPDIR/config.nsst -fi - -export print_esmf=".false." -export esmf_profile=".false." - -####################################################################### -# COUPLING COMPONENTS -export OCN_model="mom6" -export ICE_model="cice6" -export WAV_model="ww3" -export CHM_model="gocart" - -# cpl defaults - -export cpl=".false." -export cplflx=".false." -export cplice=".false." -export cplchm=".false." -export cplwav=".false." - -# cpl changes based on APP - -if [ $DO_COUPLED = "YES" ]; then - export cpl=".true." -fi -if [ $DO_AERO = "YES" ]; then - export cplchm=".true." -fi -if [ $DO_ICE = "YES" ]; then - export cplice=".true." - export cplflx=".true." -fi -if [ $DO_OCN = "YES" ]; then - export cplflx=".true." -fi -if [ $DO_WAVE = "YES" ]; then - export cplwav=".true." -fi - -####################################################################### -# COUPLING COMPONENTS -export use_coldstart=".false." - - -####################################################################### - -export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" -export FCSTEXECDIR="$HOMEgfs/exec" -export FCSTEXEC="ufs_model" - -####################################################################### -# Model configuration -export TYPE="nh" -export MONO="non-mono" - -# Use stratosphere h2o physics -export h2o_phys=".true." - -# Options of stratosphere O3 physics reaction coefficients -export new_o3forc="YES" - -export gwd_opt=2 - -# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc -# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) -# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not uded. -if [ $gwd_opt -eq 1 ]; then - export knob_ugwp_version=0 - export do_ugwp=".false." - export do_tofd=".false." - export launch_level=$(echo "$LEVS/2.35" |bc) -fi - - -# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc -if [ $gwd_opt -eq 2 ]; then - - #--used for UFS p7 and p8a - #export knob_ugwp_version=1 - #export do_ugwp=".false." - #export do_tofd=".false." - #export do_ugwp_v0=".false." - #export do_ugwp_v1=".true." - #export do_ugwp_v0_orog_only=".false." - #export do_ugwp_v0_nst_only=".false." - #export do_gsl_drag_ls_bl=".true." - #export do_gsl_drag_ss=".true." - #export do_gsl_drag_tofd=".false." - #export do_ugwp_v1_orog_only=".false." - - #--used for UFS p8b - export knob_ugwp_version=0 - export do_ugwp=".false." - export do_tofd=".false." - export do_ugwp_v0=".true." - export do_ugwp_v1=".false." - export do_ugwp_v0_orog_only=".false." - export do_ugwp_v0_nst_only=".false." - export do_gsl_drag_ls_bl=".false." - export do_gsl_drag_ss=".true." - export do_gsl_drag_tofd=".false." - export do_ugwp_v1_orog_only=".false." - export launch_level=$(echo "$LEVS/2.35" |bc) - - #--used for GSL suite -##JKH export knob_ugwp_version=0 -##JKH export do_ugwp=".false." -##JKH export do_tofd="true." -##JKH export do_ugwp_v0=".true." -##JKH export do_ugwp_v1=".false." -##JKH export do_ugwp_v0_orog_only=".false." 
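A quick check of the launch_level arithmetic used in the gravity-wave-drag blocks of the removed config.fcst above: bc divides at its default scale of zero, so the quotient is simply truncated.

LEVS=128
launch_level=$(echo "${LEVS}/2.35" | bc)   # 128 / 2.35 = 54.46..., truncated to 54
echo "${launch_level}"                     # prints 54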
-##JKH export do_ugwp_v0_nst_only=".false." -##JKH export do_gsl_drag_ls_bl=".false." -##JKH export do_gsl_drag_ss=".true." -##JKH export do_gsl_drag_tofd=".false." -##JKH export do_ugwp_v1_orog_only=".false." -##JKH export launch_level=$(echo "$LEVS/2.35" |bc) -fi - - - - -# Sponge layer settings -export tau=10.0 -export rf_cutoff=7.5e2 -export d2_bg_k1=0.20 -### JKH -if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]] ; then - export d2_bg_k2=0.15 ### JKH - 10dec - export dz_min=2 - export dt_inner=40. ### JKH - 10dec -else - export d2_bg_k2=0.04 - export dz_min=6 -fi -if [ $LEVS = "128" ]; then export n_sponge=42; fi #127 layer -if [ $LEVS = "65" ]; then - if [ "CCPP_SUITE" = "FV3_RAP_cires_ugwp" -o "CCPP_SUITE" = "FV3_RAP_noah_sfcdiff_unified_ugwp" ]; then - export n_sponge=23 - else - export n_sponge=42 - fi -fi -if [ $LEVS = "128" -a "$CDUMP" = "gdas" ]; then - export tau=5.0 - export rf_cutoff=1.0e3 - export d2_bg_k1=0.20 - export d2_bg_k2=0.0 -fi - -# PBL/turbulence schemes -export hybedmf=".false." -if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then - export satmedmf=".false." - export isatmedmf=0 - if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then - export shal_cnv=".false." - else - export shal_cnv=".true." - fi - export do_mynnedmf=".true." - if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" ]] ; then - export do_mynnsfclay=".true." - else - export do_mynnsfclay=".false." - fi - export icloud_bl=1 - export bl_mynn_tkeadvect=.true. - export bl_mynn_edmf=1 - export bl_mynn_edmf_mom=1 -else - export satmedmf=".true." - export isatmedmf=1 -fi - -tbf="" -if [ $satmedmf = ".true." ]; then tbf="_satmedmf" ; fi - -# Radiation options -export IAER=1011 ; #spectral band mapping method for aerosol optical properties -export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) -export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) -export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) -export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for hu and stamnes -export isubc_sw=2 -export isubc_lw=2 - -# RRTMGP radiation scheme -export do_RRTMGP=.false. -export doGP_cldoptics_LUT=.false. -export doGP_lwscat=.false. - -# LSM configuration -# NoahMP only -export iopt_sfc="3" -export iopt_trs="2" - -# Convection Options: 2-SASAS, 3-GF -if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then - export imfdeepcnv=3 - export imfshalcnv=-1 ## JKH - no shallow GF - export progsigma=.false. -elif [[ "$CCPP_SUITE" == "FV3_GFS_v16_gf" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then - export imfdeepcnv=3 - export imfshalcnv=3 - export progsigma=.false. -else - export imfdeepcnv=2 - if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then - export imfshalcnv=-1 - export progsigma=.false. - else - export imfshalcnv=2 - fi -fi - -# Microphysics configuration -export dnats=0 -export cal_pre=".true." -export do_sat_adj=".false." -export random_clds=".true." 
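The tbf suffix defined above is how the removed config.fcst switched between field-table variants; it is appended to the base table name in the microphysics branches that follow. A minimal illustration, using the Zhao-Carr table referenced below:

satmedmf=".true."
tbf=""
if [ "${satmedmf}" = ".true." ]; then tbf="_satmedmf"; fi
# With satmedmf on, the Zhao-Carr branch points at the _satmedmf variant
echo "field_table_zhaocarr${tbf}"   # prints field_table_zhaocarr_satmedmf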
- -if [ $imp_physics -eq 99 ]; then # ZhaoCarr - export ncld=1 - export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}" - export nwat=2 - -elif [ $imp_physics -eq 6 ]; then # WSM6 - export ncld=2 - export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}" - export nwat=6 - -elif [ $imp_physics -eq 8 ]; then # Thompson - export nwat=6 - export cal_pre=".false." - export random_clds=".false." - export effr_in=".true." - export ttendlim="-999" - export dddmp=0.1 - export d4_bg=0.12 - if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_GFS_v16_thompson" ]] ; then - export ncld=5 - export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" - export ltaerosol=.true. - export lradar=.true. - - ## GSL namelist changes - export vtdm4_nh_nonmono=0.03 ### JKH - 10dec - export nord=3 ### JKH - 10dec - export dt_inner=40. ### JKH - 10dec - if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then - export k_split=6 - export n_split=2 - fi - - export kord_tm=-11 ### JKH - 10dec - export kord_mt=11 ### JKH - 10dec - export kord_wz=11 ### JKH - 10dec - export kord_tr=11 ### JKH - 10dec - export d_con_nonmono=0.5 ### JKH - 10dec - export hord_mt_nh_nonmono=6 ### JKH - 10dec - export hord_xx_nh_nonmono=6 ### JKH - 10dec - else - export ncld=2 - export sedi_semi=.true. - export decfl=10 - if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf_mynn" ]] ; then - export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke" - export ltaerosol=".true." - else - export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" - export ltaerosol=".false." - fi - export lradar=".false." - export dt_inner=$((DELTIM/2)) - if [ $sedi_semi = .true. ]; then export dt_inner=$DELTIM ; fi - export hord_mt_nh_nonmono=5 - export hord_xx_nh_nonmono=5 - export vtdm4_nh_nonmono=0.02 - export nord=2 - fi - -elif [ $imp_physics -eq 11 ]; then # GFDL - export ncld=5 - if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then - export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl_satmedmf" - else - export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}" - fi - export nwat=6 - export dnats=1 - export cal_pre=".false." - export do_sat_adj=".true." - export random_clds=".false." - export lgfdlmprad=".true." - export effr_in=".true." - export reiflag=2 - - export hord_mt_nh_nonmono=5 - export hord_xx_nh_nonmono=5 - export vtdm4_nh_nonmono=0.02 - export nord=2 - export d4_bg=0.12 - export dddmp=0.1 - -else - echo "Unknown microphysics option, ABORT!" - -fi - -# Stochastic physics -export DO_SPPT=${DO_SPPT:-"NO"} -export DO_SKEB=${DO_SKEB:-"NO"} -export DO_SHUM=${DO_SHUM:-"NO"} -export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} -export DO_CA=${DO_CA:-"YES"} -export DO_OCN_SPPT=${DO_OCN_SPPT:-"NO"} -export DO_OCN_PERT_EPBL=${DO_OCN_PERT_EPBL:-"NO"} - -#coupling settings -export FRAC_GRID=".true." 
-export cplmode="nems_frac" -export psm_bc="1" - -export min_lakeice="0.15" -export min_seaice=${min_seaice:-"0.15"} -export use_cice_alb=${use_cice_alb:-".false."} - -export FSICL="0" -export FSICS="0" - -#--------------------------------------------------------------------- - -# ideflate: netcdf zlib lossless compression (0-9): 0 no compression -# nbits: netcdf lossy compression level (0-32): 0 lossless -export ideflate=1 -export nbits=14 -export ishuffle=0 -# compression for RESTART files written by FMS -export shuffle=1 -export deflate_level=1 - -#--------------------------------------------------------------------- -# Disable the use of coupler.res; get model start time from model_configure -export USE_COUPLER_RES="NO" - -if [[ "$CDUMP" == "gdas" ]] ; then # GDAS cycle specific parameters - - # Variables used in DA cycling - if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then - if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then - export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_gsl" - else - export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" - fi - else - export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" - fi - - # Write restart files, where $number is current model start time. - # restart_interval: $number - # number=0, writes out restart files at the end of forecast. - # number>0, writes out restart files at the frequency of $number and at the end of forecast. - # restart_interval: "$number -1" - # writes out restart files only once at $number forecast hour. - # restart_interval: "$number1 $number2 $number3 ..." - # writes out restart file at the specified forecast hours - export restart_interval=${restart_interval:-6} - - # For IAU, write restarts at beginning of window also - if [ $DOIAU = "YES" ]; then - export restart_interval="3 6" - fi - - # Choose coupling with wave - if [ $DO_WAVE = "YES" ]; then export cplwav=".true." ; fi - - # Turn on dry mass adjustment in GDAS - export adjust_dry_mass=".true." - -elif [[ "$CDUMP" == "gfs" ]] ; then # GFS cycle specific parameters - - # Write more variables to output - if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then - if [ $CCPP_SUITE = "FV3_RAP_cires_ugwp" ]; then - export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_gsl_ruc" - elif [ $CCPP_SUITE = "FV3_RAP_noah_sfcdiff_unified_ugwp" ]; then - export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_gsl" - else - export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" - fi - else - export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_orig" - fi - - # Write gfs restart files to rerun fcst from any break point - export restart_interval_gfs=${restart_interval_gfs:-0} - if [ $restart_interval_gfs -le 0 ]; then - #JKH export restart_interval="$FHMAX_GFS" - export restart_interval=0 - else - rst_list="" - IAU_OFFSET=${IAU_OFFSET:-0} - [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 - xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) - while [ $xfh -le $FHMAX_GFS ]; do - rst_list="$rst_list $xfh" - xfh=$((xfh+restart_interval_gfs)) - done - export restart_interval="$rst_list" - fi - - if [ $DO_AERO = "YES" ]; then - # Make sure a restart file is written at the cadence time - if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then - export restart_interval="$STEP_GFS $restart_interval" - fi - fi - - # Choose coupling with wave - if [ $DO_WAVE = "YES" -a "$WAVE_CDUMP" != "gdas" ]; then - export cplwav=".true." 
- fi - - # Turn off dry mass adjustment in GFS - export adjust_dry_mass=".false." - - # Write each restart file in 16 small files to save time - if [ $CASE = C768 ]; then - export io_layout="4,4" - else - export io_layout="1,1" - fi - -fi - -if [[ $DO_COUPLED = "YES" ]] ; then # coupled model - export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_cpl" -fi - -if [ $DO_AERO = "YES" ]; then # temporary settings for aerosol coupling - export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" - export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" - export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) - export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" - export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" - export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" - export dnats_aero="${aero_diag_tracers:-0}" -fi - -# Remember config.efcs will over-ride these values for ensemble forecasts -# if these variables are re-defined there. -# Otherwise, the ensemble forecast will inherit from config.fcst - -echo "END: config.fcst" diff --git a/parm/config/config.fv3 b/parm/config/config.fv3 deleted file mode 100755 index 72798a9def..0000000000 --- a/parm/config/config.fv3 +++ /dev/null @@ -1,200 +0,0 @@ -#!/bin/ksh -x - -########## config.fv3 ########## -# FV3 model resolution specific parameters -# e.g. time-step, processor layout, physics and dynamics parameters -# This config sets default variables for FV3 for a given resolution -# User can over-ride after sourcing this config file - -if [ $# -ne 1 ]; then - - echo "Must specify an input resolution argument to set variables!" - echo "argument can be any one of the following:" - echo "C48 C96 C192 C384 C768 C1152 C3072" - exit 1 - -fi - -case_in=$1 - -echo "BEGIN: config.fv3" - - -if [[ "$machine" = "WCOSS_DELL_P3" ]]; then - export npe_node_max=28 -elif [[ "$machine" = "WCOSS_C" ]]; then - export npe_node_max=24 -elif [[ "$machine" = "JET" ]]; then - if [[ "$PARTITION_BATCH" = "xjet" ]]; then - export npe_node_max=24 - elif [[ "$PARTITION_BATCH" = "vjet" || "$PARTITION_BATCH" = "sjet" ]]; then - export npe_node_max=16 - elif [[ "$PARTITION_BATCH" = "kjet" ]]; then - export npe_node_max=40 - fi -elif [[ "$machine" = "HERA" ]]; then - export npe_node_max=40 -elif [[ "$machine" = "ORION" ]]; then - export npe_node_max=40 -fi - -# (Standard) Model resolution dependent variables -case $case_in in - "C48") - export DELTIM=1200 - export layout_x=3 - export layout_y=2 - export layout_x_gfs=3 - export layout_y_gfs=2 - export nth_fv3=1 - export nth_fv3_gfs=1 - export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling - if [[ "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]]; then export cdmbgwd="1.0,1.0,1.0,1.0"; fi - export WRITE_GROUP=1 - export WRTTASK_PER_GROUP=$npe_node_max - export WRITE_GROUP_GFS=1 - export WRTTASK_PER_GROUP_GFS=$npe_node_max - export WRTIOBUF="1M" - ;; - "C96") - export DELTIM=450 - export layout_x=6 - export layout_y=4 - export layout_x_gfs=6 - export layout_y_gfs=4 - export nth_fv3=1 - export nth_fv3_gfs=1 - export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling - if [[ "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]]; then export cdmbgwd="1.0,1.0,1.0,1.0"; fi - export WRITE_GROUP=1 - export WRTTASK_PER_GROUP=$npe_node_max - export WRITE_GROUP_GFS=1 - export WRTTASK_PER_GROUP_GFS=$npe_node_max - export WRTIOBUF="4M" - export n_split=6 - ;; - "C192") - export 
DELTIM=450 - export layout_x=4 - export layout_y=6 - export layout_x_gfs=4 - export layout_y_gfs=6 - export nth_fv3=2 - export nth_fv3_gfs=2 - export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling - if [[ "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]]; then export cdmbgwd="1.0,1.0,1.0,1.0"; fi - export WRITE_GROUP=1 - export WRTTASK_PER_GROUP=$npe_node_max - export WRITE_GROUP_GFS=2 - export WRTTASK_PER_GROUP_GFS=$npe_node_max - export WRTIOBUF="8M" - ;; - "C384") - export DELTIM=${DELTIM:-300} - export layout_x=6 - export layout_y=8 - export layout_x_gfs=${layout_x_gfs:-8} - export layout_y_gfs=${layout_y_gfs:-12} - export nth_fv3=2 - export nth_fv3_gfs=${nth_fv3_gfs:-2} - export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling - export WRITE_GROUP=1 - export WRTTASK_PER_GROUP=$npe_node_max - export WRITE_GROUP_GFS=${WRITE_GROUP_GFS:-2} - export WRTTASK_PER_GROUP_GFS=${WRTTASK_PER_GROUP_GFS:-$npe_node_max} - export WRTIOBUF=${WRTIOBUF:-"16M"} - ;; - "C768") - if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then - if [ $LEVS = "128" ]; then - export DELTIM=120 - else - #JKHexport DELTIM=225 - export DELTIM=180 - fi - else - if [[ "$CCPP_SUITE" == "FV3_GFS_v16_mynn" ]] ; then - export DELTIM=100 - else - export DELTIM=150 - fi - fi - export layout_x=8 - export layout_y=12 - #JKHexport layout_x_gfs=16 - export layout_x_gfs=12 ## JKH - export layout_y_gfs=12 - export nth_fv3=4 - export nth_fv3_gfs=4 - export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling - if [[ "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" ]]; then export cdmbgwd="4.0,0.15,1.0,1.0"; fi - if [[ "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]]; then export cdmbgwd="1.0,1.0,1.0,1.0"; fi - export WRITE_GROUP=2 - export WRTTASK_PER_GROUP=$(echo "2*$npe_node_max" |bc) - export WRITE_GROUP_GFS=2 ## JKH - export WRTTASK_PER_GROUP_GFS=$(echo "2*$npe_node_max" |bc) - export WRTIOBUF="32M" - ;; - "C1152") - export DELTIM=120 - export layout_x=8 - export layout_y=16 - export layout_x_gfs=8 - export layout_y_gfs=16 - export nth_fv3=4 - export nth_fv3_gfs=4 - export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling - if [[ "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]]; then export cdmbgwd="1.0,1.0,1.0,1.0"; fi - export WRITE_GROUP=4 - export WRTTASK_PER_GROUP=$(echo "2*$npe_node_max" |bc) - export WRITE_GROUP_GFS=4 - export WRTTASK_PER_GROUP_GFS=$(echo "2*$npe_node_max" |bc) - export WRTIOBUF="48M" - ;; - "C3072") - export DELTIM=90 - export layout_x=16 - export layout_y=32 - export layout_x_gfs=16 - export layout_y_gfs=32 - export nth_fv3=4 - export nth_fv3_gfs=4 - export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling - export WRITE_GROUP=4 - export WRTTASK_PER_GROUP=$(echo "3*$npe_node_max" |bc) - export WRITE_GROUP_GFS=4 - export WRTTASK_PER_GROUP_GFS=$(echo "3*$npe_node_max" |bc) - export WRTIOBUF="64M" - ;; - *) - echo "grid $case_in not supported, ABORT!" 
- exit 1 - ;; -esac - -# Calculate chunksize based on resolution -RESTILE=$(echo $case_in |cut -c2-) -export ichunk2d=$((4*RESTILE)) -export jchunk2d=$((2*RESTILE)) -export ichunk3d=$((4*RESTILE)) -export jchunk3d=$((2*RESTILE)) -export kchunk3d=1 - -# Determine whether to use parallel NetCDF based on resolution -case $case_in in - "C48" | "C96" | "C192") - export OUTPUT_FILETYPE_ATM="netcdf_parallel" - export OUTPUT_FILETYPE_SFC="netcdf" - ;; - "C384" | "C768" | "C1152" | "C3072") -#JKH if [[ "$machine" = "JET" ]]; then - export OUTPUT_FILETYPE_ATM="netcdf" ## JKH -#JKH else -#JKH export OUTPUT_FILETYPE_ATM="netcdf_parallel" -#JKH fi -#JKH export OUTPUT_FILETYPE_SFC="netcdf_parallel" - export OUTPUT_FILETYPE_SFC="netcdf" - ;; -esac - -echo "END: config.fv3" diff --git a/parm/config/config.gempak b/parm/config/config.gempak deleted file mode 100755 index 2bc49dcf17..0000000000 --- a/parm/config/config.gempak +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/ksh -x - -########## config.gempak ########## -# GFS gempak step specific - -echo "BEGIN: config.gempak" - -# Get task specific resources -. $EXPDIR/config.resources gempak - -export GEMPAKSH=$HOMEgfs/jobs/JGFS_ATMOS_GEMPAK - -echo "END: config.gempak" diff --git a/parm/config/config.getic b/parm/config/config.getic deleted file mode 100755 index 4671cc9c4a..0000000000 --- a/parm/config/config.getic +++ /dev/null @@ -1,66 +0,0 @@ -#!/bin/ksh -x - -########## config.getic ########## -# Fetching GFS initial conditions specific - -echo "BEGIN: config.getic" - -# Get task specific resources -. $EXPDIR/config.resources getic - -export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs -export gfs_ver="v16" # Default = v16 -export OPS_RES=${OPS_RES:-"C768"} # Operational resolution - -export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd -export GDASINIT_DIR=${UFS_DIR}/util/gdas_init - -export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory -export GETICSH=${GDASINIT_DIR}/get_v16.data.sh - -if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input - export GETICSH=${GDASINIT_DIR}/get_v16retro.data.sh - if [[ "$CDATE" -lt "2019060106" ]]; then - HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e - elif [[ "$CDATE" -lt "2019090100" ]]; then - HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e - elif [[ "$CDATE" -lt "2019101706" ]]; then - HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e - elif [[ "$CDATE" -lt "2020122200" ]]; then - HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 - elif [[ "$CDATE" -le "2021032506" ]]; then - HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n - else - set +x - echo NO DATA FOR $CDATE - exit 3 - fi -elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input - # No ENKF data prior to 2012/05/21/00z - if [[ "$CDATE" -lt "2012052100" ]]; then - set +x - echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA - elif [[ "$CDATE" -lt "2016051000" ]]; then - export gfs_ver=v12 - export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh - elif [[ "$CDATE" -lt "2017072000" ]]; then - export gfs_ver=v13 - export GETICSH=${GDASINIT_DIR}/get_pre-v14.data.sh - elif [[ "$CDATE" -lt "2019061200" ]]; then - export gfs_ver=v14 - export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh - elif [[ "$CDATE" -lt "2021032100" ]]; then - export gfs_ver=v15 - export GETICSH=${GDASINIT_DIR}/get_${gfs_ver}.data.sh - elif [[ "$CDATE" -lt "2021032106" ]]; then - # The way the v16 switch over was done, there is no complete - # set 
of v16 or v15 data for 2021032100. And although - # v16 was officially implemented 2021032212, the v16 prod - # tarballs were archived starting 2021032106. - set +x - echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 - exit 1 - fi -fi - -echo "END: config.getic" diff --git a/parm/config/config.gldas b/parm/config/config.gldas deleted file mode 100755 index 4410c2ceab..0000000000 --- a/parm/config/config.gldas +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/ksh -x - -########## config.gldas ########## -# GDAS gldas step specific - -echo "BEGIN: config.gldas" - -# Get task specific resources -. $EXPDIR/config.resources gldas - -export GLDASSH=$HOMEgfs/scripts/exgdas_atmos_gldas.sh -export gldas_spinup_hours=72 -export CPCGAUGE=$DMPDIR -export FINDDATE=$HOMEgfs/util/ush/finddate.sh - -echo "END: config.gldas" diff --git a/parm/config/config.metp b/parm/config/config.metp deleted file mode 100755 index 2c39d9b431..0000000000 --- a/parm/config/config.metp +++ /dev/null @@ -1,99 +0,0 @@ -#!/bin/ksh -x - -########## config.metp ########## -# METplus verification step specific - -echo "BEGIN: config.metp" - -# Get task specific resources -. $EXPDIR/config.resources metp - -export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus -export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus -export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus - - -#---------------------------------------------------------- -# METplus: Verify grid-to-grid, grid-to-obs, precipitation options -#---------------------------------------------------------- -## EMC_VERIF_GLOBAL SETTINGS -export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd -export VERIF_GLOBALSH=$HOMEverif_global/ush/run_verif_global_in_global_workflow.sh -## INPUT DATA SETTINGS -export model=$PSLOT -export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" -export model_hpss_dir=$ATARDIR/.. -export get_data_from_hpss="NO" -export hpss_walltime="10" -## OUTPUT SETTINGS -export model_stat_dir=$ARCDIR/.. 
-export make_met_data_by="VALID" -export SENDMETVIEWER="NO" -## DATE SETTINGS -export VRFYBACK_HRS="0" -## METPLUS SETTINGS -export METplus_verbosity="INFO" -export MET_verbosity="2" -export log_MET_output_to_METplus="yes" -# GRID-TO-GRID STEP 1: gfsmetpg2g1 -export g2g1_type_list="anom pres sfc" -export g2g1_anom_truth_name="self_anl" -export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" -export g2g1_anom_fhr_min=$FHMIN_GFS -export g2g1_anom_fhr_max=$FHMAX_GFS -export g2g1_anom_grid="G002" -export g2g1_anom_gather_by="VSDB" -export g2g1_pres_truth_name="self_anl" -export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" -export g2g1_pres_fhr_min=$FHMIN_GFS -export g2g1_pres_fhr_max=$FHMAX_GFS -export g2g1_pres_grid="G002" -export g2g1_pres_gather_by="VSDB" -export g2g1_sfc_truth_name="self_f00" -export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" -export g2g1_sfc_fhr_min=$FHMIN_GFS -export g2g1_sfc_fhr_max=$FHMAX_GFS -export g2g1_sfc_grid="G002" -export g2g1_sfc_gather_by="VSDB" -export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" -export g2g1_mv_database_group="NOAA NCEP" -export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" -# GRID-TO-OBS STEP 1: gfsmetpg2o1 -export g2o1_type_list="upper_air conus_sfc" -export g2o1_upper_air_msg_type_list="ADPUPA" -export g2o1_upper_air_vhr_list="00 06 12 18" -export g2o1_upper_air_fhr_min=$FHMIN_GFS -export g2o1_upper_air_fhr_max="240" -export g2o1_upper_air_grid="G003" -export g2o1_upper_air_gather_by="VSDB" -export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" -export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" -export g2o1_conus_sfc_fhr_min=$FHMIN_GFS -export g2o1_conus_sfc_fhr_max="240" -export g2o1_conus_sfc_grid="G104" -export g2o1_conus_sfc_gather_by="VSDB" -export g2o1_polar_sfc_msg_type_list="IABP" -export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" -export g2o1_polar_sfc_fhr_min=$FHMIN_GFS -export g2o1_polar_sfc_fhr_max="240" -export g2o1_polar_sfc_grid="G219" -export g2o1_polar_sfc_gather_by="VSDB" -export g2o1_prepbufr_data_run_hpss="NO" -export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" -export g2o1_mv_database_group="NOAA NCEP" -export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" -# PRECIP STEP 1: gfsmetppcp1 -export precip1_type_list="ccpa_accum24hr" -export precip1_ccpa_accum24hr_model_bucket="06" -export precip1_ccpa_accum24hr_model_var="APCP" -export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" -export precip1_ccpa_accum24hr_fhr_min=$FHMIN_GFS -export precip1_ccpa_accum24hr_fhr_max="180" -export precip1_ccpa_accum24hr_grid="G211" -export precip1_ccpa_accum24hr_gather_by="VSDB" -export precip1_obs_data_run_hpss="NO" -export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" -export precip1_mv_database_group="NOAA NCEP" -export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" - -echo "END: config.metp" diff --git a/parm/config/config.nsst b/parm/config/config.nsst deleted file mode 100644 index 2eb05c181d..0000000000 --- a/parm/config/config.nsst +++ /dev/null @@ -1,41 +0,0 @@ -#!/bin/ksh -x - -########## config.nsst ########## -# NSST specific - -echo "BEGIN: config.nsst" - -# NSST parameters contained within nstf_name - -# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled -export NST_MODEL=2 - -# nstf_name(2) : 
NST_SPINUP : 0 = OFF, 1 = ON, -export NST_SPINUP=0 -if [[ "$CDATE" -lt "2017072000" || "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then - export NST_SPINUP=1 -fi - -# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON -if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then - export NST_RESV=1 -else - export NST_RESV=0 -fi - -# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) -export ZSEA1=0 -if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then - export ZSEA2=5 -else - export ZSEA2=0 -fi - -export NST_GSI=3 # default 0: No NST info at all; - # 1: Input NST info but not used in GSI; - # 2: Input NST info, used in CRTM simulation, no Tr analysis - # 3: Input NST info, used in both CRTM simulation and Tr analysis -export NSTINFO=0 # number of elements added in obs. data array (default = 0) -if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi - -echo "END: config.nsst" diff --git a/parm/config/config.post b/parm/config/config.post deleted file mode 100755 index db811e3691..0000000000 --- a/parm/config/config.post +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/ksh -x - -########## config.post ########## -# Post specific - -echo "BEGIN: config.post" - -# Get task specific resources -. $EXPDIR/config.resources post - -# Convert nemsio files to grib files using post job -#------------------------------------------- - -# No. of concurrent post jobs [0 implies sequential] -export NPOSTGRP=42 -export OUTTYP=4 -export MODEL_OUT_FORM=binarynemsiompiio -if [ $OUTPUT_FILE = "netcdf" ]; then - export MODEL_OUT_FORM=netcdfpara -fi - -# Post driver job that calls gfs_nceppost.sh and downstream jobs -export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_NCEPPOST" -export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" -export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" - -export POSTGPSH="$HOMEpost/ush/gfs_nceppost.sh" -export POSTGPEXEC="$HOMEpost/exec/gfs_ncep_post" -export GOESF=NO # goes image -export FLXF=YES # grib2 flux file written by post - -export npe_postgp=$npe_post -export nth_postgp=1 - -export GFS_DOWNSTREAM="YES" -#JKHexport downset=2 -export downset=1 ## JKH (removes creation of pgrb2b files) - -export GRIBVERSION='grib2' -export SENDCOM="YES" - -echo "END: config.post" diff --git a/parm/config/config.postsnd b/parm/config/config.postsnd deleted file mode 100755 index d64b401cdb..0000000000 --- a/parm/config/config.postsnd +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/ksh -x - -########## config.postsnd ########## -# GFS bufr sounding step specific - -echo "BEGIN: config.postsnd" - -# Get task specific resources -. $EXPDIR/config.resources postsnd - -export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND -export ENDHOUR=180 -if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi - -echo "END: config.postsnd" diff --git a/parm/config/config.prep b/parm/config/config.prep deleted file mode 100755 index c04be70a09..0000000000 --- a/parm/config/config.prep +++ /dev/null @@ -1,68 +0,0 @@ -#!/bin/ksh -x - -########## config.prep ########## -# Prep step specific - -echo "BEGIN: config.prep" - -# Get task specific resources -. 
$EXPDIR/config.resources prep - -export DO_MAKEPREPBUFR="YES" # if NO, will copy prepbufr from globaldump - -# Relocation and syndata QC -export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} -[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" -export DO_RELOCATE="NO" -export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" -export SENDCOM=YES - -export COMINsyn=${COMINsyn:-${COMROOT}/gfs/prod/syndat} - -export HOMERELO=$HOMEgfs -export EXECRELO=${HOMERELO}/exec -export FIXRELO=${HOMERELO}/fix/fix_am -export USHRELO=${HOMERELO}/ush - -# Adjust observation error for GFS v16 parallels -# -# NOTE: Remember to set OBERROR in config.anal as PRVT is set below -# -# Set default prepobs_errtable.global -export PRVT=$FIXgsi/prepobs_errtable.global - - -# Set prepobs.errtable.global for GFS v16 retrospective parallels -if [[ $RUN_ENVIR == "emc" ]]; then - if [[ "$CDATE" -ge "2019021900" && "$CDATE" -lt "2019110706" ]]; then - export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 - fi - -# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps - if [[ "$CDATE" -ge "2019110706" && "$CDATE" -lt "2020040718" ]]; then - export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 - fi - -# NOTE: -# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is -# identical to ../prepobs_errtable.global. Thus, the logic below is not -# needed at this time - -# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations -# if [[ "$CDATE" -ge "2020040718" && "$CDATE" -lt "YYYMMDDHH" ]]; then -# export PRVT=$EXPDIR/prepobs_errtable.global -# fi - -fi - -# NSST bufr was created with a different set of files prior to 2020102200 -# See comments at the end of -# https://github.com/NOAA-EMC/global-workflow/issues/313 -if [[ "$CDATE" -ge "2020102200" ]]; then - export DTYPS_nsst='sfcshp tesac bathy trkob' -else - export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' -fi -export DO_MAKE_NSSTBUFR="NO" # if NO, will copy nsstbufr from globaldump - -echo "END: config.prep" diff --git a/parm/config/config.prepbufr b/parm/config/config.prepbufr deleted file mode 100755 index 904d946774..0000000000 --- a/parm/config/config.prepbufr +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/ksh -x - -########## config.prepbufr ########## -# PREPBUFR specific configuration - -echo "BEGIN: config.prepbufr" - -# Get task specific resources -. $EXPDIR/config.resources prepbufr - -# Set variables - -if [ $machine = "HERA" ]; then - export GESROOT=/scratch1/NCEPDEV/rstprod # set by module prod_envir on WCOSS_C -elif [ $machine = "ORION" ]; then - export GESROOT=/dev/null -fi - -echo "END: config.prepbufr" diff --git a/parm/config/config.resources b/parm/config/config.resources deleted file mode 100755 index 7510f60e91..0000000000 --- a/parm/config/config.resources +++ /dev/null @@ -1,568 +0,0 @@ -#!/bin/ksh -x - -########## config.resources ########## -# Set resource information for job tasks -# e.g. walltime, node, cores per node, memory etc. - -if [ $# -ne 1 ]; then - - echo "Must specify an input task argument to set resource variables!" 
- echo "argument can be any one of the following:" - echo "getic init coupled_ic" - echo "anal analcalc analdiag gldas fcst post vrfy metp arch echgres" - echo "eobs ediag eomg eupd ecen esfc efcs epos earc" - echo "init_chem mom6ic ocnpost" - echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" - echo "wavegempak waveawipsbulls waveawipsgridded" - echo "postsnd awips gempak" - echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" - exit 1 - -fi - -step=$1 - -echo "BEGIN: config.resources" - -if [[ "$machine" = "WCOSS_DELL_P3" ]]; then - export npe_node_max=28 - if [ "$QUEUE" = "dev2" -o "$QUEUE" = "devonprod2" -o "$QUEUE" = "devmax2" ]; then # WCOSS Dell 3.5 - export npe_node_max=40 - fi -elif [[ "$machine" = "WCOSS_C" ]]; then - export npe_node_max=24 -elif [[ "$machine" = "JET" ]]; then - if [[ "$PARTITION_BATCH" = "xjet" ]]; then - export npe_node_max=24 - elif [[ "$PARTITION_BATCH" = "vjet" || "$PARTITION_BATCH" = "sjet" ]]; then - export npe_node_max=16 - elif [[ "$PARTITION_BATCH" = "kjet" ]]; then - export npe_node_max=40 - fi -elif [[ "$machine" = "HERA" ]]; then - export npe_node_max=40 -elif [[ "$machine" = "ORION" ]]; then - export npe_node_max=40 -fi - -if [ $step = "prep" -o $step = "prepbufr" ]; then - - eval "export wtime_$step='00:45:00'" - eval "export npe_$step=4" - eval "export npe_node_$step=2" - eval "export nth_$step=1" - eval "export memory_$step=40G" - -elif [ $step = "waveinit" ]; then - - export wtime_waveinit="00:10:00" - export npe_waveinit=12 - export nth_waveinit=1 - export npe_node_waveinit=$(echo "$npe_node_max / $nth_waveinit" | bc) - export NTASKS=${npe_waveinit} - -elif [ $step = "waveprep" ]; then - - export wtime_waveprep="00:30:00" - export npe_waveprep=65 - export nth_waveprep=1 - export npe_node_waveprep=$(echo "$npe_node_max / $nth_waveprep" | bc) - export NTASKS=${npe_waveprep} - -elif [ $step = "wavepostsbs" ]; then - - export wtime_wavepostsbs="06:00:00" - export npe_wavepostsbs=10 - export nth_wavepostsbs=1 - export npe_node_wavepostsbs=$(echo "$npe_node_max / $nth_wavepostsbs" | bc) - export NTASKS=${npe_wavepostsbs} - -elif [ $step = "wavepostbndpnt" ]; then - - export wtime_wavepostbndpnt="02:00:00" - export npe_wavepostbndpnt=280 - export nth_wavepostbndpnt=1 - export npe_node_wavepostbndpnt=$(echo "$npe_node_max / $nth_wavepostbndpnt" | bc) - export NTASKS=${npe_wavepostbndpnt} - -elif [ $step = "wavepostbndpntbll" ]; then - - export wtime_wavepostbndpntbll="01:00:00" - export npe_wavepostbndpntbll=280 - export nth_wavepostbndpntbll=1 - export npe_node_wavepostbndpntbll=$(echo "$npe_node_max / $nth_wavepostbndpntbll" | bc) - export NTASKS=${npe_wavepostbndpntbll} - -elif [ $step = "wavepostpnt" ]; then - - export wtime_wavepostpnt="02:00:00" - export npe_wavepostpnt=280 - export nth_wavepostpnt=1 - export npe_node_wavepostpnt=$(echo "$npe_node_max / $nth_wavepostpnt" | bc) - export NTASKS=${npe_wavepostpnt} - -elif [ $step = "wavegempak" ]; then - - export wtime_wavegempak="01:00:00" - export npe_wavegempak=$npe_node_max - export nth_wavegempak=1 - export npe_node_wavegempak=$(echo "$npe_node_max / $nth_wavegempak" | bc) - export NTASKS=${npe_wavegempak} - -elif [ $step = "waveawipsbulls" ]; then - - export wtime_waveawipsbulls="00:30:00" - export npe_waveawipsbulls=$npe_node_max - export nth_waveawipsbulls=1 - export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) - export NTASKS=${npe_waveawipsbulls} - -elif [ $step = "waveawipsgridded" ]; then - - export 
wtime_waveawipsgridded="00:30:00" - export npe_waveawipsgridded=$npe_node_max - export nth_waveawipsgridded=1 - export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) - export NTASKS=${npe_waveawipsgridded} - -elif [ $step = "anal" ]; then - - export wtime_anal="01:00:00" - export npe_anal=1000 - export nth_anal=5 - export npe_anal_gfs=1000 - if [ $CASE = "C384" ]; then - export npe_anal=400 - export npe_anal_gfs=400 - fi - if [ $CASE = "C192" -o $CASE = "C96" -o $CASE = "C48" ]; then - export npe_anal=84 - export npe_anal_gfs=84 - fi - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then - export nth_anal=4 - fi - export nth_anal_gfs=$nth_anal - export npe_node_anal=$(echo "$npe_node_max / $nth_anal" | bc) - export nth_cycle=$nth_anal - if [[ "$machine" == "WCOSS_C" ]]; then export memory_anal="3072M"; fi - -elif [ $step = "analcalc" ]; then - - export wtime_analcalc="00:10:00" - export npe_analcalc=127 - export nth_analcalc=1 - export npe_node_analcalc=$npe_node_max - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export npe_analcalc=127 ; fi - -elif [ $step = "analdiag" ]; then - - export wtime_analdiag="00:10:00" - export npe_analdiag=112 - export nth_analdiag=1 - export npe_node_analdiag=$npe_node_max - if [[ "$machine" == "WCOSS_C" ]]; then export memory_analdiag="3072M"; fi - -elif [ $step = "gldas" ]; then - - export wtime_gldas="00:10:00" - export npe_gldas=96 - export nth_gldas=1 - export npe_node_gldas=$npe_node_max - export npe_gaussian=96 - export nth_gaussian=1 - export npe_node_gaussian=24 - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export npe_gldas=112 ; fi - if [[ "$machine" == "WCOSS_C" ]]; then export memory_gldas="3072M"; fi - -elif [ $step = "fcst" ]; then - - export wtime_fcst="00:30:00" - if [ $CASE = "C768" ]; then - export wtime_fcst_gfs="06:00:00" - elif [ $CASE = "C384" ]; then - export wtime_fcst_gfs="06:00:00" - else - export wtime_fcst_gfs="03:00:00" - fi - - # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined - CDUMP_LIST=${CDUMP:-"gdas gfs"} - for CDUMP in $CDUMP_LIST; do - if [[ "$CDUMP" == "gfs" ]]; then - export layout_x=$layout_x_gfs - export layout_y=$layout_y_gfs - export WRITE_GROUP=$WRITE_GROUP_GFS - export WRTTASK_PER_GROUP=$WRTTASK_PER_GROUP_GFS - fi - - (( ATMPETS = layout_x * layout_y * 6 )) - - # Mediator only uses the atm model PETS or less - export MEDPETS=${MEDPETS:-ATMPETS} - - if [[ $DO_AERO == "YES" ]]; then - # Aerosol model only uses the atm model PETS - export CHMPETS=$ATMPETS - # Aerosol model runs on same PETs as ATM, so don't add to $NTASKS_TOT - fi - - # If using in-line post, add the write tasks to the ATMPETS - if [[ $QUILTING == ".true." 
]]; then - (( ATMPETS = ATMPETS + WRITE_GROUP * WRTTASK_PER_GROUP )) - fi - export ATMPETS - NTASKS_TOT=$ATMPETS - - export nth_fcst=${nth_fv3:-2} - export nth_fcst_gfs=${nth_fv3_gfs:-2} - - export npe_node_fcst=$(echo "$npe_node_max / $nth_fcst" | bc) - export npe_node_fcst_gfs=$(echo "$npe_node_max / $nth_fcst_gfs" | bc) - - if [[ "$machine" == "WCOSS_C" ]]; then export memory_fcst="1024M"; fi - - if [[ $DO_WAVE == "YES" ]]; then - case $waveGRD in - 'gnh_10m aoc_9km gsh_15m') export WAVPETS=140 ;; - 'gwes_30m') export WAVPETS=160 ;; - *) - echo "FATAL: Number of PEs not defined for wave grid '$waveGRD'" - echo " Please add an entry to config.resources within fcst for this grid" - exit 3 - esac - (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) - fi - - if [[ $DO_OCN == "YES" ]]; then - case $OCNRES in - # Except for 025, these are guesses for now - 100) export OCNPETS=20 ;; - 050) export OCNPETS=60 ;; - 025) export OCNPETS=220 ;; - *) - echo "FATAL: Number of PEs not defined for ocean resolution '$OCNRES'" - echo " Please add an entry to config.resources within fcst for this resolution" - exit 3 - esac - (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) - fi - - if [[ $DO_ICE == "YES" ]]; then - case $ICERES in - # Except for 025, these are guesses for now - 100) export ICEPETS=10 ;; - 050) export ICEPETS=30 ;; - 025) export ICEPETS=80 ;; - *) - echo "FATAL: Number of PEs not defined for ice resolution '$ICERES'" - echo " Please add an entry to config.resources within fcst for this resolution" - exit 3 - esac - (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) - fi - - if [[ $CDUMP == "gfs" ]]; then - export npe_fcst_gfs=$NTASKS_TOT - else - export npe_fcst=$NTASKS_TOT - fi - done - -elif [ $step = "ocnpost" ]; then - - export wtime_ocnpost="00:30:00" - export npe_ocnpost=1 - export npe_node_ocnpost=1 - export nth_ocnpost=1 - export memory_ocnpost="96G" - -elif [ $step = "post" ]; then - - export wtime_post="02:00:00" - export wtime_post_gfs="06:00:00" - export memory_post="60G" - export npe_post=112 - export nth_post=1 - export npe_node_post=12 - export npe_node_dwn=$npe_node_max - if [ $machine = "WCOSS_DELL_P3" ]; then - export npe_dwn=28 - else - export npe_dwn=24 - fi - if [ "$WRITE_DOPOST" = ".true." 
]; then - export npe_post=$npe_dwn - export wtime_post="00:10:00" - export wtime_post_gfs="00:10:00" - fi - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export npe_node_post=28 ; fi - if [[ "$machine" == "WCOSS_C" ]]; then export memory_post="3072M"; fi - -elif [ $step = "wafs" ]; then - - export wtime_wafs="00:30:00" - export npe_wafs=1 - export npe_node_wafs=1 - export nth_wafs=1 - -elif [ $step = "wafsgcip" ]; then - - export wtime_wafsgcip="00:30:00" - export npe_wafsgcip=2 - export npe_node_wafsgcip=1 - export nth_wafsgcip=1 - -elif [ $step = "wafsgrib2" ]; then - - export wtime_wafsgrib2="00:30:00" - export npe_wafsgrib2=1 - export npe_node_wafsgrib2=1 - export nth_wafsgrib2=1 - -elif [ $step = "wafsblending" ]; then - - export wtime_wafsblending="00:30:00" - export npe_wafsblending=1 - export npe_node_wafsblending=1 - export nth_wafsblending=1 - -elif [ $step = "wafsgrib20p25" ]; then - - export wtime_wafsgrib20p25="00:30:00" - export npe_wafsgrib20p25=1 - export npe_node_wafsgrib20p25=1 - export nth_wafsgrib20p25=1 - -elif [ $step = "wafsblending0p25" ]; then - - export wtime_wafsblending0p25="00:30:00" - export npe_wafsblending0p25=1 - export npe_node_wafsblending0p25=1 - export nth_wafsblending0p25=1 - -elif [ $step = "vrfy" ]; then - - export wtime_vrfy="03:00:00" - export wtime_vrfy_gfs="06:00:00" - export npe_vrfy=3 - export nth_vrfy=1 - export npe_node_vrfy=1 - export npe_vrfy_gfs=1 - export npe_node_vrfy_gfs=1 - if [[ "$machine" == "WCOSS_C" ]]; then - export memory_vrfy="3072M" - elif [[ "$machine" == "HERA" ]]; then - export memory_vrfy="16384M" - fi - -elif [ $step = "metp" ]; then - - export nth_metp=1 - export wtime_metp="03:00:00" - export npe_metp=4 - export npe_node_metp=4 - export wtime_metp_gfs="06:00:00" - export npe_metp_gfs=4 - export npe_node_metp_gfs=4 - if [[ "$machine" == "WCOSS_C" ]]; then - export memory_metp="3072M" - elif [[ "$machine" == "THEIA" ]]; then - export memory_metp="16384M" - fi - -elif [ $step = "echgres" ]; then - - export wtime_echgres="00:10:00" - export npe_echgres=3 - export nth_echgres=$npe_node_max - export npe_node_echgres=1 - -elif [ $step = "init" ]; then - - export wtime_init="00:30:00" - export npe_init=24 - export nth_init=1 - export npe_node_init=6 - if [ $machine = "WCOSS_DELL_P3" ]; then - export memory_init="10G" - elif [ $machine = "JET" ]; then - export memory_init="50G" - else - export memory_init="70G" - fi - -elif [ $step = "init_chem" ]; then - - export wtime_init_chem="00:30:00" - export npe_init_chem=1 - export npe_node_init_chem=1 - -elif [ $step = "mom6ic" ]; then - - export wtime_mom6ic="00:30:00" - export npe_mom6ic=24 - export npe_node_mom6ic=24 - -elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then - - eval "export wtime_$step='06:00:00'" - eval "export npe_$step=1" - eval "export npe_node_$step=1" - eval "export nth_$step=1" - eval "export memory_$step=2048M" - -elif [ $step = "coupled_ic" ]; then - - export wtime_coupled_ic="00:15:00" - export npe_coupled_ic=1 - export npe_node_coupled_ic=1 - export nth_coupled_ic=1 - -elif [ $step = "eobs" -o $step = "eomg" ]; then - - export wtime_eobs="00:15:00" - export wtime_eomg="01:00:00" - if [ $CASE = "C768" ]; then - export npe_eobs=200 - elif [ $CASE = "C384" ]; then - export npe_eobs=100 - elif [ $CASE = "C192" ]; then - export npe_eobs=40 - elif [ $CASE = "C96" -o $CASE = "C48" ]; then - export npe_eobs=20 - fi - export nth_eobs=2 - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export nth_eobs=7; fi - export npe_node_eobs=$(echo "$npe_node_max / 
$nth_eobs" | bc) - if [[ "$machine" == "WCOSS_C" ]]; then export memory_eobs="3072M"; fi - -elif [ $step = "ediag" ]; then - - export wtime_ediag="00:06:00" - export npe_ediag=56 - export nth_ediag=1 - export npe_node_ediag=$npe_node_max - if [[ "$machine" == "WCOSS_C" ]]; then export memory_ediag="3072M"; fi - -elif [ $step = "eupd" ]; then - - export wtime_eupd="00:30:00" - if [ $CASE = "C768" ]; then - export npe_eupd=480 - export nth_eupd=6 - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then - export nth_eupd=7 - fi - if [[ "$machine" = "HERA" ]]; then - export npe_eupd=150 - export nth_eupd=40 - fi - elif [ $CASE = "C384" ]; then - export npe_eupd=270 - export nth_eupd=2 - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then - export nth_eupd=9 - fi - if [[ "$machine" = "HERA" ]]; then - export npe_eupd=100 - export nth_eupd=40 - fi - elif [ $CASE = "C192" -o $CASE = "C96" -o $CASE = "C48" ]; then - export npe_eupd=42 - export nth_eupd=2 - if [[ "$machine" = "HERA" ]]; then - export npe_eupd=40 - export nth_eupd=40 - fi - fi - export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) - if [[ "$machine" == "WCOSS_C" ]]; then - export memory_eupd="3072M" - fi - -elif [ $step = "ecen" ]; then - - export wtime_ecen="00:10:00" - export npe_ecen=80 - export nth_ecen=6 - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export nth_ecen=7; fi - if [ $CASE = "C384" -o $CASE = "C192" -o $CASE = "C96" -o $CASE = "C48" ]; then export nth_ecen=2; fi - export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) - export nth_cycle=$nth_ecen - if [[ "$machine" == "WCOSS_C" ]]; then export memory_ecen="3072M"; fi - -elif [ $step = "esfc" ]; then - - export wtime_esfc="00:06:00" - export npe_esfc=80 - export npe_node_esfc=$npe_node_max - export nth_esfc=1 - export nth_cycle=$nth_esfc - if [[ "$machine" == "WCOSS_C" ]]; then export memory_esfc="3072M"; fi - -elif [ $step = "efcs" ]; then - - if [ $CASE = "C768" ]; then - export wtime_efcs="01:00:00" - else - export wtime_efcs="00:40:00" - fi - export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) - export nth_efcs=${nth_fv3:-2} - export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) - if [[ "$machine" == "WCOSS_C" ]]; then export memory_efcs="254M"; fi - -elif [ $step = "epos" ]; then - - export wtime_epos="00:15:00" - export npe_epos=80 - export nth_epos=6 - if [[ "$machine" = "WCOSS_DELL_P3" ]]; then export nth_epos=7; fi - export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) - if [[ "$machine" == "WCOSS_C" ]]; then export memory_epos="254M"; fi - -elif [ $step = "postsnd" ]; then - - export wtime_postsnd="02:00:00" - export npe_postsnd=40 - export nth_postsnd=1 - export npe_node_postsnd=5 - export npe_postsndcfp=9 - export npe_node_postsndcfp=3 - if [ $OUTPUT_FILE == "nemsio" ]; then - export npe_postsnd=13 - export npe_node_postsnd=4 - fi - if [[ "$machine" = "HERA" ]]; then export npe_node_postsnd=2; fi - if [[ "$machine" == "WCOSS_C" ]]; then export memory_postsnd="254M"; fi - -elif [ $step = "awips" ]; then - - export wtime_awips="03:30:00" - export npe_awips=4 - export npe_node_awips=4 - export nth_awips=2 - if [[ "$machine" == "WCOSS_DELL_P3" ]]; then - export npe_awips=2 - export npe_node_awips=2 - export nth_awips=1 - fi - if [[ "$machine" == "WCOSS_C" ]]; then export memory_awips="2048M"; fi - -elif [ $step = "gempak" ]; then - - export wtime_gempak="02:00:00" - export npe_gempak=17 - export npe_node_gempak=4 - export nth_gempak=3 - if [[ "$machine" == "WCOSS_C" ]]; then export memory_gempak="254M"; fi - -else - - echo "Invalid step = 
$step, ABORT!" - exit 2 - -fi - -echo "END: config.resources" diff --git a/parm/config/config.vrfy b/parm/config/config.vrfy deleted file mode 100755 index adea840e7b..0000000000 --- a/parm/config/config.vrfy +++ /dev/null @@ -1,198 +0,0 @@ -#!/bin/ksh -x - -########## config.vrfy ########## -# Verification step specific - -echo "BEGIN: config.vrfy" - -# Get task specific resources -. $EXPDIR/config.resources vrfy - -export DO_VSDB=${DO_VSDB:-"NO"} # Switch to run VSDB package; set in config.base - -export VDUMP="gfs" # Verifying dump -export CDUMPFCST="gdas" # Fit-to-obs with GDAS/GFS prepbufr -export CDFNL="gdas" # Scores verification against GDAS/GFS analysis - -export MKPGB4PRCP="NO" # Make 0.25-deg pgb files in ARCDIR for precip verification ## JKH -export VRFYFITS="NO" # Fit to observations ## JKH -export VSDB_STEP1="NO" # Populate VSDB database ## JKH -export VSDB_STEP2="NO" -export VRFYG2OBS="NO" # Grid to observations, see note below if turning ON ## JKH -export VRFYPRCP="NO" # Precip threat scores ## JKH -export VRFYRAD="NO" # Radiance data assimilation monitoring ## JKH -export VRFYOZN="NO" # Ozone data assimilation monitoring ## JKH -export VRFYMINMON="NO" # GSI minimization monitoring ## JKH -export VRFYTRAK="YES" # Hurricane track verification -export VRFYGENESIS="NO" # Cyclone genesis verification ## JKH -export VRFYFSU="NO" # Cyclone genesis verification (FSU) -export RUNMOS="NO" # whether to run entire MOS package - -#------------------------------------------------- -# Fit to Observations -#------------------------------------------------- - -if [ $VRFYFITS = "YES" ]; then - - export fit_ver="newm.1.3" - export fitdir="$BASE_GIT/verif/global/Fit2Obs/${fit_ver}/batrun" - export PRVT=$HOMEgfs/fix/fix_gsi/prepobs_errtable.global - export HYBLEVS=$HOMEgfs/fix/fix_am/global_hyblev.l${LEVS}.txt - export CUE2RUN=$QUEUE - - export VBACKUP_FITS=24 - - export CONVNETC="NO" - if [ ${netcdf_diag:-".false."} = ".true." ]; then - export CONVNETC="YES" - fi - - if [ $machine = "WCOSS_C" ]; then - export fitdir="$BASE_GIT/verif/global/parafits.fv3nems/batrun" - export PREPQFITSH="$fitdir/subfits_cray_nems" - elif [ $machine = "WCOSS_DELL_P3" ]; then - export PREPQFITSH="$fitdir/subfits_dell_nems" - elif [ $machine = "HERA" ]; then - export PREPQFITSH="$fitdir/subfits_hera_slurm" - elif [ $machine = "ORION" ]; then - export PREPQFITSH="$fitdir/subfits_orion_netcdf" - else - echo "Fit2Obs NOT supported on this machine" - fi - -fi - -#---------------------------------------------------------- -# VSDB STEP1, Verify Precipipation and Grid To Obs options -#---------------------------------------------------------- -# All these call $VSDBSH - -if [ "$CDUMP" = "gfs" ] ; then - ddd=$(echo $CDATE |cut -c 1-8) - #if [ $ddd -eq 5 -o $ddd -eq 10 ]; then export VSDB_STEP2 = "YES" ;fi - - if [ $VSDB_STEP1 = "YES" -o $VSDB_STEP2 = "YES" -o $VRFYPRCP = "YES" -o $VRFYG2OBS = "YES" ]; then - export BACKDATEVSDB=24 # execute vsdbjob for the previous day - export VBACKUP_PRCP=24 # back up for QPF verification data - export vsdbsave="$NOSCRUB/archive/vsdb_data" # place to save vsdb database - export vsdbhome=$BASE_VERIF # location of global verification scripts - export VSDBSH="$vsdbhome/vsdbjob.sh" # VSDB job script - export VSDBJOBSH=$SCRgfs/vsdbjob_submit.sh # VSDB submission job - export vlength=$FHMAX_GFS # verification length - export vhr_rain=$FHMAX_GFS # verification length for precip - export ftyplist="pgbq" # verif. 
files used for computing QPF ETS scores - export ptyplist="PRATE" # precip types in GRIB: PRATE or APCP - export anltype="gfs" # default=gfs, analysis type (gfs or gdas) for verification - export rain_bucket=6 # prate in pgb files is 6-hr accumulated - - export VSDB_START_DATE="$SDATE" # starting date for vsdb maps - export webhost="emcrzdm.ncep.noaa.gov" # webhost(rzdm) computer - export webhostid="$USER" # webhost(rzdm) user name - export SEND2WEB="NO" # whether or not to send maps to webhost - export WEBDIR="/home/people/emc/www/htdocs/gmb/${webhostid}/vsdb/$PSLOT" - export mdlist="gfs $PSLOT " # exps (up to 10) to compare in maps - fi -fi - - -#---------------------------------------------------------- -# Minimization, Radiance and Ozone Monitoring -#---------------------------------------------------------- - -if [ $VRFYRAD = "YES" -o $VRFYMINMON = "YES" -o $VRFYOZN = "YES" ]; then - - export envir="para" - - # Radiance Monitoring - if [[ "$VRFYRAD" == "YES" && "$CDUMP" == "$CDFNL" ]] ; then - - export RADMON_SUFFIX=$PSLOT - export TANKverf="$NOSCRUB/monitor/radmon" - export VRFYRADSH="$HOMEgfs/jobs/JGDAS_ATMOS_VERFRAD" - - fi - - # Minimization Monitoring - if [[ "$VRFYMINMON" = "YES" ]] ; then - - export MINMON_SUFFIX=$PSLOT - export M_TANKverf="$NOSCRUB/monitor/minmon" - if [[ "$CDUMP" = "gdas" ]] ; then - export VRFYMINSH="$HOMEgfs/jobs/JGDAS_ATMOS_VMINMON" - elif [[ "$CDUMP" = "gfs" ]] ; then - export VRFYMINSH="$HOMEgfs/jobs/JGFS_ATMOS_VMINMON" - fi - - fi - - # Ozone Monitoring - if [[ "$VRFYOZN" == "YES" && "$CDUMP" == "$CDFNL" ]] ; then - - export HOMEgfs_ozn="$HOMEgfs" - export OZNMON_SUFFIX=$PSLOT - export TANKverf_ozn="$NOSCRUB/monitor/oznmon" - export VRFYOZNSH="$HOMEgfs/jobs/JGDAS_ATMOS_VERFOZN" - - fi - -fi - - -#------------------------------------------------- -# Cyclone genesis and cyclone track verification -#------------------------------------------------- - -export ens_tracker_ver=v1.1.15.4 -export HOMEens_tracker=$BASE_GIT/TC_tracker/TC_tracker.${ens_tracker_ver} -## JKH -if [ $machine = "JET" ] ; then - export HOMEens_tracker=$HOMEgfs/sorc/ens_tracker.${ens_tracker_ver} -fi - -if [ "$VRFYTRAK" = "YES" ]; then - - export TRACKERSH="$HOMEgfs/jobs/JGFS_ATMOS_CYCLONE_TRACKER" - if [ "$CDUMP" = "gdas" ]; then - export FHOUT_CYCLONE=3 - export FHMAX_CYCLONE=$FHMAX - else - export FHOUT_CYCLONE=6 - export FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) - fi - ## JKH - if [ $machine = "JET" ]; then - export COMINsyn=${COMINsyn:-/mnt/lfs4/HFIP/hwrf-data/hwrf-input/SYNDAT-PLUS} - else - export COMINsyn=${COMINsyn:-${COMROOT}/gfs/prod/syndat} - fi -fi - - -if [[ "$VRFYGENESIS" == "YES" && "$CDUMP" == "gfs" ]]; then - - export GENESISSH="$HOMEgfs/jobs/JGFS_ATMOS_CYCLONE_GENESIS" -fi - -if [[ "$VRFYFSU" == "YES" && "$CDUMP" == "gfs" ]]; then - - export GENESISFSU="$HOMEgfs/jobs/JGFS_ATMOS_FSU_GENESIS" -fi - -if [[ "$RUNMOS" == "YES" && "$CDUMP" == "gfs" ]]; then - - if [ $machine = "WCOSS_C" ] ; then - export RUNGFSMOSSH="$HOMEgfs/scripts/run_gfsmos_master.sh.cray" - elif [ $machine = "WCOSS_DELL_P3" ] ; then - export RUNGFSMOSSH="$HOMEgfs/scripts/run_gfsmos_master.sh.dell" - elif [ $machine = "HERA" ] ; then - export RUNGFSMOSSH="$HOMEgfs/scripts/run_gfsmos_master.sh.hera" - else - echo "WARNING: MOS package is not enabled on $machine!" 
- export RUNMOS="NO" - export RUNGFSMOSSH="" - fi -fi - - - -echo "END: config.vrfy" diff --git a/parm/config/config.wave b/parm/config/config.wave deleted file mode 100755 index 3fc8ea7f87..0000000000 --- a/parm/config/config.wave +++ /dev/null @@ -1,140 +0,0 @@ -#!/bin/ksh -x - -########## config.wave ########## -# Wave steps specific - -echo "BEGIN: config.wave" - -# Parameters that are common to all wave model steps - -# System and version -export wave_sys_ver=v1.0.0 - -export EXECwave="$HOMEgfs/exec" -export FIXwave="$HOMEgfs/fix/fix_wave" -export PARMwave="$HOMEgfs/parm/wave" -export USHwave="$HOMEgfs/ush" - -# This config contains variables/parameters used in the fcst step -# Some others are also used across the workflow in wave component scripts - -# General runtime labels -export CDUMPwave="${CDUMP}wave" - -# In GFS/GDAS, restart files are generated/read from gdas runs -export CDUMPRSTwave="gdas" - -# Grids for wave model -# GFSv16 -export waveGRD=${waveGRD:-'gnh_10m aoc_9km gsh_15m'} -export waveGRDN=${waveGRDN:-'1 2 3'} # gridnumber for ww3_multi -export waveGRDG=${waveGRDG:-'10 20 30'} # gridgroup for ww3_multi -export USE_WAV_RMP=${USE_WAV_RMP:-'YES'} #yes/no rmp grid remapping pre-processed coefficients -export DOBNDPNT_WAVE=${DOBNDPNT_WAVE:-'YES'} - -# ESMF input grid -export waveesmfGRD=${waveesmfGRD:-'glox_10m'} # input grid - -# Grids for input wind fields -export WAVEWND_DID= -export WAVEWND_FID= - -# Grids for output fields (used in all steps) -export waveuoutpGRD=points -export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native - # in POST will generate grib unless gribOK not set -export wavesbsGRD='' # side-by-side grids generated as wave model runs, writes to com -export wavepostGRD=${wavepostGRD:-'gnh_10m aoc_9km gsh_15m'} # Native grids that will be post-processed (grib2) - - -# The start time reflects the number of hindcast hours prior to the cycle initial time -if [ "$CDUMP" = "gdas" ]; then - export FHMAX_WAV=${FHMAX:-9} -else - export FHMAX_WAV=$FHMAX_GFS -fi -export WAVHINDH=${WAVHINDH:-0} -export FHMIN_WAV=${FHMIN_WAV:-0} -export FHOUT_WAV=${FHOUT_WAV:-3} -export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} -export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} - -# gridded and point output rate -export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) -export DTPNT_WAV=${DTPNT_WAV:-3600} -export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) - -# Selected output parameters (gridded) -export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} - -# Restart file config -if [ "$CDUMP" = "gdas" ]; then - export WAVNCYC=4 - export WAVHCYC=${assim_freq:-6} - export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days -elif [ ${gfs_cyc} -ne 0 ]; then - export WAVHCYC=${assim_freq:-6} - export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days -else - export WAVHCYC=0 - export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days -fi - -# Restart timing business - -export RSTTYPE_WAV='T' # generate second tier of restart files -if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS - export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file - export DT_2_RST_WAV=43200 # restart stride for checkpointing restart - export RSTIOFF_WAV=0 # first restart file offset relative to model start -else # This is a GFS run - rst_dt_gfs=$(( restart_interval_gfs * 3600 )) - if [ $rst_dt_gfs -gt 0 ]; then - export DT_1_RST_WAV=${rst_dt_gfs:-0} # 
time between restart files, set to DTRST=1 for a single restart file - export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart - else - rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) - export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file - export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run - fi - export RSTIOFF_WAV=0 # first restart file offset relative to model start -fi -# -# Set runmember to default value if not GEFS cpl run -# (for a GFS coupled run, RUNMEN would be unset, this should default to -1) -export RUNMEM=${RUNMEM:--1} -# Set wave model member tags if ensemble run -# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN -if [ $RUNMEM = -1 ]; then -# No suffix added to model ID in case of deterministic run - export waveMEMB= -else -# Extract member number only - export waveMEMB=$(echo $RUNMEM | grep -o '..$') -fi - -# Determine if wave component needs input and/or is coupled -export WW3ATMINP='CPL' -if [[ $DO_ICE == "YES" ]]; then - export WW3ICEINP='CPL' - export WAVEICE_FID= -else - export WW3ICEINP='YES' - export WAVEICE_FID=glix_10m -fi - -export WAVECUR_DID=rtofs -if [[ $DO_OCN == "YES" ]]; then - export WW3CURINP='CPL' - export WAVECUR_FID= -else - export WW3CURINP='YES' - export WAVECUR_FID=glix_10m -fi - -# Determine if input is from perturbed ensemble (T) or single input file (F) for all members -export WW3ATMIENS='F' -export WW3ICEIENS='F' -export WW3CURIENS='F' - -echo "END: config.wave" diff --git a/parm/config/config.waveinit b/parm/config/config.waveinit deleted file mode 100755 index 93960e5e25..0000000000 --- a/parm/config/config.waveinit +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/ksh -x - -########## config.waveinit ########## -# Wave steps specific - -echo "BEGIN: config.waveinit" - -# Get task specific resources -. $EXPDIR/config.resources waveinit - -# Step label -export sigMODE=${sigMODE:-init} - -echo "END: config.waveinit" diff --git a/parm/config/config.wavepostsbs b/parm/config/config.wavepostsbs deleted file mode 100755 index c6fa2187e6..0000000000 --- a/parm/config/config.wavepostsbs +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/ksh -x - -########## config.wavepostsbs ########## -# Wave steps specific - -echo "BEGIN: config.wavepostsbs" - -# Get task specific resources -. $EXPDIR/config.resources wavepostsbs - -# Subgrid info for grib2 encoding -export WAV_SUBGRBSRC="" -export WAV_SUBGRB="" - -# Options for point output (switch on/off boundary point output) -export DOIBP_WAV='NO' # Input boundary points -export DOFLD_WAV='YES' # Field data -export DOPNT_WAV='YES' # Station data -export DOGRB_WAV='YES' # Create grib2 files -export DOGRI_WAV='YES' # Create interpolated grids -export DOSPC_WAV='YES' # Spectral post -export DOBLL_WAV='YES' # Bulletin post - -echo "END: config.wavepostsbs" diff --git a/parm/config/config.waveprep b/parm/config/config.waveprep deleted file mode 100755 index ae936f3f63..0000000000 --- a/parm/config/config.waveprep +++ /dev/null @@ -1,50 +0,0 @@ -#!/bin/ksh -x - -########## config.waveprep ########## -# Wave steps specific - -echo "BEGIN: config.waveprep" - -# Get task specific resources -. 
$EXPDIR/config.resources waveprep - -# Step label -export sigMODE=${sigMODE:-prep} - -export HOUR_INC=3 # This value should match with the one used in - # the wind update script -export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step -export POFILETYPE=1 # POFILETYPE=1 one point file per output step - -# Parameters for ww3_multi.inp -# Unified output T or F -export FUNIPNT='T' -# Output server type (see ww3_multi.inp in WW3 repo) -export IOSRV=${IOSRV:-'3'} -# Flag for dedicated output process for unified points -export FPNTPROC='T' -# Flag for grids sharing dedicated output processes -export FGRDPROC='F' -# Flag for masking computation in two-way nesting -export FLAGMASKCOMP="F" -# Flag for masking at printout time. -export FLAGMASKOUT="F" - -# Wind interval for standalone file-based runs -# Output stride -export WAV_WND_HOUR_INC=1 # This value should match with the one used in - # the wind update script -# Intake currents settings -export WAV_CUR_DT=${WAV_CUR_DT:-3} -export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} -export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} -export WAV_CUR_CDO_SMOOTH="NO" - -# Location of CDO module -export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} - -if [ "${WW3ICEINP}" = "YES" ]; then - export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 -fi - -echo "END: config.waveprep" diff --git a/parm/config/gefs/config.base.emc.dyn b/parm/config/gefs/config.base.emc.dyn new file mode 100644 index 0000000000..8a336a2650 --- /dev/null +++ b/parm/config/gefs/config.base.emc.dyn @@ -0,0 +1,379 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT="@HPSS_PROJECT@" + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='glo_025' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-3} +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
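# A minimal worked example, assuming cyc="06" and gfs_cyc=4 purely for
# illustration, of how the per-cycle GFS forecast length and cycle spacing
# defined above resolve: FHMAX_GFS is filled by indirection from
# FHMAX_GFS_${cyc}, and STEP_GFS is the number of hours between GFS cycles.
cyc="06"; gfs_cyc=4; FHMAX_GFS_06=120
FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}})   # expands FHMAX_GFS_06 -> 120
STEP_GFS=$(( 24 / gfs_cyc ))                  # 24 h / 4 cycles -> 6 h between GFS cycles
echo "FHMAX_GFS=${FHMAX_GFS} STEP_GFS=${STEP_GFS}"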
+ +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="NO" +export DO_JEDIATMENS="NO" +export DO_JEDIOCNVAR="NO" +export DO_JEDILANDDA="NO" +export DO_MERGENSST="NO" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc','rwmr_inc','snmr_inc','grle_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc','rwmr_inc','snmr_inc','grle_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
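[Editor's note] config.base is the shared layer consumed by every task: a job sources config.base (which itself pulls in config.com, as shown above), then its task config, which in turn loads per-task resources via config.resources, the pattern repeated throughout this patch. A minimal illustrative sketch of that layering (the job-script plumbing is not shown in this excerpt, and the path below is hypothetical):

    # illustrative config layering by a job, assuming EXPDIR points at the experiment directory
    export EXPDIR="/path/to/experiment"            # hypothetical location
    source "${EXPDIR}/config.base"                 # shared settings; also sources config.com
    source "${EXPDIR}/config.anal"                 # task config; runs ". ${EXPDIR}/config.resources anal"
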
+ +echo "END: config.base" diff --git a/parm/config/gefs/config.com b/parm/config/gefs/config.com new file mode 120000 index 0000000000..6a3754559c --- /dev/null +++ b/parm/config/gefs/config.com @@ -0,0 +1 @@ +../gfs/config.com \ No newline at end of file diff --git a/parm/config/gefs/config.coupled_ic b/parm/config/gefs/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/parm/config/gefs/config.coupled_ic @@ -0,0 +1,43 @@ +#! /usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/parm/config/gefs/config.efcs b/parm/config/gefs/config.efcs new file mode 100644 index 0000000000..95c2cb58de --- /dev/null +++ b/parm/config/gefs/config.efcs @@ -0,0 +1,97 @@ +#! /usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE_ENS}" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. 
+export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. + +# ocean model resolution +case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/parm/config/gefs/config.resources b/parm/config/gefs/config.resources new file mode 100644 index 0000000000..9dd5c6e737 --- /dev/null +++ b/parm/config/gefs/config.resources @@ -0,0 +1,958 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" + echo "argument can be any one of the following:" + echo "coupled_ic aerosol_init" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlprep landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ ${PARTITION_BATCH} = "xjet" ]]; then + export npe_node_max=24 + elif [[ ${PARTITION_BATCH} = "vjet" || ${PARTITION_BATCH} = "sjet" ]]; then + export npe_node_max=16 + elif [[ ${PARTITION_BATCH} = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ ${machine} = "HERA" ]]; then + export npe_node_max=40 +elif [[ ${machine} = "S4" ]]; then + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_node_max=32 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ ${machine} = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ ${step} = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + 
export memory_aerosol_init="6G" + +elif [[ ${step} = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ ${step} = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ ${step} = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="10GB" + +elif [[ ${step} = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ ${step} = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ ${step} = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ ${step} = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ ${step} = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ ${step} = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make 
below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlprep" || "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ "${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif [[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + elif [[ "${step}" = "landanlprep" ]]; then + export wtime_landanlprep="00:30:00" + npe_landanlprep=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlprep + export nth_landanlprep=1 + npe_node_landanlprep=$(echo "${npe_node_max} / ${nth_landanlprep}" | bc) + export npe_node_landanlprep + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" 
+ exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / ${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ ${step} = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ ${CASE} = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ ${machine} = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export 
nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ ${machine} = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ ${PARTITION_BATCH} = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ ${PARTITION_BATCH} = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ ${step} = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ ${step} = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export layout_x=${layout_x_gfs} + export layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components, usually it lands on the atmosphere tasks. + # However, it is suggested limiting mediator PETS to 300, as it may cause the slow performance. 
+ # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case "${CASE}" in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + declare -x "wtime_${step}_gfs"="06:00:00" + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 + ;; + esac + + unset _CDUMP _CDUMP_LIST + unset NTASKS_TOT + +elif [[ ${step} = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ ${machine} == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requiremtn + npe_ocnpost=2 + fi + +elif [[ ${step} = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ ${step} = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ ${step} = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 
+ export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ ${step} = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ ${step} = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ ${step} = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ ${step} = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ ${step} = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ ${machine} == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ ${machine} == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ ${step} = "echgres" ]]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif [[ ${step} = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ ${step} = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ ${step} = "arch" || ${step} = "earc" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ ${step} = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export 
layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ ${step} = "eobs" || ${step} = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eobs=200 + elif [[ ${CASE} = "C384" ]]; then + export npe_eobs=100 + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ ${machine} = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ ${step} = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ ${step} = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ ${CASE} = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ ${CASE} = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ ${machine} = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ ${step} = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ ${CASE} = "C384" || ${CASE} = "C192" || ${CASE} = "C96" || ${CASE} = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ ${step} = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ ${step} = "epos" ]]; then + + export wtime_epos="00:15:00" 
+ export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ ${step} = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ ${step} = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ ${step} = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/parm/config/gefs/config.ufs b/parm/config/gefs/config.ufs new file mode 100644 index 0000000000..562fc9228d --- /dev/null +++ b/parm/config/gefs/config.ufs @@ -0,0 +1,373 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" + echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" + exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" 
+ exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=4 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" 
+ exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" + exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" 
+ exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "mx050") + ntasks_ww3=240 + ;; + "mx025") + ntasks_ww3=80 + ;; + "glo_025") + ntasks_ww3=262 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/parm/config/gefs/yaml/defaults.yaml b/parm/config/gefs/yaml/defaults.yaml new file mode 100644 index 0000000000..6e7633bfe0 --- /dev/null +++ b/parm/config/gefs/yaml/defaults.yaml @@ -0,0 +1 @@ +# This file intentionally left blank diff --git a/parm/config/gfs/config.aero b/parm/config/gfs/config.aero new file mode 100644 index 0000000000..1cb3bf5679 --- /dev/null +++ b/parm/config/gfs/config.aero @@ -0,0 +1,40 @@ +#! /usr/bin/env bash + +# UFS-Aerosols settings + +# Directory containing GOCART configuration files. Defaults to parm/chem if unset. +AERO_CONFIG_DIR=$HOMEgfs/parm/chem + +# Path to the input data tree +case $machine in + "HERA") + AERO_INPUTS_DIR="/scratch1/NCEPDEV/global/glopara/data/gocart_emissions" + ;; + "ORION") + AERO_INPUTS_DIR="/work2/noaa/global/wkolczyn/noscrub/global-workflow/gocart_emissions" + ;; + "S4") + AERO_INPUTS_DIR="/data/prod/glopara/gocart_emissions" + ;; + "WCOSS2") + AERO_INPUTS_DIR="/lfs/h2/emc/global/noscrub/emc.global/data/gocart_emissions" + ;; + "JET") + AERO_INPUTS_DIR="/lfs4/HFIP/hfv3gfs/glopara/data/gocart_emissions" + ;; + *) + echo "FATAL ERROR: Machine $machine unsupported for aerosols" + exit 2 + ;; +esac + +# Biomass burning emission dataset. Choose from: GBBEPx, QFED, NONE (default) +AERO_EMIS_FIRE=QFED + +# Aerosol convective scavenging factors (list of string array elements) +# Element syntax: ':'. 
Use = * to set default factor for all aerosol tracers +# Scavenging factors are set to 0 (no scavenging) if unset +aero_conv_scav_factors="'*:0.3','so2:0.0','msa:0.0','dms:0.0','nh3:0.4','nh4:0.6','bc1:0.6','bc2:0.6','oc1:0.4','oc2:0.4','dust1:0.6','dust2:0.6', 'dust3:0.6','dust4:0.6','dust5:0.6','seas1:0.5','seas2:0.5','seas3:0.5','seas4:0.5','seas5:0.5'" +# +# Number of diagnostic aerosol tracers (default: 0) +aero_diag_tracers=2 diff --git a/parm/config/gfs/config.aeroanl b/parm/config/gfs/config.aeroanl new file mode 100644 index 0000000000..41d63f8549 --- /dev/null +++ b/parm/config/gfs/config.aeroanl @@ -0,0 +1,24 @@ +#!/bin/bash -x + +########## config.aeroanl ########## +# configuration common to all aero analysis tasks + +echo "BEGIN: config.aeroanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/aero/obs/lists/gdas_aero_prototype.yaml +export AEROVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/variational/3dvar_gfs_aero.yaml +export STATICB_TYPE='identity' +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/aero/berror/staticb_${STATICB_TYPE}.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas +export BERROR_DATA_DIR=${FV3JEDI_FIX}/bump/aero/${CASE_ANL}/ +export BERROR_DATE="20160630.000000" + +export io_layout_x=@IO_LAYOUT_X@ +export io_layout_y=@IO_LAYOUT_Y@ + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.aeroanl" diff --git a/parm/config/gfs/config.aeroanlfinal b/parm/config/gfs/config.aeroanlfinal new file mode 100644 index 0000000000..230ec5205a --- /dev/null +++ b/parm/config/gfs/config.aeroanlfinal @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlfinal ########## +# Post Aero Analysis specific + +echo "BEGIN: config.aeroanlfinal" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlfinal +echo "END: config.aeroanlfinal" diff --git a/parm/config/gfs/config.aeroanlinit b/parm/config/gfs/config.aeroanlinit new file mode 100644 index 0000000000..72175b8d0c --- /dev/null +++ b/parm/config/gfs/config.aeroanlinit @@ -0,0 +1,10 @@ +#!/bin/bash -x + +########## config.aeroanlinit ########## +# Pre Aero Analysis specific + +echo "BEGIN: config.aeroanlinit" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlinit +echo "END: config.aeroanlinit" diff --git a/parm/config/gfs/config.aeroanlrun b/parm/config/gfs/config.aeroanlrun new file mode 100644 index 0000000000..da13df2831 --- /dev/null +++ b/parm/config/gfs/config.aeroanlrun @@ -0,0 +1,11 @@ +#!/bin/bash -x + +########## config.aeroanlrun ########## +# Aerosol Analysis specific + +echo "BEGIN: config.aeroanlrun" + +# Get task specific resources +. $EXPDIR/config.resources aeroanlrun + +echo "END: config.aeroanlrun" diff --git a/parm/config/gfs/config.aerosol_init b/parm/config/gfs/config.aerosol_init new file mode 100644 index 0000000000..0e586e0231 --- /dev/null +++ b/parm/config/gfs/config.aerosol_init @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.aerosol_init ########## + +echo "BEGIN: config.aerosol_init" + +# Get task specific resources +source $EXPDIR/config.resources aerosol_init + +echo "END: config.aerosol_init" \ No newline at end of file diff --git a/parm/config/gfs/config.anal b/parm/config/gfs/config.anal new file mode 100644 index 0000000000..e3a17f9c6a --- /dev/null +++ b/parm/config/gfs/config.anal @@ -0,0 +1,146 @@ +#! 
/usr/bin/env bash + +########## config.anal ########## +# Analysis specific + +echo "BEGIN: config.anal" + +# Get task specific resources +. ${EXPDIR}/config.resources anal + +if [[ ${DONST} = "YES" ]]; then + . ${EXPDIR}/config.nsst +fi + +if [[ "${CDUMP}" = "gfs" ]] ; then + export USE_RADSTAT="NO" # This can be only used when bias correction is not-zero. + export GENDIAG="NO" + export SETUP='diag_rad=.false.,diag_pcp=.false.,diag_conv=.false.,diag_ozone=.false.,write_diag(3)=.false.,niter(2)=100,' + export DIAG_TARBALL="YES" +fi + +export npe_gsi=${npe_anal} + +if [[ "${CDUMP}" == "gfs" ]] ; then + export npe_gsi=${npe_anal_gfs} + export nth_anal=${nth_anal_gfs} +fi + +# Set parameters specific to L127 +if [[ ${LEVS} = "128" ]]; then + export GRIDOPTS="nlayers(63)=1,nlayers(64)=1," + export SETUP="gpstop=55,nsig_ext=45,${SETUP:-}" +fi + +# Set namelist option for LETKF +export lobsdiag_forenkf=".false." # anal does not need to write out jacobians + # set to .true. in config.eobs and config.eupd + +# Do not process the following datasets +export GSNDBF=${GSNDBF:-/dev/null} +export AMSREBF=${AMSREBF:-/dev/null} +export SSMITBF=${SSMITBF:-/dev/null} +export AMSR2BF=${AMSR2BF:-/dev/null} + + +# Set default values for info files and observation error +# NOTE: Remember to set PRVT in config.prep as OBERROR is set below +export CONVINFO=${FIXgsi}/global_convinfo.txt +export OZINFO=${FIXgsi}/global_ozinfo.txt +export SATINFO=${FIXgsi}/global_satinfo.txt +export OBERROR=${FIXgsi}/prepobs_errtable.global + + +# Use experimental dumps in EMC GFS v16 parallels +if [[ ${RUN_ENVIR} == "emc" ]]; then + # Set info files and prepobs.errtable.global for GFS v16 retrospective parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019021900 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + + # Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2019110706 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + + # Assimilate 135 (T) & 235 (uv) Canadian AMDAR observations + if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "2020052612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020040718 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate COSMIC-2 + if [[ "${PDY}${cyc}" -ge "2020052612" && "${PDY}${cyc}" -lt "2020082412" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020052612 + export OBERROR=${FIXgsi}/gfsv16_historical/prepobs_errtable.global.2020040718 + fi + + # Assimilate HDOB + if [[ "${PDY}${cyc}" -ge "2020082412" && "${PDY}${cyc}" -lt "2020091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020082412 + fi + + # Assimilate Metop-C GNSSRO + if [[ "${PDY}${cyc}" -ge "2020091612" && "${PDY}${cyc}" -lt "2021031712" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2020091612 + fi + + # Assimilate DO-2 GeoOptics + if [[ "${PDY}${cyc}" -ge "2021031712" && "${PDY}${cyc}" -lt "2021091612" ]]; then + export CONVINFO=${FIXgsi}/gfsv16_historical/global_convinfo.txt.2021031712 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_convinfo.txt.2021110312 is + # identical to 
../global_convinfo.txt. Thus, the logic below is not + # needed at this time. + # Assimilate COSMIC-2 GPS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export CONVINFO=$FIXgsi/gfsv16_historical/global_convinfo.txt.2021110312 + # fi + + # Turn off assmilation of OMPS during period of bad data + if [[ "${PDY}${cyc}" -ge "2020011600" && "${PDY}${cyc}" -lt "2020011806" ]]; then + export OZINFO=${FIXgsi}/gfsv16_historical/global_ozinfo.txt.2020011600 + fi + + + # Set satinfo for start of GFS v16 parallels + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019021900 + fi + + # Turn on assimilation of Metop-C AMSUA and MHS + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020022012" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2019110706 + fi + + # Turn off assimilation of Metop-A MHS + if [[ "${PDY}${cyc}" -ge "2020022012" && "${PDY}${cyc}" -lt "2021052118" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2020022012 + fi + + # Turn off assimilation of S-NPP CrIS + if [[ "${PDY}${cyc}" -ge "2021052118" && "${PDY}${cyc}" -lt "2021092206" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021052118 + fi + + # Turn off assimilation of MetOp-A IASI + if [[ "${PDY}${cyc}" -ge "2021092206" && "${PDY}${cyc}" -lt "2021102612" ]]; then + export SATINFO=${FIXgsi}/gfsv16_historical/global_satinfo.txt.2021092206 + fi + + # NOTE: + # As of 2021110312, gfsv16_historical/global_satinfo.txt.2021110312 is + # identical to ../global_satinfo.txt. Thus, the logic below is not + # needed at this time + # + # Turn off assmilation of all Metop-A MHS + # if [[ "${PDY}${cyc}" -ge "2021110312" && "${PDY}${cyc}" -lt "YYYYMMDDHH" ]]; then + # export SATINFO=$FIXgsi/gfsv16_historical/global_satinfo.txt.2021110312 + # fi +fi + +echo "END: config.anal" diff --git a/parm/config/gfs/config.analcalc b/parm/config/gfs/config.analcalc new file mode 100644 index 0000000000..9405114ecc --- /dev/null +++ b/parm/config/gfs/config.analcalc @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.analcalc ########## +# GFS post-anal specific (non-diag) + +echo "BEGIN: config.analcalc" + +# Get task specific resources +. $EXPDIR/config.resources analcalc + +if [[ "$CDUMP" == "gfs" ]]; then + export nth_echgres=$nth_echgres_gfs +fi + +echo "END: config.analcalc" diff --git a/parm/config/gfs/config.analdiag b/parm/config/gfs/config.analdiag new file mode 100644 index 0000000000..7b128d3bad --- /dev/null +++ b/parm/config/gfs/config.analdiag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.analdiag ########## +# GFS post-anal specific (diag) + +echo "BEGIN: config.analdiag" + +# Get task specific resources +. $EXPDIR/config.resources analdiag + +echo "END: config.analdiag" diff --git a/parm/config/gfs/config.arch b/parm/config/gfs/config.arch new file mode 100644 index 0000000000..6a0f6306a8 --- /dev/null +++ b/parm/config/gfs/config.arch @@ -0,0 +1,24 @@ +#! /usr/bin/env bash + +########## config.arch ########## +# Archive specific + +echo "BEGIN: config.arch" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" arch + +export ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"NO"} +export ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} +export ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} + +#--online archive of nemsio files for fit2obs verification +export FITSARC="YES" +export FHMAX_FITS=132 +[[ "${FHMAX_FITS}" -gt "${FHMAX_GFS}" ]] && export FHMAX_FITS=${FHMAX_GFS} + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD=144 +export RMOLDEND=24 + +echo "END: config.arch" diff --git a/parm/config/gfs/config.atmanl b/parm/config/gfs/config.atmanl new file mode 100644 index 0000000000..c045704fa2 --- /dev/null +++ b/parm/config/gfs/config.atmanl @@ -0,0 +1,25 @@ +#! /usr/bin/env bash + +########## config.atmanl ########## +# configuration common to all atm var analysis tasks + +echo "BEGIN: config.atmanl" + +export CASE_ANL=${CASE} +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/gdas_prototype_3d.yaml +export ATMVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/variational/3dvar_dripcg.yaml +export STATICB_TYPE="gsibec" +export BERROR_YAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/berror/staticb_${STATICB_TYPE}.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_var.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmanl" diff --git a/parm/config/gfs/config.atmanlfinal b/parm/config/gfs/config.atmanlfinal new file mode 100644 index 0000000000..a6b714f7fc --- /dev/null +++ b/parm/config/gfs/config.atmanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlfinal ########## +# Post Atm Var Analysis specific + +echo "BEGIN: config.atmanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlfinal +echo "END: config.atmanlfinal" diff --git a/parm/config/gfs/config.atmanlinit b/parm/config/gfs/config.atmanlinit new file mode 100644 index 0000000000..bc95ef4962 --- /dev/null +++ b/parm/config/gfs/config.atmanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmanlinit ########## +# Pre Atm Var Analysis specific + +echo "BEGIN: config.atmanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlinit +echo "END: config.atmanlinit" diff --git a/parm/config/gfs/config.atmanlrun b/parm/config/gfs/config.atmanlrun new file mode 100644 index 0000000000..68b7615718 --- /dev/null +++ b/parm/config/gfs/config.atmanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmanlrun ########## +# Atm Var Analysis specific + +echo "BEGIN: config.atmanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmanlrun + +echo "END: config.atmanlrun" diff --git a/parm/config/gfs/config.atmensanl b/parm/config/gfs/config.atmensanl new file mode 100644 index 0000000000..4d945ea717 --- /dev/null +++ b/parm/config/gfs/config.atmensanl @@ -0,0 +1,22 @@ +#! 
/usr/bin/env bash + +########## config.atmensanl ########## +# configuration common to all atm ens analysis tasks + +echo "BEGIN: config.atmensanl" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/atm/obs/lists/lgetkf_prototype.yaml +export ATMENSYAML=${HOMEgfs}/sorc/gdas.cd/parm/atm/lgetkf/lgetkf.yaml +export INTERP_METHOD='barycentric' + +export layout_x=1 +export layout_y=1 + +export io_layout_x=1 +export io_layout_y=1 + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x +export crtm_VERSION="2.3.0" + +echo "END: config.atmensanl" diff --git a/parm/config/gfs/config.atmensanlfinal b/parm/config/gfs/config.atmensanlfinal new file mode 100644 index 0000000000..5d8ec458c3 --- /dev/null +++ b/parm/config/gfs/config.atmensanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlfinal ########## +# Post Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlfinal" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlfinal +echo "END: config.atmensanlfinal" diff --git a/parm/config/gfs/config.atmensanlinit b/parm/config/gfs/config.atmensanlinit new file mode 100644 index 0000000000..34429023bb --- /dev/null +++ b/parm/config/gfs/config.atmensanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.atmensanlinit ########## +# Pre Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlinit +echo "END: config.atmensanlinit" diff --git a/parm/config/gfs/config.atmensanlrun b/parm/config/gfs/config.atmensanlrun new file mode 100644 index 0000000000..01f211a17a --- /dev/null +++ b/parm/config/gfs/config.atmensanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.atmensanlrun ########## +# Atm Ens Analysis specific + +echo "BEGIN: config.atmensanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" atmensanlrun + +echo "END: config.atmensanlrun" diff --git a/parm/config/gfs/config.awips b/parm/config/gfs/config.awips new file mode 100644 index 0000000000..9003e9f6b0 --- /dev/null +++ b/parm/config/gfs/config.awips @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.awips ########## +# GFS awips step specific + +echo "BEGIN: config.awips" + +# Get task specific resources +. $EXPDIR/config.resources awips + +export AWIPS20SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_20KM_1P0DEG +export AWIPSG2SH=$HOMEgfs/jobs/JGFS_ATMOS_AWIPS_G2 + +# No. of concurrent awips jobs +export NAWIPSGRP=42 + +echo "END: config.awips" diff --git a/parm/config/gfs/config.base.emc.dyn b/parm/config/gfs/config.base.emc.dyn new file mode 100644 index 0000000000..fc5973e01f --- /dev/null +++ b/parm/config/gfs/config.base.emc.dyn @@ -0,0 +1,380 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. 
+export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_POST_BATCH="@PARTITION_POST_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT="@HPSS_PROJECT@" + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export 
RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." + +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='mx025' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. 
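# Illustration: a minimal sketch of how gfs_cyc drives the launch interval and the
# per-cycle forecast length computed just below (the *_demo names are hypothetical;
# the FHMAX_GFS_00..18 defaults themselves are set a few lines further down).
gfs_cyc_demo=4      # GFS forecast launched at all four cycles
cyc_demo=06
echo "STEP_GFS would be $(( 24 / gfs_cyc_demo )) hours"   # -> 6
var_demo="FHMAX_GFS_${cyc_demo}"
echo "FHMAX_GFS would come from ${var_demo} (bash indirection: ${!var_demo:-unset})"
# The eval used below performs the same indirect lookup.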
+ +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." + +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="@DO_JEDIATMVAR@" +export DO_JEDIATMENS="@DO_JEDIATMENS@" +export DO_JEDIOCNVAR="@DO_JEDIOCNVAR@" +export DO_JEDILANDDA="@DO_JEDILANDDA@" +export DO_MERGENSST="@DO_MERGENSST@" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. 
If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="YES" # Run METPLUS jobs - set METPLUS settings in config.metp +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. + +echo "END: config.base" diff --git a/parm/config/gfs/config.base.emc.dyn_hera b/parm/config/gfs/config.base.emc.dyn_hera new file mode 100644 index 0000000000..9d13445e35 --- /dev/null +++ b/parm/config/gfs/config.base.emc.dyn_hera @@ -0,0 +1,380 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_POST_BATCH="@PARTITION_POST_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT="@HPSS_PROJECT@" + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="NO" # VRFY step ## JKH + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
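# Illustration: a minimal sketch of how the APP value is parsed by the case block on
# ${APP} further below; app_demo is a hypothetical stand-in for the @APP@ value filled
# in at setup time.
app_demo="S2SWA"
if [[ "${app_demo}" == S2S* ]]; then
  [[ "${app_demo}" =~ A$ ]]    && echo "trailing A  -> aerosols on (DO_AERO=YES)"
  [[ "${app_demo}" =~ ^S2SW ]] && echo "S2SW prefix -> waves on (DO_WAVE=YES)"
fi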
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='mx025' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-120} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-120} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-120} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-120} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
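# Note on IAU_FHROT above: it is simply the first character of IAUFHRS, i.e. the first
# increment hour of the "3,6,9" list, so it only works while that hour is a single
# digit. A tiny sketch (iaufhrs_demo is hypothetical):
iaufhrs_demo="3,6,9"
echo "${iaufhrs_demo}" | cut -c1    # -> 3, as used above
echo "${iaufhrs_demo%%,*}"          # -> 3 as well, and safe for multi-digit hours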
+ +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="@DO_JEDIATMVAR@" +export DO_JEDIATMENS="@DO_JEDIATMENS@" +export DO_JEDIOCNVAR="@DO_JEDIOCNVAR@" +export DO_JEDILANDDA="@DO_JEDILANDDA@" +export DO_MERGENSST="@DO_MERGENSST@" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp ## JKH +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/parm/config/gfs/config.base.emc.dyn_jet b/parm/config/gfs/config.base.emc.dyn_jet new file mode 100644 index 0000000000..6e71f4cb3b --- /dev/null +++ b/parm/config/gfs/config.base.emc.dyn_jet @@ -0,0 +1,380 @@ +#! /usr/bin/env bash + +########## config.base ########## +# Common to all steps + +echo "BEGIN: config.base" + +# Machine environment +export machine="@MACHINE@" + +# EMC parallel or NCO production +export RUN_ENVIR="emc" + +# Account, queue, etc. +export ACCOUNT="@ACCOUNT@" +export QUEUE="@QUEUE@" +export QUEUE_SERVICE="@QUEUE_SERVICE@" +export PARTITION_BATCH="@PARTITION_BATCH@" +export PARTITION_POST_BATCH="@PARTITION_POST_BATCH@" +export PARTITION_SERVICE="@PARTITION_SERVICE@" + +# Project to use in mass store: +HPSS_PROJECT="@HPSS_PROJECT@" + +# Directories relative to installation areas: +export HOMEgfs=@HOMEgfs@ +export PARMgfs=${HOMEgfs}/parm +export FIXgfs=${HOMEgfs}/fix +export USHgfs=${HOMEgfs}/ush +export UTILgfs=${HOMEgfs}/util +export EXECgfs=${HOMEgfs}/exec +export SCRgfs=${HOMEgfs}/scripts + +export FIXcice=${HOMEgfs}/fix/cice +export FIXmom=${HOMEgfs}/fix/mom6 +export FIXreg2grb2=${HOMEgfs}/fix/reg2grb2 + +######################################################################## + +# GLOBAL static environment parameters +export PACKAGEROOT="@PACKAGEROOT@" # TODO: set via prod_envir in Ops +export COMROOT="@COMROOT@" # TODO: set via prod_envir in Ops +export COMINsyn="@COMINsyn@" +export DMPDIR="@DMPDIR@" + +# USER specific paths +export HOMEDIR="@HOMEDIR@" +export STMP="@STMP@" +export PTMP="@PTMP@" +export NOSCRUB="@NOSCRUB@" + +# Base directories for various builds +export BASE_GIT="@BASE_GIT@" + +# Toggle to turn on/off GFS downstream processing. +export DO_BUFRSND="NO" # BUFR sounding products +export DO_GEMPAK="NO" # GEMPAK products +export DO_AWIPS="NO" # AWIPS products +export WAFSF="NO" # WAFS products +export DO_VRFY="YES" # VRFY step + +# NO for retrospective parallel; YES for real-time parallel +# arch.sh uses REALTIME for MOS. Need to set REALTIME=YES +# if want MOS written to HPSS. 
Should update arch.sh to +# use RUNMOS flag (currently in config.vrfy) +export REALTIME="YES" + +# Experiment mode (cycled or forecast-only) +export MODE="@MODE@" # cycled/forecast-only + +#################################################### +# DO NOT ADD MACHINE DEPENDENT STUFF BELOW THIS LINE +# IF YOU HAVE TO MAKE MACHINE SPECIFIC CHANGES BELOW +# FEEL FREE TO MOVE THEM ABOVE THIS LINE TO KEEP IT +# CLEAR +#################################################### +# Build paths relative to $HOMEgfs +export FIXgsi="${HOMEgfs}/fix/gsi" +export HOMEpost="${HOMEgfs}" +export HOMEobsproc="${BASE_GIT}/obsproc/v1.1.2" + +# CONVENIENT utility scripts and other environment parameters +export NCP="/bin/cp -p" +export NMV="/bin/mv" +export NLN="/bin/ln -sf" +export VERBOSE="YES" +export KEEPDATA="NO" +export CHGRP_RSTPROD="@CHGRP_RSTPROD@" +export CHGRP_CMD="@CHGRP_CMD@" +export NCDUMP="$NETCDF/bin/ncdump" +export NCLEN="$HOMEgfs/ush/getncdimlen" + +# Machine environment, jobs, and other utility scripts +export BASE_ENV="${HOMEgfs}/env" +export BASE_JOB="${HOMEgfs}/jobs/rocoto" + +# EXPERIMENT specific environment parameters +export SDATE=@SDATE@ +export EDATE=@EDATE@ +export EXP_WARM_START="@EXP_WARM_START@" +export assim_freq=6 +export PSLOT="@PSLOT@" +export EXPDIR="@EXPDIR@/${PSLOT}" +export ROTDIR="@ROTDIR@/${PSLOT}" +export ROTDIR_DUMP="YES" #Note: A value of "NO" does not currently work +export DUMP_SUFFIX="" +if [[ "${PDY}${cyc}" -ge "2019092100" && "${PDY}${cyc}" -le "2019110700" ]]; then + export DUMP_SUFFIX="p" # Use dumps from NCO GFS v15.3 parallel +fi +export DATAROOT="${STMP}/RUNDIRS/${PSLOT}" # TODO: set via prod_envir in Ops +export RUNDIR="${DATAROOT}" # TODO: Should be removed; use DATAROOT instead +export ARCDIR="${NOSCRUB}/archive/${PSLOT}" +export ATARDIR="@ATARDIR@" + +# Commonly defined parameters in JJOBS +export envir=${envir:-"prod"} +export NET="gfs" # NET is defined in the job-card (ecf) +export RUN=${RUN:-${CDUMP:-"gfs"}} # RUN is defined in the job-card (ecf); CDUMP is used at EMC as a RUN proxy +# TODO: determine where is RUN actually used in the workflow other than here +# TODO: is it possible to replace all instances of ${CDUMP} to ${RUN} to be +# consistent w/ EE2? + +# Get all the COM path templates +source "${EXPDIR}/config.com" + +export ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +export LOGSCRIPT=${LOGSCRIPT:-""} +#export ERRSCRIPT=${ERRSCRIPT:-"err_chk"} +#export LOGSCRIPT=${LOGSCRIPT:-"startmsg"} +export REDOUT="1>" +export REDERR="2>" + +export SENDECF=${SENDECF:-"NO"} +export SENDCOM=${SENDCOM:-"YES"} +export SENDSDM=${SENDSDM:-"NO"} +export SENDDBN_NTC=${SENDDBN_NTC:-"NO"} +export SENDDBN=${SENDDBN:-"NO"} +export DBNROOT=${DBNROOT:-${UTILROOT}/fakedbn} + +# APP settings +export APP=@APP@ + +# Defaults: +export DO_ATM="YES" +export DO_COUPLED="NO" +export DO_WAVE="NO" +export DO_OCN="NO" +export DO_ICE="NO" +export DO_AERO="NO" +export CCPP_SUITE="@CCPP_SUITE@" +export WAVE_CDUMP="" # When to include wave suite: gdas, gfs, or both +export DOBNDPNT_WAVE="NO" +export cplwav2atm=".false." +export FRAC_GRID=".true." 
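# Note: the OCNRES values selected in the case block just below appear to encode the
# MOM6/CICE6 grid spacing in hundredths of a degree (100 -> 1.0 deg, 025 -> 0.25 deg,
# 500 -> 5.0 deg). A small sketch of that reading (ocnres_demo is hypothetical):
ocnres_demo="025"
awk -v r="${ocnres_demo}" 'BEGIN { printf "ocean/ice grid spacing ~ %.2f degrees\n", r/100 }'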
+ +# Set operational resolution +export OPS_RES="C768" # Do not change + +# Resolution specific parameters +export LEVS=128 +export CASE="@CASECTL@" +export CASE_ENS="@CASEENS@" +# TODO: This should not depend on $CASE or $CASE_ENS +# These are the currently available grid-combinations +case "${CASE}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=${OCNRES} +export waveGRD='mx025' + +case "${APP}" in + ATM) + export confignamevarfornems="atm" + ;; + ATMA) + export DO_AERO="YES" + export confignamevarfornems="atm_aero" + ;; + ATMW) + export DO_COUPLED="YES" + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export confignamevarfornems="leapfrog_atm_wav" + ;; + NG-GODAS) + export DO_ATM="NO" + export DO_OCN="YES" + export DO_ICE="YES" + ;; + S2S*) + export DO_COUPLED="YES" + export DO_OCN="YES" + export DO_ICE="YES" + export CCPP_SUITE="FV3_GFS_v17_coupled_p8" + export confignamevarfornems="cpld" + + if [[ "${APP}" =~ A$ ]]; then + export DO_AERO="YES" + export confignamevarfornems="${confignamevarfornems}_aero" + fi + + if [[ "${APP}" =~ ^S2SW ]]; then + export DO_WAVE="YES" + export WAVE_CDUMP="both" + export cplwav2atm=".true." + export confignamevarfornems="${confignamevarfornems}_outerwave" + fi + + source ${EXPDIR}/config.defaults.s2sw + + ;; + *) + echo "Unrecognized APP: ${1}" + exit 1 + ;; +esac + +# Surface cycle update frequency +if [[ "${CDUMP}" =~ "gdas" ]] ; then + export FHCYC=1 + export FTSFS=10 +elif [[ "${CDUMP}" =~ "gfs" ]] ; then + export FHCYC=24 +fi + +# Output frequency of the forecast model (for cycling) +export FHMIN=0 +export FHMAX=9 +export FHOUT=3 # Will be changed to 1 in config.base if (DOHYBVAR set to NO and l4densvar set to false) + +# Cycle to run EnKF (set to BOTH for both gfs and gdas) +export EUPD_CYC="gdas" + +# GFS cycle info +export gfs_cyc=@gfs_cyc@ # 0: no GFS cycle, 1: 00Z only, 2: 00Z and 12Z only, 4: all 4 cycles. + +# GFS output and frequency +export FHMIN_GFS=0 + +export FHMAX_GFS_00=${FHMAX_GFS_00:-168} ## JKH +export FHMAX_GFS_06=${FHMAX_GFS_06:-168} ## JKH +export FHMAX_GFS_12=${FHMAX_GFS_12:-168} ## JKH +export FHMAX_GFS_18=${FHMAX_GFS_18:-168} ## JKH +export FHMAX_GFS=$(eval echo \${FHMAX_GFS_${cyc}}) + +export FHOUT_GFS=${FHOUT_GFS:-6} ## JKH +export FHMAX_HF_GFS=${FHMAX_HF_GFS:-0} +export FHOUT_HF_GFS=${FHOUT_HF_GFS:-1} +if (( gfs_cyc != 0 )); then + export STEP_GFS=$(( 24 / gfs_cyc )) +else + export STEP_GFS="0" +fi +export ILPOST=1 # gempak output frequency up to F120 + +# GFS restart interval in hours +export restart_interval_gfs=0 + +export QUILTING=".true." +export OUTPUT_GRID="gaussian_grid" +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST +export WRITE_NSFLIP=".true." + +# IAU related parameters +export DOIAU="YES" # Enable 4DIAU for control with 3 increments +export IAUFHRS="3,6,9" +export IAU_FHROT=$(echo ${IAUFHRS} | cut -c1) +export IAU_DELTHRS=6 +export IAU_OFFSET=6 +export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble +export IAUFHRS_ENKF="3,6,9" +export IAU_DELTHRS_ENKF=6 + +# Use Jacobians in eupd and thereby remove need to run eomg +export lobsdiag_forenkf=".true." 
+ +# if [[ "$SDATE" -lt "2019020100" ]]; then # no rtofs in GDA +# export DO_WAVE="NO" +# echo "WARNING: Wave suite turned off due to lack of RTOFS in GDA for SDATE" +# fi + +# Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL +export imp_physics=@IMP_PHYSICS@ + +# Shared parameters +# DA engine +export DO_JEDIATMVAR="@DO_JEDIATMVAR@" +export DO_JEDIATMENS="@DO_JEDIATMENS@" +export DO_JEDIOCNVAR="@DO_JEDIOCNVAR@" +export DO_JEDILANDDA="@DO_JEDILANDDA@" +export DO_MERGENSST="@DO_MERGENSST@" + +# Hybrid related +export DOHYBVAR="@DOHYBVAR@" +export NMEM_ENS=@NMEM_ENS@ +export NMEM_ENS_GFS=@NMEM_ENS@ +export SMOOTH_ENKF="NO" +export l4densvar=".true." +export lwrite4danl=".true." + +# EnKF output frequency +if [[ ${DOHYBVAR} = "YES" ]]; then + export FHMIN_ENKF=3 + export FHMAX_ENKF=9 + export FHMAX_ENKF_GFS=120 + export FHOUT_ENKF_GFS=3 + if [ $l4densvar = ".true." ]; then + export FHOUT=1 + export FHOUT_ENKF=1 + else + export FHOUT_ENKF=3 + fi +fi + +# if 3DVAR and IAU +if [[ ${DOHYBVAR} == "NO" && ${DOIAU} == "YES" ]]; then + export IAUFHRS="6" + export IAU_FHROT="3" + export IAU_FILTER_INCREMENTS=".true." + export IAUFHRS_ENKF="6" +fi + +# Check if cycle is cold starting, DOIAU off, or free-forecast mode +if [[ "${MODE}" = "cycled" && "${SDATE}" = "${PDY}${cyc}" && ${EXP_WARM_START} = ".false." ]] || [[ "${DOIAU}" = "NO" ]] || [[ "${MODE}" = "forecast-only" && ${EXP_WARM_START} = ".false." ]] ; then + export IAU_OFFSET=0 + export IAU_FHROT=0 +fi + +# turned on nsst in anal and/or fcst steps, and turn off rtgsst +export DONST="YES" +if [[ ${DONST} = "YES" ]]; then export FNTSFA=" "; fi + +# The switch to apply SST elevation correction or not +export nst_anl=.true. + +# Make the nsstbufr file on the fly or use the GDA version +export MAKE_NSSTBUFR="@MAKE_NSSTBUFR@" + +# Make the aircraft prepbufr file on the fly or use the GDA version +export MAKE_ACFTBUFR="@MAKE_ACFTBUFR@" + +# Analysis increments to zero in CALCINCEXEC +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" + +# Write analysis files for early cycle EnKF +export DO_CALC_INCREMENT_ENKF_GFS="YES" + +# Stratospheric increments to zero +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_EFOLD="5" + +# Swith to generate netcdf or binary diagnostic files. If not specified, +# script default to binary diagnostic files. Set diagnostic file +# variables here since used in both DA and vrfy jobs +export netcdf_diag=".true." +export binary_diag=".false." + +# Verification options +export DO_METP="NO" # Run METPLUS jobs - set METPLUS settings in config.metp ## JKH +export DO_FIT2OBS="NO" # Run fit to observations package + +# Archiving options +export HPSSARCH="@HPSSARCH@" # save data to HPSS archive +export LOCALARCH="@LOCALARCH@" # save data to local archive +if [[ ${HPSSARCH} = "YES" ]] && [[ ${LOCALARCH} = "YES" ]]; then + echo "Both HPSS and local archiving selected. Please choose one or the other." + exit 2 +fi +export ARCH_CYC=00 # Archive data at this cycle for warm_start capability +export ARCH_WARMICFREQ=4 # Archive frequency in days for warm_start capability +export ARCH_FCSTICFREQ=1 # Archive frequency in days for gdas and gfs forecast-only capability + +export DELETE_COM_IN_ARCHIVE_JOB="YES" # NO=retain ROTDIR. YES default in arch.sh and earc.sh. 
+ +echo "END: config.base" diff --git a/parm/config/config.base.nco.static b/parm/config/gfs/config.base.nco.static old mode 100755 new mode 100644 similarity index 84% rename from parm/config/config.base.nco.static rename to parm/config/gfs/config.base.nco.static index d9d2764a08..c922ec4ad1 --- a/parm/config/config.base.nco.static +++ b/parm/config/gfs/config.base.nco.static @@ -1,4 +1,4 @@ -#!/bin/ksh -x +#! /usr/bin/env bash ########## config.base ########## # Common to all steps @@ -6,7 +6,7 @@ echo "BEGIN: config.base" # Machine environment -export machine="WCOSS_DELL_P3" +export machine="WCOSS2" # EMC parallel or NCO production export RUN_ENVIR="nco" @@ -31,7 +31,7 @@ export SCRgfs=$HOMEgfs/scripts # GLOBAL static environment parameters -export NWPROD="/gpfs/dell1/nco/ops/nwprod" +export PACKAGEROOT="/lfs/h1/ops/prod/packages" export RTMFIX=$CRTM_FIX # Machine specific paths used everywhere @@ -62,13 +62,10 @@ export REALTIME="YES" # CLEAR #################################################### # Build paths relative to $HOMEgfs -export FIXgsi="$HOMEgfs/fix/fix_gsi" +export FIXgsi="$HOMEgfs/fix/gsi" export HOMEfv3gfs="$HOMEgfs/sorc/fv3gfs.fd" export HOMEpost="$HOMEgfs" -export HOMEobsproc_prep="$NWPROD/obsproc_prep.v5.5.0" -export HOMEobsproc_network="$NWPROD/obsproc_global.v3.4.2" -export HOMEobsproc_global=$HOMEobsproc_network -export BASE_VERIF="$BASE_GIT/verif/global/tags/vsdb" +export HOMEobsproc="/lfs/h1/ops/prod/packages/obsproc.v1.1.2" # CONVENIENT utility scripts and other environment parameters export NCP="/bin/cp -p" @@ -90,7 +87,7 @@ export EDATE=2039123100 export assim_freq=6 export PSLOT="test" export EXPDIR="$EXPDIR" -export ROTDIR="$ROTDIR" +export ROTDIR="$(compath.py ${envir}/${NET}/${gfs_ver})" export ROTDIR_DUMP="YES" export DUMP_SUFFIX="" export RUNDIR="$DATAROOT" @@ -162,30 +159,17 @@ export FHMAX_HF_GFS=120 export FHOUT_HF_GFS=1 export ILPOST=1 # gempak output frequency up to F120 -# frequency for saving restart files. set to 6,12,24,48 etc +# frequency for saving restart files. set to 6,12,24,48 etc export restart_interval_gfs=12 # I/O QUILTING, true--use Write Component; false--use GFDL FMS # if quilting=true, choose OUTPUT_GRID as cubed_sphere_grid in netcdf or gaussian_grid -# if gaussian_grid, set OUTPUT_FILE for nemsio or netcdf # WRITE_DOPOST=true, use inline POST export QUILTING=".true." export OUTPUT_GRID="gaussian_grid" -export OUTPUT_FILE="netcdf" -export WRITE_DOPOST=".true." +export WRITE_DOPOST=".true." # WRITE_DOPOST=true, use inline POST export WRITE_NSFLIP=".true." -# suffix options depending on file format -if [ $OUTPUT_FILE = "netcdf" ]; then - export SUFFIX=".nc" - export NEMSIO_IN=".false." - export NETCDF_IN=".true." -else - export SUFFIX=".nemsio" - export NEMSIO_IN=".true." - export NETCDF_IN=".false." -fi - # IAU related parameters export DOIAU="YES" # Enable 4DIAU for control with 3 increments export IAUFHRS="3,6,9" @@ -195,26 +179,23 @@ export IAU_OFFSET=6 export DOIAU_ENKF=${DOIAU:-"YES"} # Enable 4DIAU for EnKF ensemble export IAUFHRS_ENKF="3,6,9" export IAU_DELTHRS_ENKF=6 -if [[ "$SDATE" = "$CDATE" ]]; then - export IAU_OFFSET=0 - export IAU_FHROT=0 -fi # Use Jacobians in eupd and thereby remove need to run eomg export lobsdiag_forenkf=".true." 
-# run GLDAS to spin up land ICs -export DO_GLDAS=YES -export gldas_cyc=00 - # run wave component export DO_WAVE=YES export WAVE_CDUMP="both" +export DOBNDPNT_WAVE="YES" # Microphysics Options: 99-ZhaoCarr, 8-Thompson; 6-WSM6, 10-MG, 11-GFDL export imp_physics=8 # Shared parameters +# DA engine +export DO_JEDIATMVAR="NO" +export DO_JEDIATMENS="NO" + # Hybrid related export DOHYBVAR="YES" export NMEM_ENKF="80" @@ -243,15 +224,10 @@ if [ $DONST = "YES" ]; then export FNTSFA=" "; fi export nst_anl=.true. # Analysis increments to zero in CALCINCEXEC -export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc'" - -if [ $OUTPUT_FILE = "nemsio" ]; then - export DO_CALC_INCREMENT="YES" - export DO_CALC_ANALYSIS="NO" -fi +export INCREMENTS_TO_ZERO="'liq_wat_inc','icmr_inc','rwmr_inc','snmr_inc','grle_inc'" # Stratospheric increments to zero -export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc'" +export INCVARS_ZERO_STRAT="'sphum_inc','liq_wat_inc','icmr_inc','rwmr_inc','snmr_inc','grle_inc'" export INCVARS_EFOLD="5" # Swith to generate netcdf or binary diagnostic files. If not specified, diff --git a/parm/config/gfs/config.com b/parm/config/gfs/config.com new file mode 100644 index 0000000000..6a824012c6 --- /dev/null +++ b/parm/config/gfs/config.com @@ -0,0 +1,93 @@ +# shellcheck shell=bash +# Ignore shellcheck warnings about variables not being expanded; this is what we want +# shellcheck disable=SC2016 +echo "BEGIN: config.com" + +# These are just templates. All templates must use single quotations so variable +# expansion does not occur when this file is sourced. Substitution happens later +# during runtime. It is recommended to use the helper function `generate_com()`, +# to do this substitution, which is defined in `ush/preamble.sh`. +# +# Syntax for generate_com(): +# generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] +# +# options: +# -r: Make variable read-only (same as `decalre -r`) +# -x: Mark variable for declare -rx (same as `declare -x`) +# var1, var2, etc: Variable names whose values will be generated from a template +# and declared +# tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") +# +# Examples: +# # Current cycle and RUN +# YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS +# +# # Previous cycle and gdas +# RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ +# COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL +# +# # Current cycle and COM for first member +# MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY +# + +# +# If any restart, input, or analysis template is updated, `setup_expt.py.fill_COMROT_cycled()` +# must correspondingly be updated to match. 
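# Illustration: a hand-expanded example of the single-quoted templates documented
# above. This is not the real generate_com() from ush/preamble.sh; it only shows what
# a template such as COM_ATMOS_ANALYSIS_TMPL (defined below) resolves to once the
# placeholder variables are set. All *_demo values are hypothetical.
ROTDIR_demo="/path/to/comrot/myexp"
RUN_demo="gfs"; YMD_demo="20231201"; HH_demo="00"; MEMDIR_demo=""
tmpl_demo='${ROTDIR_demo}/${RUN_demo}.${YMD_demo}/${HH_demo}/${MEMDIR_demo}/analysis/atmos'
eval "echo ${tmpl_demo}"   # -> /path/to/comrot/myexp/gfs.20231201/00//analysis/atmos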
+# +if [[ "${RUN_ENVIR:-emc}" == "nco" ]]; then + COM_OBS_TMPL=$(compath.py "${envir}/obsproc/${obsproc_ver}")'/${RUN}.${YMD}/${HH}/atmos' + COM_RTOFS_TMPL=$(compath.py "${envir}/${WAVECUR_DID}/${rtofs_ver}") +else + COM_OBS_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}/obs' + COM_RTOFS_TMPL='${DMPDIR}' +fi +declare -rx COM_OBS_TMPL COM_RTOFS_TMPL +declare -rx COM_OBSDMP_TMPL='${DMPDIR}/${DUMP}${DUMP_SUFFIX}.${YMD}/${HH}/atmos' + +COM_BASE='${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}' + +declare -rx COM_TOP_TMPL='${ROTDIR}/${RUN}.${YMD}/${HH}' + +declare -rx COM_ATMOS_INPUT_TMPL=${COM_BASE}'/model_data/atmos/input' +declare -rx COM_ATMOS_RESTART_TMPL=${COM_BASE}'/model_data/atmos/restart' +declare -rx COM_ATMOS_ANALYSIS_TMPL=${COM_BASE}'/analysis/atmos' +declare -rx COM_LAND_ANALYSIS_TMPL=${COM_BASE}'/analysis/land' +declare -rx COM_ATMOS_HISTORY_TMPL=${COM_BASE}'/model_data/atmos/history' +declare -rx COM_ATMOS_MASTER_TMPL=${COM_BASE}'/model_data/atmos/master' +declare -rx COM_ATMOS_GRIB_TMPL=${COM_BASE}'/products/atmos/grib2/${GRID}' +declare -rx COM_ATMOS_BUFR_TMPL=${COM_BASE}'/products/atmos/bufr' +declare -rx COM_ATMOS_GEMPAK_TMPL=${COM_BASE}'/products/atmos/gempak/${GRID}' +declare -rx COM_ATMOS_GENESIS_TMPL=${COM_BASE}'/products/atmos/cyclone/genesis_vital' +declare -rx COM_ATMOS_TRACK_TMPL=${COM_BASE}'/products/atmos/cyclone/tracks' +declare -rx COM_ATMOS_GOES_TMPL=${COM_BASE}'/products/atmos/goes_sim' +declare -rx COM_ATMOS_IMAGERY_TMPL=${COM_BASE}'/products/atmos/imagery' +declare -rx COM_ATMOS_MINMON_TMPL=${COM_BASE}'/products/atmos/minmon' +declare -rx COM_ATMOS_WAFS_TMPL=${COM_BASE}'/products/atmos/wafs' +declare -rx COM_ATMOS_WMO_TMPL=${COM_BASE}'/products/atmos/wmo' + +declare -rx COM_WAVE_RESTART_TMPL=${COM_BASE}'/model_data/wave/restart' +declare -rx COM_WAVE_PREP_TMPL=${COM_BASE}'/model_data/wave/prep' +declare -rx COM_WAVE_HISTORY_TMPL=${COM_BASE}'/model_data/wave/history' +declare -rx COM_WAVE_GRID_TMPL=${COM_BASE}'/products/wave/gridded' +declare -rx COM_WAVE_STATION_TMPL=${COM_BASE}'/products/wave/station' +declare -rx COM_WAVE_GEMPAK_TMPL=${COM_BASE}'/products/wave/gempak' +declare -rx COM_WAVE_WMO_TMPL=${COM_BASE}'/products/wave/wmo' + +declare -rx COM_OCEAN_HISTORY_TMPL=${COM_BASE}'/model_data/ocean/history' +declare -rx COM_OCEAN_RESTART_TMPL=${COM_BASE}'/model_data/ocean/restart' +declare -rx COM_OCEAN_INPUT_TMPL=${COM_BASE}'/model_data/ocean/input' +declare -rx COM_OCEAN_ANALYSIS_TMPL=${COM_BASE}'/analysis/ocean' +declare -rx COM_OCEAN_2D_TMPL=${COM_BASE}'/products/ocean/2D' +declare -rx COM_OCEAN_3D_TMPL=${COM_BASE}'/products/ocean/3D' +declare -rx COM_OCEAN_DAILY_TMPL=${COM_BASE}'/products/ocean/daily' +declare -rx COM_OCEAN_XSECT_TMPL=${COM_BASE}'/products/ocean/xsect' +declare -rx COM_OCEAN_GRIB_TMPL=${COM_BASE}'/products/ocean/grib2/${GRID}' + +declare -rx COM_ICE_INPUT_TMPL=${COM_BASE}'/model_data/ice/input' +declare -rx COM_ICE_HISTORY_TMPL=${COM_BASE}'/model_data/ice/history' +declare -rx COM_ICE_RESTART_TMPL=${COM_BASE}'/model_data/ice/restart' + +declare -rx COM_CHEM_HISTORY_TMPL=${COM_BASE}'/model_data/chem/history' +declare -rx COM_CHEM_ANALYSIS_TMPL=${COM_BASE}'/analysis/chem' + +declare -rx COM_MED_RESTART_TMPL=${COM_BASE}'/model_data/med/restart' diff --git a/parm/config/gfs/config.coupled_ic b/parm/config/gfs/config.coupled_ic new file mode 100644 index 0000000000..50fab283b5 --- /dev/null +++ b/parm/config/gfs/config.coupled_ic @@ -0,0 +1,43 @@ +#! 
/usr/bin/env bash + +########## config.coupled_ic ########## + +echo "BEGIN: config.coupled_ic" + +# Get task specific resources +source ${EXPDIR}/config.resources coupled_ic + +if [[ "${machine}" == "WCOSS2" ]]; then + export BASE_CPLIC="/lfs/h2/emc/couple/noscrub/Jiande.Wang/IC" +elif [[ "${machine}" == "HERA" ]]; then + export BASE_CPLIC="/scratch1/NCEPDEV/climate/role.ufscpara/IC" +elif [[ "${machine}" == "ORION" ]]; then + export BASE_CPLIC="/work/noaa/global/glopara/data/ICSDIR/prototype_ICs" +elif [[ "${machine}" == "S4" ]]; then + export BASE_CPLIC="/data/prod/glopara/coupled_ICs" +elif [[ "${machine}" == "JET" ]]; then + export BASE_CPLIC="/mnt/lfs4/HFIP/hfv3gfs/glopara/data/ICSDIR/prototype_ICs" +fi + + +case "${CASE}" in + "C384") + #C384 and P8 ICs + export CPL_ATMIC=GEFS-NoahMP-aerosols-p8c + export CPL_ICEIC=CPC + export CPL_OCNIC=CPC3Dvar + export CPL_WAVIC=GEFSwave20210528v2 + ;; + "C768") + export CPL_ATMIC=HR1 + export CPL_ICEIC=HR1 + export CPL_OCNIC=HR1 + export CPL_WAVIC=HR1 + ;; + *) + echo "Unrecognized case: ${1}" + exit 1 + ;; +esac + +echo "END: config.coupled_ic" diff --git a/parm/config/gfs/config.defaults.s2sw b/parm/config/gfs/config.defaults.s2sw new file mode 100644 index 0000000000..1b0becefec --- /dev/null +++ b/parm/config/gfs/config.defaults.s2sw @@ -0,0 +1,26 @@ +#! /usr/bin/env bash + +# Empty variables must include a space otherwise they will be overwritten + +# config.base +FHMAX_GFS_00=48 +FHMAX_GFS_06=48 +FHMAX_GFS_12=48 +FHMAX_GFS_18=48 +FHOUT_GFS=6 +FHOUT_HF_GFS=-1 + +# config.fcst +min_seaice="1.0e-6" +use_cice_alb=".true." + +# config.wave +FHOUT_HF_WAV=3 +DTPNT_WAV=10800 +OUTPARS_WAV="WND CUR ICE HS T01 T02 DIR FP DP PHS PTP PDIR CHA" +DOBNDPNT_WAVE='NO' + +# config.arch +export ARCH_GAUSSIAN="YES" +export ARCH_GAUSSIAN_FHMAX=${FHMAX_GFS_00} +export ARCH_GAUSSIAN_FHINC=${FHOUT_GFS} diff --git a/parm/config/gfs/config.earc b/parm/config/gfs/config.earc new file mode 100644 index 0000000000..de73a93731 --- /dev/null +++ b/parm/config/gfs/config.earc @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.earc ########## +# Ensemble archive specific + +echo "BEGIN: config.earc" + +# Get task specific resources +. $EXPDIR/config.resources earc + +export NMEM_EARCGRP=10 + +#--starting and ending hours of previous cycles to be removed from rotating directory +export RMOLDSTD_ENKF=144 +export RMOLDEND_ENKF=24 + +echo "END: config.earc" diff --git a/parm/config/gfs/config.ecen b/parm/config/gfs/config.ecen new file mode 100644 index 0000000000..2b686c6b48 --- /dev/null +++ b/parm/config/gfs/config.ecen @@ -0,0 +1,21 @@ +#! /usr/bin/env bash + +########## config.ecen ########## +# Ensemble recentering specific + +echo "BEGIN: config.ecen" + +# Get task specific resources +. $EXPDIR/config.resources ecen + +# Number of concurrent ecen jobs [1 implies sequential] +# Usually IAUFHRS_ENKF=3,6,9, so NECENGRP=3. Scripting +# below queries IAUFHRS_ENKF to determine NECENGRP +export NECENGRP=1 +if [ $DOIAU_ENKF = "YES" ]; then + ngrps=$(grep -o ',' <<<"$IAUFHRS_ENKF" | grep -c .) + ((ngrps++)) + export NECENGRP=$ngrps +fi + +echo "END: config.ecen" diff --git a/parm/config/gfs/config.echgres b/parm/config/gfs/config.echgres new file mode 100644 index 0000000000..478c6b4bcf --- /dev/null +++ b/parm/config/gfs/config.echgres @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.echgres ########## +# regrid full-res forecast for use in ensemble-res analysis generation + +echo "BEGIN: config.echgres" + +# Get task specific resources +. 
$EXPDIR/config.resources echgres + +echo "END: config.echgres" diff --git a/parm/config/gfs/config.ediag b/parm/config/gfs/config.ediag new file mode 100644 index 0000000000..12b142088d --- /dev/null +++ b/parm/config/gfs/config.ediag @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.ediag ########## +# GFS ensemble post-eobs specific + +echo "BEGIN: config.ediag" + +# Get task specific resources +. $EXPDIR/config.resources ediag + +echo "END: config.ediag" diff --git a/parm/config/gfs/config.efcs b/parm/config/gfs/config.efcs new file mode 100644 index 0000000000..95c2cb58de --- /dev/null +++ b/parm/config/gfs/config.efcs @@ -0,0 +1,97 @@ +#! /usr/bin/env bash + +########## config.efcs ########## +# Ensemble forecast specific, dependency: config.fcst + +echo "BEGIN: config.efcs" + +# TODO: the _ENKF counterparts need to be defined in config.base +export DO_AERO=${DO_AERO_ENKF:-"NO"} +export DO_OCN=${DO_OCN_ENKF:-"NO"} +export DO_ICE=${DO_ICE_ENKF:-"NO"} +export DO_WAVE=${DO_WAVE_ENKF:-"NO"} + +# TODO: Possibly need OCNRES_ENKF, ICERES_ENKF, WAVRES_ENKF too +if [[ ${DO_OCN} == "YES" ]]; then + case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; + esac +fi +[[ ${DO_ICE} == "YES" ]] && export ICERES=$OCNRES +[[ ${DO_WAVE} == "YES" ]] && export waveGRD=${waveGRD_ENKF:-$waveGRD} # TODO: will we run waves with a different resolution in the ensemble? + +# Source model specific information that is resolution dependent +string="--fv3 ${CASE_ENS}" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Get task specific resources +. $EXPDIR/config.resources efcs + +# Use serial I/O for ensemble (lustre?) +export OUTPUT_FILETYPE_ATM="netcdf" +export OUTPUT_FILETYPE_SFC="netcdf" + +# Number of enkf members per fcst job +export NMEM_EFCSGRP=2 +export NMEM_EFCSGRP_GFS=1 +export RERUN_EFCSGRP="NO" + +# Turn off inline UPP for EnKF forecast +export WRITE_DOPOST=".false." + +# Stochastic physics parameters (only for ensemble forecasts) +export DO_SKEB="YES" +export SKEB=0.3 +export SKEB_TAU=21600. +export SKEB_LSCALE=250000. +export SKEBNORM=0 +export SKEB_NPASS=30 +export SKEB_VDOF=5 +export DO_SHUM="YES" +export SHUM=0.005 +export SHUM_TAU=21600. +export SHUM_LSCALE=500000. +export DO_SPPT="YES" +export SPPT=0.5 +export SPPT_TAU=21600. +export SPPT_LSCALE=500000. +export SPPT_LOGIT=".true." +export SPPT_SFCLIMIT=".true." + +if [ $QUILTING = ".true." -a $OUTPUT_GRID = "gaussian_grid" ]; then + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" +else + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da_orig" +fi + +# FV3 model namelist parameters to over-ride +export restart_interval=${restart_interval:-6} + +# For IAU, write restarts at beginning of window also +if [ $DOIAU_ENKF = "YES" ]; then + export restart_interval="3 -1" +fi + +# wave model +export cplwav=.false. 
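# Illustration: a minimal sketch of the argument string assembled for config.ufs
# earlier in this file, assuming a coupled C384 ensemble with the quarter-degree
# ocean/ice grids chosen above (*_demo values are hypothetical):
case_ens_demo="C384"; ocnres_demo="025"; iceres_demo="025"
string_demo="--fv3 ${case_ens_demo} --mom6 ${ocnres_demo} --cice6 ${iceres_demo}"
echo "config.ufs would be sourced as: config.ufs ${string_demo}"
# The ${waveGRD// /;} substitution used for --ww3 just replaces spaces in a multi-grid
# wave list with semicolons (e.g. "grid_a grid_b" -> "grid_a;grid_b").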
+ +# ocean model resolution +case "${CASE_ENS}" in + "C48") export OCNRES=500;; + "C96") export OCNRES=100;; + "C192") export OCNRES=050;; + "C384") export OCNRES=025;; + "C768") export OCNRES=025;; + *) export OCNRES=025;; +esac +export ICERES=$OCNRES + +echo "END: config.efcs" diff --git a/parm/config/gfs/config.eobs b/parm/config/gfs/config.eobs new file mode 100644 index 0000000000..21f982addc --- /dev/null +++ b/parm/config/gfs/config.eobs @@ -0,0 +1,31 @@ +#! /usr/bin/env bash + +########## config.eobs config.eomg ########## +# Ensemble innovation specific, dependency config.anal + +echo "BEGIN: config.eobs" + +# Get task specific resources +. $EXPDIR/config.resources eobs + +# Number of enkf members per innovation job +export NMEM_EOMGGRP=8 +export RERUN_EOMGGRP="YES" +export npe_gsi=$npe_eobs + +# GSI namelist options related to observer for EnKF +export OBSINPUT_INVOBS="dmesh(1)=225.0,dmesh(2)=225.0,dmesh(3)=225.0,dmesh(4)=100.0" +export OBSQC_INVOBS="tcp_width=60.0,tcp_ermin=2.0,tcp_ermax=12.0" +if [ $LEVS = "128" ]; then + export GRIDOPTS_INVOBS="nlayers(63)=1,nlayers(64)=1," + export SETUP_INVOBS="gpstop=55,nsig_ext=56," +fi + + +export USE_RADSTAT="NO" # This can be only used when bias correction is non-zero. +export GENDIAG="YES" # Diagnostic files must be created for EnKF + +export lobsdiag_forenkf=".true." # write out jacobians from eobs + # need to specify .true. setting since config.anal sets to .false. + +echo "END: config.eobs" diff --git a/parm/config/gfs/config.epos b/parm/config/gfs/config.epos new file mode 100644 index 0000000000..8026a2ba2e --- /dev/null +++ b/parm/config/gfs/config.epos @@ -0,0 +1,20 @@ +#! /usr/bin/env bash + +########## config.epos ########## +# Ensemble post processing specific + +echo "BEGIN: config.epos" + +# Get task specific resources +. $EXPDIR/config.resources epos + +# No. of concurrent epos jobs [1 implies sequential] +export NEPOSGRP=7 +if [ $l4densvar = ".false." ]; then + export NEPOSGRP=3 +fi + +# Generate ensemble spread files +export ENKF_SPREAD="YES" + +echo "END: config.epos" diff --git a/parm/config/gfs/config.esfc b/parm/config/gfs/config.esfc new file mode 100644 index 0000000000..2bb3d48bb4 --- /dev/null +++ b/parm/config/gfs/config.esfc @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +########## config.esfc ########## +# Ensemble surface specific + +echo "BEGIN: config.esfc" + +# Get task specific resources +. $EXPDIR/config.resources esfc + +# With IAU only need surface analysis at start of IAU window. +# Set DOSFCANL_ENKF=NO to prevent creation of sfcanl at +# center of analysis window. + +if [ $DOIAU_ENKF = "YES" ]; then + export DOSFCANL_ENKF="NO" +fi + +echo "END: config.esfc" diff --git a/parm/config/gfs/config.eupd b/parm/config/gfs/config.eupd new file mode 100644 index 0000000000..1ac90d2b75 --- /dev/null +++ b/parm/config/gfs/config.eupd @@ -0,0 +1,34 @@ +#! /usr/bin/env bash + +########## config.eupd ########## +# Ensemble update specific, dependency config.anal + +echo "BEGIN: config.eupd" + +# Get task specific resources +. $EXPDIR/config.resources eupd + +export npe_enkf=$npe_eupd + +# Use NAM_ENKF below for serial EnKF +##export NAM_ENKF="analpertwtnh=0.9,analpertwtsh=0.9,analpertwttr=0.9" + +# LETKF specific settings with model space localization +export modelspace_vloc=".true." # model space localization +export letkf_flag=".true." # use LETKF instead of serial filter +export getkf=".true." # Gain form of LETKF (needed for model-space localization) +export denkf=".true." 
# EnKF approximation (beneficial since less spread removed by analysis) +export nobsl_max=10000 # max number of obs in each LETKF volume (uses closest nobsl_max). can + # be reduced to speed up execution time. +export analpertwt=0.85 # relaxation to prior spread inflation factor +export readin_localization_enkf=".false." # Don’t read in localization scales from file (doesn’t make + # sense for LETKF if model space localization on and nobsl_max>0) +export corrlength=1250 # Horizontal localization scale (max horizontal distance to search for nobsl_max local obs) +export lnsigcutoff=2.75 # ignored if modelspace_vloc=.true. + +export lobsdiag_forenkf=".true." # use jacobian. must be .true. if modelspace_vloc=".true." + # need to specify .true. setting since config.anal sets to .false. + +export NAM_ENKF="smoothparm=35," + +echo "END: config.eupd" diff --git a/parm/config/gfs/config.fcst b/parm/config/gfs/config.fcst new file mode 100644 index 0000000000..3b79a18287 --- /dev/null +++ b/parm/config/gfs/config.fcst @@ -0,0 +1,400 @@ +#! /usr/bin/env bash + +########## config.fcst ########## +# Forecast specific + +echo "BEGIN: config.fcst" + +# Turn off waves if not used for this CDUMP +case $WAVE_CDUMP in + both | ${CDUMP/enkf} ) ;; # Don't change + *) DO_WAVE="NO" ;; # Turn waves off +esac + +# Source model specific information that is resolution dependent +string="--fv3 $CASE" +[[ ${DO_OCN} == "YES" ]] && string="$string --mom6 $OCNRES" +[[ ${DO_ICE} == "YES" ]] && string="$string --cice6 $ICERES" +[[ ${DO_WAVE} == "YES" ]] && string="$string --ww3 ${waveGRD// /;}" +source $EXPDIR/config.ufs ${string} + +# Source component configs if necessary +for component in WAVE OCN ICE AERO; do + control="DO_${component}" + if [[ $(eval echo \$$control) == "YES" ]]; then + . $EXPDIR/config.$(echo "$component" | awk '{ print tolower($1) }') + fi +done + +# Get task specific resources +. $EXPDIR/config.resources fcst +export domains_stack_size="16000000" + + +if [[ "$DONST" = "YES" ]]; then + . $EXPDIR/config.nsst +fi + +export esmf_profile=".false." +export esmf_logkind="ESMF_LOGKIND_MULTI_ON_ERROR" #Options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE + + +####################################################################### +# COUPLING COMPONENTS + +# cpl defaults +export cpl=".false." +export cplflx=".false." +export cplice=".false." +export cplchm=".false." +export cplwav=".false." + +# cpl changes based on APP + +if [[ "$DO_COUPLED" = "YES" ]]; then + export cpl=".true." +fi +if [[ "$DO_AERO" = "YES" ]]; then + export cplchm=".true." +fi +if [[ "$DO_ICE" = "YES" ]]; then + export cplice=".true." + export cplflx=".true." +fi +if [[ "$DO_OCN" = "YES" ]]; then + export cplflx=".true." +fi +if [[ "$DO_WAVE" = "YES" ]]; then + export cplwav=".true." +fi + +####################################################################### + +export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.sh" +#export FORECASTSH="$HOMEgfs/scripts/exglobal_forecast.py" # Temp. while this is worked on +export FCSTEXECDIR="$HOMEgfs/exec" +export FCSTEXEC="ufs_model.x" + +####################################################################### +# Model configuration +export TYPE="nh" +export MONO="non-mono" + +# Use stratosphere h2o physics +export h2o_phys=".true." 
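As a side note on the component-sourcing loop earlier in this config.fcst: the eval/awk indirection there can equivalently be written with bash's built-in indirect expansion and case conversion (bash 4+). A minimal sketch of the same logic, shown only as an illustration:

for component in WAVE OCN ICE AERO; do
  control="DO_${component}"               # e.g. DO_WAVE, DO_OCN, ...
  if [[ "${!control}" == "YES" ]]; then   # indirect expansion instead of 'eval echo \$$control'
    . "${EXPDIR}/config.${component,,}"   # ${var,,} lowercases, instead of the awk call
  fi
done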
+ +# Options of stratosphere O3 physics reaction coefficients +export new_o3forc="YES" + +export gwd_opt=2 + +# --GFS.v16 uGWD.v0, used for suite FV3_GFS_v16 and UFS p6 etc +# do_ugwp=T: use unified CGWD and OGWD, and turbulent orographic form drag (TOFD) +# do_ugwp=F: use unified CGWD but old OGWD, TOFD is not used. +if [[ "$gwd_opt" -eq 1 ]]; then + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + + +# -- uGWD.v1, for suite FV3_GFS_v17 and FV3_GFS_v17p8b etc +if [[ "$gwd_opt" -eq 2 ]]; then + + #--used for UFS p7 and p8a + #export knob_ugwp_version=1 + #export do_ugwp=".false." + #export do_tofd=".false." + #export do_ugwp_v0=".false." + #export do_ugwp_v1=".true." + #export do_ugwp_v0_orog_only=".false." + #export do_ugwp_v0_nst_only=".false." + #export do_gsl_drag_ls_bl=".true." + #export do_gsl_drag_ss=".true." + #export do_gsl_drag_tofd=".true." + #export do_ugwp_v1_orog_only=".false." + + #--used for UFS p8 + export knob_ugwp_version=0 + export do_ugwp=".false." + export do_tofd=".false." + export do_ugwp_v0=".true." + export do_ugwp_v1=".false." + export do_ugwp_v0_orog_only=".false." + export do_ugwp_v0_nst_only=".false." + export do_gsl_drag_ls_bl=".false." + export do_gsl_drag_ss=".true." + export do_gsl_drag_tofd=".false." + export do_ugwp_v1_orog_only=".false." + export launch_level=$(echo "$LEVS/2.35" |bc) +fi + +# Sponge layer settings +export tau=0.0 ## JKH +export rf_cutoff=10 ## JKH +export fast_tau_w_sec=0.2 ## JKH +export d2_bg_k1=0.20 +export d2_bg_k2=0.04 +export dz_min=6 +export n_sponge=42 +if [[ "${LEVS}" = "128" && "${CDUMP}" =~ "gdas" ]]; then + export tau=5.0 + export rf_cutoff=1.0e3 + export d2_bg_k1=0.20 + export d2_bg_k2=0.0 +fi + +# PBL/turbulence schemes +export hybedmf=".false." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export satmedmf=".false." + export isatmedmf=0 + export shal_cnv=".false." + export do_mynnedmf=".true." + export do_mynnsfclay=".false." + export icloud_bl=1 + export bl_mynn_tkeadvect=".true." + export bl_mynn_edmf=1 + export bl_mynn_edmf_mom=1 + export lcnorm=".true." ## JKH +else + export satmedmf=".true." + export isatmedmf=1 +fi +tbf="" +if [[ "$satmedmf" = ".true." ]]; then tbf="_satmedmf" ; fi + +# Radiation options +export IAER=1011 ; #spectral band mapping method for aerosol optical properties +export iovr_lw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr_sw=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export iovr=3 ; #de-correlation length cloud overlap method (Barker, 2008) +export icliq_sw=2 ; #cloud optical coeffs from AER's newer version v3.9-v4.0 for Hu and Stamnes +export isubc_sw=2 +export isubc_lw=2 + +# RRTMGP radiation scheme +export do_RRTMGP=.false. +export doGP_cldoptics_LUT=.false. +export doGP_lwscat=.false. + +# LSM configuration +# NoahMP only +export iopt_sfc="3" +export iopt_trs="2" + +# Convection Options: 2-SASAS, 3-GF +export progsigma=".true." +if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_c3" ]] ; then + export imfdeepcnv=5 + export imfshalcnv=5 + export progsigma=.false. +elif [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_gf" ]] ; then + export imfdeepcnv=3 + export imfshalcnv=3 +else + export imfdeepcnv=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export imfshalcnv=-1 + else + export imfshalcnv=2 + fi +fi + +#Convection schemes ### JKH - affects field table name +tbp="" +if [ "$progsigma" = ".true."
]; then tbp="_progsigma" ; fi + + +# Microphysics configuration +export dnats=0 +export cal_pre=".true." +export do_sat_adj=".false." +export random_clds=".true." + +if [[ "$imp_physics" -eq 99 ]]; then # ZhaoCarr + export ncld=1 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_zhaocarr${tbf}${tbp}" + export nwat=2 + +elif [[ "$imp_physics" -eq 6 ]]; then # WSM6 + export ncld=2 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_wsm6${tbf}${tbp}" + export nwat=6 + +elif [[ "$imp_physics" -eq 8 ]]; then # Thompson + export nwat=6 + + export cal_pre=".false." + export random_clds=".false." + export effr_in=".true." + export lradar=".false." + export ttendlim="-999" + export dt_inner=$((DELTIM/2)) + export sedi_semi=.true. + if [[ "$sedi_semi" = .true. ]]; then export dt_inner=$DELTIM ; fi + export decfl=10 + + export dddmp=0.1 + export d4_bg=0.12 + export ncld=2 + if [[ "$CCPP_SUITE" == "FV3_GFS_v17_p8_thompson" || "$CCPP_SUITE" == "FV3_GFS_v17_p8_mynn" ]] ; then + export ltaerosol=".true." + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_aero_tke${tbp}" + else + export ltaerosol=".false." + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke${tbp}" + #JKHexport FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_thompson_noaero_tke" + fi + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export n_split=4 ## JKH + +elif [[ "$imp_physics" -eq 11 ]]; then # GFDL + export ncld=5 + export FIELD_TABLE="$HOMEgfs/parm/parm_fv3diag/field_table_gfdl${tbf}${tbp}" + export nwat=6 + export dnats=1 + export cal_pre=".false." + export do_sat_adj=".true." + export random_clds=".false." + export lgfdlmprad=".true." + export effr_in=".true." + export reiflag=2 + + export hord_mt_nh_nonmono=5 + export hord_xx_nh_nonmono=5 + export vtdm4_nh_nonmono=0.02 + export nord=2 + export d4_bg=0.12 + export dddmp=0.1 + +else + echo "Unknown microphysics option, ABORT!" +fi + +# Stochastic physics +export DO_SPPT=${DO_SPPT:-"NO"} +export DO_SKEB=${DO_SKEB:-"NO"} +export DO_SHUM=${DO_SHUM:-"NO"} +export DO_LAND_PERT=${DO_LAND_PERT:-"NO"} +export DO_CA=${DO_CA:-"YES"} + +#coupling settings +export cplmode="nems_frac" +if [[ "${FRAC_GRID:-".true."}" = ".false." ]]; then + export cplmode="nems_orig" +fi +export psm_bc="1" + +export min_lakeice="0.15" +export min_seaice=${min_seaice:-"0.15"} +export use_cice_alb=${use_cice_alb:-".false."} + +export FSICL="0" +export FSICS="0" + +#--------------------------------------------------------------------- + +# ideflate: netcdf zlib lossless compression (0-9): 0 no compression +# nbits: netcdf lossy compression level (0-32): 0 lossless +export ideflate=1 +export nbits=14 +export ishuffle=0 +# compression for RESTART files written by FMS +export shuffle=1 +export deflate_level=1 + +#--------------------------------------------------------------------- +# Disable the use of coupler.res; get model start time from model_configure +export USE_COUPLER_RES="NO" + +if [[ "$CDUMP" =~ "gdas" ]] ; then # GDAS cycle specific parameters + + # Variables used in DA cycling + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table_da" + + # Write restart files, where $number is current model start time. + # restart_interval: $number + # number=0, writes out restart files at the end of forecast. + # number>0, writes out restart files at the frequency of $number and at the end of forecast. 
+ # restart_interval: "$number -1" + # writes out restart files only once at $number forecast hour. + # restart_interval: "$number1 $number2 $number3 ..." + # writes out restart file at the specified forecast hours + export restart_interval=${restart_interval:-6} + + # For IAU, write restarts at beginning of window also + if [[ "$DOIAU" = "YES" ]]; then + export restart_interval="3 6" + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" ]]; then export cplwav=".true." ; fi + + # Turn on dry mass adjustment in GDAS + export adjust_dry_mass=".true." + +elif [[ "$CDUMP" =~ "gfs" ]] ; then # GFS cycle specific parameters + + # Write more variables to output + export DIAG_TABLE="$HOMEgfs/parm/parm_fv3diag/diag_table" + + # Write gfs restart files to rerun fcst from any break point + export restart_interval_gfs=${restart_interval_gfs:-0} + if [[ "$restart_interval_gfs" -le 0 ]]; then + export restart_interval="$FHMAX_GFS" + else + rst_list="" + IAU_OFFSET=${IAU_OFFSET:-0} + [[ $DOIAU = "NO" ]] && export IAU_OFFSET=0 + xfh=$((restart_interval_gfs+(IAU_OFFSET/2))) + while [ $xfh -le $FHMAX_GFS ]; do + rst_list="$rst_list $xfh" + xfh=$((xfh+restart_interval_gfs)) + done + export restart_interval="$rst_list" + fi + + if [[ "$DO_AERO" = "YES" ]]; then + # Make sure a restart file is written at the cadence time + if [[ ! "${restart_interval[*]}" =~ "$STEP_GFS" ]]; then + export restart_interval="$STEP_GFS $restart_interval" + fi + fi + + # Choose coupling with wave + if [[ "$DO_WAVE" = "YES" && "$WAVE_CDUMP" != "gdas" ]]; then + export cplwav=".true." + fi + + # Turn off dry mass adjustment in GFS + export adjust_dry_mass=".false." + + # Write each restart file in 16 small files to save time + if [[ "$CASE" = C768 ]]; then + export io_layout="4,4" + else + export io_layout="1,1" + fi + +fi + +if [[ "$DO_AERO" = "YES" ]]; then # temporary settings for aerosol coupling + export AERO_DIAG_TABLE="${AERO_DIAG_TABLE:-$HOMEgfs/parm/parm_fv3diag/diag_table.aero}" + export AERO_FIELD_TABLE="${AERO_FIELD_TABLE:-$HOMEgfs/parm/parm_fv3diag/field_table.aero}" + export AERO_EMIS_FIRE=$( echo "${AERO_EMIS_FIRE:-none}" | awk '{ print tolower($1) }' ) + export AERO_CONFIG_DIR="${AERO_CONFIG_DIR:-$HOMEgfs/parm/chem}" + export AERO_INPUTS_DIR="${AERO_INPUTS_DIR:-}" + export fscav_aero="${aero_conv_scav_factors:-${fscav_aero}}" + export dnats_aero="${aero_diag_tracers:-0}" +fi + +# Remember config.efcs will over-ride these values for ensemble forecasts +# if these variables are re-defined there. +# Otherwise, the ensemble forecast will inherit from config.fcst + +echo "END: config.fcst" diff --git a/parm/config/gfs/config.fit2obs b/parm/config/gfs/config.fit2obs new file mode 100644 index 0000000000..46baaa9e45 --- /dev/null +++ b/parm/config/gfs/config.fit2obs @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.fit2obs ########## +# Fit to Observations + +echo "BEGIN: config.fit2obs" + +# Get task specific resources +. "${EXPDIR}/config.resources" fit2obs + +export PRVT=${HOMEgfs}/fix/gsi/prepobs_errtable.global +export HYBLEVS=${HOMEgfs}/fix/am/global_hyblev.l${LEVS}.txt + +export VBACKUP_FITS=24 +export OUTPUT_FILETYPE="netcdf" +export CONVNETC="YES" +export ACPROFit="YES" + +if [[ ${netcdf_diag:-".false."} = ".true." 
]]; then + export CONVNETC="YES" +fi + +echo "END: config.fit2obs" diff --git a/parm/config/gfs/config.fv3.nco.static b/parm/config/gfs/config.fv3.nco.static new file mode 100644 index 0000000000..dc60b2ef03 --- /dev/null +++ b/parm/config/gfs/config.fv3.nco.static @@ -0,0 +1,145 @@ +#! /usr/bin/env bash + +########## config.fv3 ########## +# FV3 model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3 for a given resolution +# User can over-ride after sourcing this config file + +if [ $# -ne 1 ]; then + + echo "Must specify an input resolution argument to set variables!" + echo "argument can be any one of the following:" + echo "C48 C96 C192 C384 C768 C1152 C3072" + exit 1 + +fi + +case_in=$1 + +echo "BEGIN: config.fv3" + +export npe_node_max=128 + +# (Standard) Model resolution dependent variables +case $case_in in + "C48") + export DELTIM=450 + export layout_x=2 + export layout_y=4 + export layout_x_gfs=2 + export layout_y_gfs=4 + export npe_wav=14 + export npe_wav_gfs=14 + export nth_fv3=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP=$npe_node_max + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_GFS=$npe_node_max + export WRTIOBUF="4M" + ;; + "C96") + export DELTIM=450 + export layout_x=4 + export layout_y=4 + export layout_x_gfs=4 + export layout_y_gfs=4 + export npe_wav=14 + export npe_wav_gfs=14 + export nth_fv3=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP=$npe_node_max + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_GFS=$npe_node_max + export WRTIOBUF="4M" + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export npe_wav=21 + export npe_wav_gfs=21 + export nth_fv3=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP=$npe_node_max + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_GFS=$npe_node_max + export WRTIOBUF="8M" + ;; + "C384") + export DELTIM=200 + export layout_x=8 + export layout_y=8 + export layout_x_gfs=6 + export layout_y_gfs=6 + export npe_wav=35 + export npe_wav_gfs=35 + export nth_fv3=1 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP=64 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_GFS=64 + export WRTIOBUF="16M" + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + export layout_y_gfs=24 + export npe_wav=140 + export npe_wav_gfs=448 + export nth_fv3=3 + export nth_fv3_gfs=5 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP=64 + export WRITE_GROUP_GFS=8 + export WRTTASK_PER_GROUP_GFS=64 + export WRTIOBUF="32M" + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export npe_wav=140 + export npe_wav_gfs=140 + export nth_fv3=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP=$(echo "2*$npe_node_max" |bc) + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_GFS=$(echo "2*$npe_node_max" |bc) + export WRTIOBUF="48M" + ;; + "C3072") + export 
DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export npe_wav=140 + export npe_wav_gfs=140 + export nth_fv3=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP=$(echo "3*$npe_node_max" |bc) + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_GFS=$(echo "3*$npe_node_max" |bc) + export WRTIOBUF="64M" + ;; + *) + echo "grid $case_in not supported, ABORT!" + exit 1 + ;; +esac + +echo "END: config.fv3" diff --git a/parm/config/gfs/config.gempak b/parm/config/gfs/config.gempak new file mode 100644 index 0000000000..791770ba4a --- /dev/null +++ b/parm/config/gfs/config.gempak @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.gempak ########## +# GFS gempak step specific + +echo "BEGIN: config.gempak" + +# Get task specific resources +. $EXPDIR/config.resources gempak + +echo "END: config.gempak" diff --git a/parm/config/gfs/config.getic b/parm/config/gfs/config.getic new file mode 100644 index 0000000000..d51e2d3900 --- /dev/null +++ b/parm/config/gfs/config.getic @@ -0,0 +1,66 @@ +#! /usr/bin/env bash + +########## config.getic ########## +# Fetching GFS initial conditions specific + +echo "BEGIN: config.getic" + +# Get task specific resources +. $EXPDIR/config.resources getic + +export RETRO="NO" # YES = Pull v16 inputs from retrospective parallels; NO = use operational inputs +export gfs_ver="v16" # Default = v16 +export OPS_RES=${OPS_RES:-"C768"} # Operational resolution + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDAS_INIT_DIR=${UFS_DIR}/util/gdas_init + +export PRODHPSSDIR=/NCEPPROD/hpssprod/runhistory +export GETICSH=${GDAS_INIT_DIR}/get_v16.data.sh + +if [ ${RETRO:-"NO"} = "YES" ]; then # Retrospective parallel input + export GETICSH=${GDAS_INIT_DIR}/get_v16retro.data.sh + if [[ "$CDATE" -lt "2019060106" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro0e + elif [[ "$CDATE" -lt "2019090100" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro1e + elif [[ "$CDATE" -lt "2019101706" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16retro2e + elif [[ "$CDATE" -lt "2020122200" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2 + elif [[ "$CDATE" -le "2021032506" ]]; then + HPSSDIR=/NCEPDEV/emc-global/5year/emc.glopara/WCOSS_D/gfsv16/v16rt2n + else + set +x + echo NO DATA FOR $CDATE + exit 3 + fi +elif [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export GETICSH=${GDAS_INIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export GETICSH=${GDAS_INIT_DIR}/get_pre-v14.data.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export GETICSH=${GDAS_INIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export GETICSH=${GDAS_INIT_DIR}/get_${gfs_ver}.data.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. 
+ set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.getic" diff --git a/parm/config/gfs/config.ice b/parm/config/gfs/config.ice new file mode 100644 index 0000000000..7bc1f80966 --- /dev/null +++ b/parm/config/gfs/config.ice @@ -0,0 +1,5 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ice" + +echo "END: config.ice" diff --git a/parm/config/gfs/config.init b/parm/config/gfs/config.init new file mode 100644 index 0000000000..fccbc719db --- /dev/null +++ b/parm/config/gfs/config.init @@ -0,0 +1,55 @@ +#! /usr/bin/env bash + +########## config.init ########## +# Prepare initial conditions + +echo "BEGIN: config.init" + +# Get task specific resources +. $EXPDIR/config.resources init + +# Get task specific resources +. $EXPDIR/config.getic + +export UFS_DIR=${HOMEgfs}/sorc/ufs_utils.fd +export GDAS_INIT_DIR=${UFS_DIR}/util/gdas_init +export EXEC_DIR=${UFS_DIR}/exec + +export CRES_HIRES=$CASE +export CRES_ENKF="" +export FRAC_ORO="yes" + +export RUNICSH=${GDAS_INIT_DIR}/run_v16.chgres.sh +if [ "${RETRO:-"NO"}" = "YES" ] || [ "$CDUMP" = "gdas" ]; then + export RUNICSH=${GDAS_INIT_DIR}/run_v16retro.chgres.sh +fi + +if [ ${RETRO:-"NO"} = "NO" ]; then # Operational input + # No ENKF data prior to 2012/05/21/00z + if [[ "$CDATE" -lt "2012052100" ]]; then + set +x + echo FATAL ERROR: SCRIPTS DO NOT SUPPORT OLD GFS DATA + elif [[ "$CDATE" -lt "2016051000" ]]; then + export gfs_ver=v12 + export RUNICSH=${GDAS_INIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2017072000" ]]; then + export gfs_ver=v13 + export RUNICSH=${GDAS_INIT_DIR}/run_pre-v14.chgres.sh + elif [[ "$CDATE" -lt "2019061200" ]]; then + export gfs_ver=v14 + export RUNICSH=${GDAS_INIT_DIR}/run_${gfs_ver}.chgres.sh + elif [[ "$CDATE" -lt "2021032100" ]]; then + export gfs_ver=v15 + export RUNICSH=${GDAS_INIT_DIR}/run_${gfs_ver}.chgres.gfs.sh + elif [[ "$CDATE" -lt "2021032106" ]]; then + # The way the v16 switch over was done, there is no complete + # set of v16 or v15 data for 2021032100. And although + # v16 was officially implemented 2021032212, the v16 prod + # tarballs were archived starting 2021032106. + set +x + echo FATAL ERROR: NO V15 OR V16 DATA FOR 2021032100 + exit 1 + fi +fi + +echo "END: config.init" diff --git a/parm/config/gfs/config.landanl b/parm/config/gfs/config.landanl new file mode 100644 index 0000000000..89bb8a4b7b --- /dev/null +++ b/parm/config/gfs/config.landanl @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +########## config.landanl ########## +# configuration common to all land analysis tasks + +echo "BEGIN: config.landanl" + +obs_list_name=gdas_land_adpsfc_only.yaml +if [[ "${cyc}" == "18" ]]; then + obs_list_name=gdas_land_prototype.yaml +fi + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/config/ +export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/land/obs/lists/${obs_list_name} +export LANDVARYAML=${HOMEgfs}/sorc/gdas.cd/parm/land/letkfoi/letkfoi.yaml +export FV3JEDI_FIX=${HOMEgfs}/fix/gdas + +export io_layout_x=@IO_LAYOUT_X@ +export io_layout_y=@IO_LAYOUT_Y@ + +export JEDIEXE=${HOMEgfs}/exec/fv3jedi_letkf.x + +echo "END: config.landanl" diff --git a/parm/config/gfs/config.landanlfinal b/parm/config/gfs/config.landanlfinal new file mode 100644 index 0000000000..242089325a --- /dev/null +++ b/parm/config/gfs/config.landanlfinal @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlfinal ########## +# Post Land Analysis specific + +echo "BEGIN: config.landanlfinal" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" landanlfinal +echo "END: config.landanlfinal" diff --git a/parm/config/gfs/config.landanlinit b/parm/config/gfs/config.landanlinit new file mode 100644 index 0000000000..62054525c8 --- /dev/null +++ b/parm/config/gfs/config.landanlinit @@ -0,0 +1,10 @@ +#! /usr/bin/env bash + +########## config.landanlinit ########## +# Pre Land Analysis specific + +echo "BEGIN: config.landanlinit" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlinit +echo "END: config.landanlinit" diff --git a/parm/config/gfs/config.landanlrun b/parm/config/gfs/config.landanlrun new file mode 100644 index 0000000000..0f44011c1d --- /dev/null +++ b/parm/config/gfs/config.landanlrun @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.landanlrun ########## +# Land Analysis specific + +echo "BEGIN: config.landanlrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" landanlrun + +echo "END: config.landanlrun" diff --git a/parm/config/gfs/config.metp b/parm/config/gfs/config.metp new file mode 100644 index 0000000000..c90903f6a5 --- /dev/null +++ b/parm/config/gfs/config.metp @@ -0,0 +1,99 @@ +#! /usr/bin/env bash + +########## config.metp ########## +# METplus verification step specific + +echo "BEGIN: config.metp" + +# Get task specific resources +. "${EXPDIR}/config.resources" metp + +export RUN_GRID2GRID_STEP1="YES" # Run grid-to-grid verification using METplus +export RUN_GRID2OBS_STEP1="YES" # Run grid-to-obs verification using METplus +export RUN_PRECIP_STEP1="YES" # Run precip verification using METplus + + +#---------------------------------------------------------- +# METplus: Verify grid-to-grid, grid-to-obs, precipitation options +#---------------------------------------------------------- +## EMC_VERIF_GLOBAL SETTINGS +export HOMEverif_global=${HOMEgfs}/sorc/verif-global.fd +export VERIF_GLOBALSH=${HOMEverif_global}/ush/run_verif_global_in_global_workflow.sh +## INPUT DATA SETTINGS +export model=${PSLOT} +export model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export model_hpss_dir=${ATARDIR}/.. +export get_data_from_hpss="NO" +export hpss_walltime="10" +## OUTPUT SETTINGS +export model_stat_dir=${ARCDIR}/.. 
+export make_met_data_by="VALID" +export SENDMETVIEWER="NO" +## DATE SETTINGS +export VRFYBACK_HRS="0" +## METPLUS SETTINGS +export METplus_verbosity="INFO" +export MET_verbosity="2" +export log_MET_output_to_METplus="yes" +# GRID-TO-GRID STEP 1: gfsmetpg2g1 +export g2g1_type_list="anom pres sfc" +export g2g1_anom_truth_name="self_anl" +export g2g1_anom_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_anom_fhr_min=${FHMIN_GFS} +export g2g1_anom_fhr_max=${FHMAX_GFS} +export g2g1_anom_grid="G002" +export g2g1_anom_gather_by="VSDB" +export g2g1_pres_truth_name="self_anl" +export g2g1_pres_truth_file_format="pgbanl.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_pres_fhr_min=${FHMIN_GFS} +export g2g1_pres_fhr_max=${FHMAX_GFS} +export g2g1_pres_grid="G002" +export g2g1_pres_gather_by="VSDB" +export g2g1_sfc_truth_name="self_f00" +export g2g1_sfc_truth_file_format="pgbf00.${CDUMP}.{valid?fmt=%Y%m%d%H}.grib2" +export g2g1_sfc_fhr_min=${FHMIN_GFS} +export g2g1_sfc_fhr_max=${FHMAX_GFS} +export g2g1_sfc_grid="G002" +export g2g1_sfc_gather_by="VSDB" +export g2g1_mv_database_name="mv_${PSLOT}_grid2grid_metplus" +export g2g1_mv_database_group="NOAA NCEP" +export g2g1_mv_database_desc="Grid-to-grid METplus data for global workflow experiment ${PSLOT}" +# GRID-TO-OBS STEP 1: gfsmetpg2o1 +export g2o1_type_list="upper_air conus_sfc" +export g2o1_upper_air_msg_type_list="ADPUPA" +export g2o1_upper_air_vhr_list="00 06 12 18" +export g2o1_upper_air_fhr_min=${FHMIN_GFS} +export g2o1_upper_air_fhr_max="240" +export g2o1_upper_air_grid="G003" +export g2o1_upper_air_gather_by="VSDB" +export g2o1_conus_sfc_msg_type_list="ONLYSF ADPUPA" +export g2o1_conus_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_conus_sfc_fhr_min=${FHMIN_GFS} +export g2o1_conus_sfc_fhr_max="240" +export g2o1_conus_sfc_grid="G104" +export g2o1_conus_sfc_gather_by="VSDB" +export g2o1_polar_sfc_msg_type_list="IABP" +export g2o1_polar_sfc_vhr_list="00 03 06 09 12 15 18 21" +export g2o1_polar_sfc_fhr_min=${FHMIN_GFS} +export g2o1_polar_sfc_fhr_max="240" +export g2o1_polar_sfc_grid="G219" +export g2o1_polar_sfc_gather_by="VSDB" +export g2o1_prepbufr_data_run_hpss="NO" +export g2o1_mv_database_name="mv_${PSLOT}_grid2obs_metplus" +export g2o1_mv_database_group="NOAA NCEP" +export g2o1_mv_database_desc="Grid-to-obs METplus data for global workflow experiment ${PSLOT}" +# PRECIP STEP 1: gfsmetppcp1 +export precip1_type_list="ccpa_accum24hr" +export precip1_ccpa_accum24hr_model_bucket="06" +export precip1_ccpa_accum24hr_model_var="APCP" +export precip1_ccpa_accum24hr_model_file_format="pgbf{lead?fmt=%2H}.${CDUMP}.{init?fmt=%Y%m%d%H}.grib2" +export precip1_ccpa_accum24hr_fhr_min=${FHMIN_GFS} +export precip1_ccpa_accum24hr_fhr_max="180" +export precip1_ccpa_accum24hr_grid="G211" +export precip1_ccpa_accum24hr_gather_by="VSDB" +export precip1_obs_data_run_hpss="NO" +export precip1_mv_database_name="mv_${PSLOT}_precip_metplus" +export precip1_mv_database_group="NOAA NCEP" +export precip1_mv_database_desc="Precip METplus data for global workflow experiment ${PSLOT}" + +echo "END: config.metp" diff --git a/parm/config/gfs/config.nsst b/parm/config/gfs/config.nsst new file mode 100644 index 0000000000..235c91f08b --- /dev/null +++ b/parm/config/gfs/config.nsst @@ -0,0 +1,33 @@ +#! 
/usr/bin/env bash + +########## config.nsst ########## +# NSST specific + +echo "BEGIN: config.nsst" + +# NSST parameters contained within nstf_name + +# nstf_name(1) : NST_MODEL (NSST Model) : 0 = OFF, 1 = ON but uncoupled, 2 = ON and coupled +export NST_MODEL=2 + +# nstf_name(2) : NST_SPINUP : 0 = OFF, 1 = ON, +export NST_SPINUP=0 +if [[ "${PDY}${cyc}" -lt "2017072000" ]]; then + export NST_SPINUP=1 +fi + +# nstf_name(3) : NST_RESV (Reserved, NSST Analysis) : 0 = OFF, 1 = ON +export NST_RESV=0 + +# nstf_name(4,5) : ZSEA1, ZSEA2 the two depths to apply vertical average (bias correction) +export ZSEA1=0 +export ZSEA2=0 + +export NST_GSI=3 # default 0: No NST info at all; + # 1: Input NST info but not used in GSI; + # 2: Input NST info, used in CRTM simulation, no Tr analysis + # 3: Input NST info, used in both CRTM simulation and Tr analysis +export NSTINFO=0 # number of elements added in obs. data array (default = 0) +if [ $NST_GSI -gt 0 ]; then export NSTINFO=4; fi + +echo "END: config.nsst" diff --git a/parm/config/gfs/config.ocn b/parm/config/gfs/config.ocn new file mode 100644 index 0000000000..7d14e3dd52 --- /dev/null +++ b/parm/config/gfs/config.ocn @@ -0,0 +1,23 @@ +#! /usr/bin/env bash + +echo "BEGIN: config.ocn" + +# MOM_input template to use +export MOM_INPUT="MOM_input_template_${OCNRES}" + +export DO_OCN_SPPT="NO" # In MOM_input, this variable determines OCN_SPPT (OCN_SPPT = True|False) +export DO_OCN_PERT_EPBL="NO" # In MOM_input, this variable determines PERT_EPBL (PERT_EPBL = True|False) + +# Templated variables in MOM_input_template +export MOM6_USE_LI2016="True" # set to False for restart reproducibility +export MOM6_THERMO_SPAN="False" +export MOM6_ALLOW_LANDMASK_CHANGES="False" + +if [[ "${DO_JEDIOCNVAR}" == "YES" ]]; then + export ODA_INCUPD="True" +else + export ODA_INCUPD="False" +fi +export ODA_INCUPD_NHOURS="3.0" # In MOM_input, this is the time interval for applying the increment + +echo "END: config.ocn" diff --git a/parm/config/gfs/config.ocnanal b/parm/config/gfs/config.ocnanal new file mode 100644 index 0000000000..36519c7f35 --- /dev/null +++ b/parm/config/gfs/config.ocnanal @@ -0,0 +1,32 @@ +#!/bin/bash + +########## config.ocnanal ########## +# configuration common to all ocean analysis tasks + +echo "BEGIN: config.ocnanal" + +export OBS_YAML_DIR=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/config +export OBS_LIST=@SOCA_OBS_LIST@ +[[ -n "${OBS_LIST}" ]] || export OBS_LIST=${HOMEgfs}/sorc/gdas.cd/parm/soca/obs/obs_list.yaml +export OBS_YAML=${OBS_LIST} +export FV3JEDI_STAGE_YAML=${HOMEgfs}/sorc/gdas.cd/test/soca/testinput/dumy.yaml +export SOCA_INPUT_FIX_DIR=@SOCA_INPUT_FIX_DIR@ +export SOCA_VARS=tocn,socn,ssh +export SABER_BLOCKS_YAML=@SABER_BLOCKS_YAML@ +export SOCA_NINNER=@SOCA_NINNER@ +export CASE_ANL=@CASE_ANL@ +export DOMAIN_STACK_SIZE=116640000 #TODO: Make the stack size resolution dependent +export JEDI_BIN=${HOMEgfs}/sorc/gdas.cd/build/bin + +# R2D2 +export R2D2_OBS_DB=shared +export R2D2_OBS_DUMP=@R2D2_OBS_DUMP@ +export R2D2_OBS_SRC=@R2D2_OBS_SRC@ +export R2D2_OBS_WINDOW=24 # TODO: Check if the R2D2 sampling DB window is still needed +export COMIN_OBS=@COMIN_OBS@ + +# NICAS +export NICAS_RESOL=@NICAS_RESOL@ +export NICAS_GRID_SIZE=@NICAS_GRID_SIZE@ + +echo "END: config.ocnanal" diff --git a/parm/config/gfs/config.ocnanalbmat b/parm/config/gfs/config.ocnanalbmat new file mode 100644 index 0000000000..024da5f51b --- /dev/null +++ b/parm/config/gfs/config.ocnanalbmat @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalbmat ########## +# Ocn Analysis specific
+ +echo "BEGIN: config.ocnanalbmat" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalbmat + +echo "END: config.ocnanalbmat" diff --git a/parm/config/gfs/config.ocnanalchkpt b/parm/config/gfs/config.ocnanalchkpt new file mode 100644 index 0000000000..c059fdba42 --- /dev/null +++ b/parm/config/gfs/config.ocnanalchkpt @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalchkpt ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalchkpt" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalchkpt + +echo "END: config.ocnanalchkpt" diff --git a/parm/config/gfs/config.ocnanalpost b/parm/config/gfs/config.ocnanalpost new file mode 100644 index 0000000000..bc4d945865 --- /dev/null +++ b/parm/config/gfs/config.ocnanalpost @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalpost ########## +# Post Ocn Analysis specific + +echo "BEGIN: config.ocnanalpost" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalpost +echo "END: config.ocnanalpost" diff --git a/parm/config/gfs/config.ocnanalprep b/parm/config/gfs/config.ocnanalprep new file mode 100644 index 0000000000..225eb089c3 --- /dev/null +++ b/parm/config/gfs/config.ocnanalprep @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalprep ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalprep" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalprep +echo "END: config.ocnanalprep" diff --git a/parm/config/gfs/config.ocnanalrun b/parm/config/gfs/config.ocnanalrun new file mode 100644 index 0000000000..5345b6c684 --- /dev/null +++ b/parm/config/gfs/config.ocnanalrun @@ -0,0 +1,11 @@ +#!/bin/bash + +########## config.ocnanalrun ########## +# Ocn Analysis specific + +echo "BEGIN: config.ocnanalrun" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalrun + +echo "END: config.ocnanalrun" diff --git a/parm/config/gfs/config.ocnanalvrfy b/parm/config/gfs/config.ocnanalvrfy new file mode 100644 index 0000000000..4eda451853 --- /dev/null +++ b/parm/config/gfs/config.ocnanalvrfy @@ -0,0 +1,10 @@ +#!/bin/bash + +########## config.ocnanalvrfy ########## +# Pre Ocn Analysis specific + +echo "BEGIN: config.ocnanalvrfy" + +# Get task specific resources +. "${EXPDIR}/config.resources" ocnanalvrfy +echo "END: config.ocnanalvrfy" diff --git a/parm/config/gfs/config.ocnpost b/parm/config/gfs/config.ocnpost new file mode 100644 index 0000000000..89304df7f4 --- /dev/null +++ b/parm/config/gfs/config.ocnpost @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.ocnpost ########## + +echo "BEGIN: config.ocnpost" + +# Get task specific resources +source $EXPDIR/config.resources ocnpost + +# Convert nemsio files to grib files using post job +#------------------------------------------- + +# No. of concurrent post jobs [0 implies sequential] +export NPOSTGRP=2 + +echo "END: config.ocnpost" diff --git a/parm/config/gfs/config.post b/parm/config/gfs/config.post new file mode 100644 index 0000000000..8015c1b3e7 --- /dev/null +++ b/parm/config/gfs/config.post @@ -0,0 +1,45 @@ +#! /usr/bin/env bash + +########## config.post ########## +# Post specific + +echo "BEGIN: config.post" + +# Get task specific resources +. $EXPDIR/config.resources post + +# No. 
of concurrent post jobs [0 implies sequential] +export NPOSTGRP=42 +export OUTTYP=4 +export MODEL_OUT_FORM=netcdfpara + +# Post driver job that calls gfs_post.sh and downstream jobs +export POSTJJOBSH="$HOMEpost/jobs/JGLOBAL_POST" +export GFSDOWNSH="$HOMEpost/ush/fv3gfs_downstream_nems.sh" +export GFSDWNSH="$HOMEpost/ush/fv3gfs_dwn_nems.sh" + +export POSTGPSH="$HOMEpost/ush/gfs_post.sh" +export POSTGPEXEC="$HOMEpost/exec/upp.x" +export GOESF=NO # goes image +export FLXF=NO # grib2 flux file written by post ## JKH + +export npe_postgp=$npe_post +export nth_postgp=1 + +export GFS_DOWNSTREAM="YES" +#JKHexport downset=2 +export downset=1 ## JKH (removes creation of pgrb2b files) +if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_dwn=16 +elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_dwn=24 +elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_dwn=16 +else + export npe_dwn=24 +fi + +export GRIBVERSION='grib2' +export SENDCOM="YES" + +echo "END: config.post" diff --git a/parm/config/gfs/config.postsnd b/parm/config/gfs/config.postsnd new file mode 100644 index 0000000000..53d66bf4f6 --- /dev/null +++ b/parm/config/gfs/config.postsnd @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.postsnd ########## +# GFS bufr sounding step specific + +echo "BEGIN: config.postsnd" + +# Get task specific resources +. $EXPDIR/config.resources postsnd + +export POSTSNDSH=$HOMEgfs/jobs/JGFS_ATMOS_POSTSND +export ENDHOUR=180 +if [[ "$FHMAX_GFS" -lt "$ENDHOUR" ]] ; then export ENDHOUR=$FHMAX_GFS ; fi + +echo "END: config.postsnd" diff --git a/parm/config/gfs/config.prep b/parm/config/gfs/config.prep new file mode 100644 index 0000000000..b05b82a43e --- /dev/null +++ b/parm/config/gfs/config.prep @@ -0,0 +1,69 @@ +#! /usr/bin/env bash + +########## config.prep ########## +# Prep step specific + +echo "BEGIN: config.prep" + +# Get task specific resources +. $EXPDIR/config.resources prep + +export MAKE_PREPBUFR="YES" # if NO, will copy prepbufr from globaldump +export cdate10=${PDY}${cyc} + +# Relocation and syndata QC +export PROCESS_TROPCY=${PROCESS_TROPCY:-NO} +[[ $RUN_ENVIR == "nco" && $envir == "prod" ]] && export PROCESS_TROPCY="YES" +export DO_RELOCATE="NO" +export TROPCYQCRELOSH="$HOMEgfs/scripts/exglobal_atmos_tropcy_qc_reloc.sh" +export SENDCOM=YES + +export COMINtcvital=${COMINtcvital:-${DMPDIR}/${CDUMP}.${PDY}/${cyc}/atmos} +export COMINsyn=${COMINsyn:-$(compath.py ${envir}/com/gfs/${gfs_ver})/syndat} + +export HOMERELO=$HOMEgfs +export EXECRELO=${HOMERELO}/exec +export FIXRELO=${HOMERELO}/fix/am +export USHRELO=${HOMERELO}/ush + +# Adjust observation error for GFS v16 parallels +# +# NOTE: Remember to set OBERROR in config.anal as PRVT is set below +# +# Set default prepobs_errtable.global +export PRVT=$FIXgsi/prepobs_errtable.global + + +# Set prepobs.errtable.global for GFS v16 retrospective parallels +if [[ $RUN_ENVIR == "emc" ]]; then + if [[ "${PDY}${cyc}" -ge "2019021900" && "${PDY}${cyc}" -lt "2019110706" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019021900 + fi + +# Place GOES-15 AMVs in monitor, assimilate GOES-17 AMVs, assimilate KOMPSAT-5 gps + if [[ "${PDY}${cyc}" -ge "2019110706" && "${PDY}${cyc}" -lt "2020040718" ]]; then + export PRVT=$FIXgsi/gfsv16_historical/prepobs_errtable.global.2019110706 + fi + +# NOTE: +# As of 2020040718, gfsv16_historical/prepobs_errtable.global.2020040718 is +# identical to ../prepobs_errtable.global. 
Thus, the logic below is not +# needed at this time + +# Set observation errors for type 135 (T) & 235 (uv) Canadian AMDAR observations +# if [[ "${PDY}${cyc}" -ge "2020040718" && "${PDY}${cyc}" -lt "YYYMMDDHH" ]]; then +# export PRVT=$EXPDIR/prepobs_errtable.global +# fi + +fi + +# NSST bufr was created with a different set of files prior to 2020102200 +# See comments at the end of +# https://github.com/NOAA-EMC/global-workflow/issues/313 +if [[ "${PDY}${cyc}" -ge "2020102200" ]]; then + export DTYPS_nsst='sfcshp tesac bathy trkob' +else + export DTYPS_nsst='sfcshp dbuoyb mbuoyb tesac bathy trkob' +fi + +echo "END: config.prep" diff --git a/parm/config/gfs/config.preplandobs b/parm/config/gfs/config.preplandobs new file mode 100644 index 0000000000..d69b0f7f59 --- /dev/null +++ b/parm/config/gfs/config.preplandobs @@ -0,0 +1,16 @@ +#! /usr/bin/env bash + +########## config.preplandobs ########## +# Land Obs Prep specific + +echo "BEGIN: config.preplandobs" + +# Get task specific resources +. "${EXPDIR}/config.resources" preplandobs + +export FIMS_NML_TMPL="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/fims.nml.j2" +export IMS_OBS_LIST="${HOMEgfs}/sorc/gdas.cd/parm/land/prep/prep_ims.yaml" +export CALCFIMSEXE="${HOMEgfs}/exec/calcfIMS.exe" +export IMS2IODACONV="${HOMEgfs}/ush/imsfv3_scf2ioda.py" + +echo "END: config.preplandobs" diff --git a/parm/config/gfs/config.resources b/parm/config/gfs/config.resources new file mode 100644 index 0000000000..38efea7882 --- /dev/null +++ b/parm/config/gfs/config.resources @@ -0,0 +1,972 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [[ $# -ne 1 ]]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "coupled_ic aerosol_init" + echo "prep preplandobs" + echo "atmanlinit atmanlrun atmanlfinal" + echo "atmensanlinit atmensanlrun atmensanlfinal" + echo "landanlinit landanlrun landanlfinal" + echo "aeroanlinit aeroanlrun aeroanlfinal" + echo "anal sfcanl analcalc analdiag fcst post vrfy fit2obs metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "init_chem mom6ic ocnpost" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + echo "ocnanalprep ocnanalbmat ocnanalrun ocnanalchkpt ocnanalpost ocnanalvrfy" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +if [[ "${machine}" = "WCOSS2" ]]; then + export npe_node_max=128 +elif [[ "${machine}" = "JET" ]]; then + if [[ "${PARTITION_POST_BATCH}" = "sjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "xjet" ]]; then + export npe_node_max=24 + elif [[ "${PARTITION_BATCH}" = "vjet" ]]; then + export npe_node_max=16 + elif [[ "${PARTITION_BATCH}" = "kjet" ]]; then + export npe_node_max=40 + fi +elif [[ "${machine}" = "HERA" ]]; then + export npe_node_max=40 +elif [[ "${machine}" = "S4" ]]; then + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_node_max=32 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_node_max=20 + fi +elif [[ "${machine}" = "ORION" ]]; then + export npe_node_max=40 +fi + +if [[ "${step}" = "prep" ]]; then + export wtime_prep='00:30:00' + export npe_prep=4 + export npe_node_prep=2 + export nth_prep=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export is_exclusive=True + else + export memory_prep="40G" + fi + +elif [[ "${step}" = "preplandobs" ]]; then + export wtime_preplandobs="00:05:00" + npe_preplandobs=1 + export npe_preplandobs + export nth_preplandobs=1 + npe_node_preplandobs=1 + export npe_node_preplandobs + +elif [[ "${step}" = "aerosol_init" ]]; then + export wtime_aerosol_init="00:05:00" + export npe_aerosol_init=1 + export nth_aerosol_init=1 + npe_node_aerosol_init=$(echo "${npe_node_max} / ${nth_aerosol_init}" | bc) + export npe_node_aerosol_init + export NTASKS=${npe_aerosol_init} + export memory_aerosol_init="6G" + +elif [[ "${step}" = "waveinit" ]]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=12 + export nth_waveinit=1 + npe_node_waveinit=$(echo "${npe_node_max} / ${nth_waveinit}" | bc) + export npe_node_waveinit + export NTASKS=${npe_waveinit} + export memory_waveinit="2GB" + +elif [[ "${step}" = "waveprep" ]]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export nth_waveprep_gfs=1 + npe_node_waveprep=$(echo "${npe_node_max} / ${nth_waveprep}" | bc) + export npe_node_waveprep + npe_node_waveprep_gfs=$(echo "${npe_node_max} / ${nth_waveprep_gfs}" | bc) + export npe_node_waveprep_gfs + export NTASKS=${npe_waveprep} + export NTASKS_gfs=${npe_waveprep_gfs} + export memory_waveprep="100GB" + export memory_waveprep_gfs="150GB" + +elif [[ "${step}" = "wavepostsbs" ]]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + npe_node_wavepostsbs=$(echo "${npe_node_max} / ${nth_wavepostsbs}" | bc) + export npe_node_wavepostsbs + export NTASKS=${npe_wavepostsbs} + export memory_wavepostsbs="10GB" + export 
memory_wavepostsbs_gfs="10GB" + +elif [[ "${step}" = "wavepostbndpnt" ]]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + npe_node_wavepostbndpnt=$(echo "${npe_node_max} / ${nth_wavepostbndpnt}" | bc) + export npe_node_wavepostbndpnt + export NTASKS=${npe_wavepostbndpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavepostbndpntbll" ]]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + npe_node_wavepostbndpntbll=$(echo "${npe_node_max} / ${nth_wavepostbndpntbll}" | bc) + export npe_node_wavepostbndpntbll + export NTASKS=${npe_wavepostbndpntbll} + export is_exclusive=True + +elif [[ "${step}" = "wavepostpnt" ]]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + npe_node_wavepostpnt=$(echo "${npe_node_max} / ${nth_wavepostpnt}" | bc) + export npe_node_wavepostpnt + export NTASKS=${npe_wavepostpnt} + export is_exclusive=True + +elif [[ "${step}" = "wavegempak" ]]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + npe_node_wavegempak=$(echo "${npe_node_max} / ${nth_wavegempak}" | bc) + export npe_node_wavegempak + export NTASKS=${npe_wavegempak} + export memory_wavegempak="1GB" + +elif [[ "${step}" = "waveawipsbulls" ]]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + npe_node_waveawipsbulls=$(echo "${npe_node_max} / ${nth_waveawipsbulls}" | bc) + export npe_node_waveawipsbulls + export NTASKS=${npe_waveawipsbulls} + export is_exclusive=True + +elif [[ "${step}" = "waveawipsgridded" ]]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + npe_node_waveawipsgridded=$(echo "${npe_node_max} / ${nth_waveawipsgridded}" | bc) + export npe_node_waveawipsgridded + export NTASKS=${npe_waveawipsgridded} + export memory_waveawipsgridded_gfs="1GB" + +elif [[ "${step}" = "atmanlinit" ]]; then + + export wtime_atmanlinit="00:10:00" + export npe_atmanlinit=1 + export nth_atmanlinit=1 + npe_node_atmanlinit=$(echo "${npe_node_max} / ${nth_atmanlinit}" | bc) + export npe_node_atmanlinit + export memory_atmanlinit="3072M" + +elif [[ "${step}" = "atmanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmanlrun="00:30:00" + npe_atmanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun + npe_atmanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmanlrun_gfs + export nth_atmanlrun=1 + export nth_atmanlrun_gfs=${nth_atmanlrun} + npe_node_atmanlrun=$(echo "${npe_node_max} / ${nth_atmanlrun}" | bc) + export npe_node_atmanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmanlfinal" ]]; then + + export wtime_atmanlfinal="00:30:00" + export npe_atmanlfinal=${npe_node_max} + export nth_atmanlfinal=1 + npe_node_atmanlfinal=$(echo "${npe_node_max} / ${nth_atmanlfinal}" | bc) + export npe_node_atmanlfinal + export is_exclusive=True + +elif [[ "${step}" = "landanlinit" || "${step}" = "landanlrun" || "${step}" = "landanlfinal" ]]; then + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=1 + layout_y=1 + ;; + *) + echo "FATAL ERROR: Resolution not supported for land analysis'" + exit 1 + esac + + export layout_x + export layout_y + + if [[ 
"${step}" = "landanlinit" || "${step}" = "landanlfinal" ]]; then + declare -x "wtime_${step}"="00:10:00" + declare -x "npe_${step}"=1 + declare -x "nth_${step}"=1 + temp_stepname="nth_${step}" + declare -x "npe_node_${step}"="$(echo "${npe_node_max} / ${!temp_stepname}" | bc)" + declare -x "memory_${step}"="3072M" + elif [[ "${step}" = "landanlrun" ]]; then + export wtime_landanlrun="00:30:00" + npe_landanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_landanlrun + export nth_landanlrun=1 + npe_node_landanlrun=$(echo "${npe_node_max} / ${nth_landanlrun}" | bc) + export npe_node_landanlrun + export is_exclusive=True + fi + +elif [[ "${step}" = "aeroanlinit" ]]; then + + # below lines are for creating JEDI YAML + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution not supported for aerosol analysis'" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlinit="00:10:00" + export npe_aeroanlinit=1 + export nth_aeroanlinit=1 + npe_node_aeroanlinit=$(echo "${npe_node_max} / ${nth_aeroanlinit}" | bc) + export npe_node_aeroanlinit + export memory_aeroanlinit="3072M" + +elif [[ "${step}" = "aeroanlrun" ]]; then + + case ${CASE} in + C768) + layout_x=6 + layout_y=6 + ;; + C384) + layout_x=5 + layout_y=5 + ;; + C192 | C96 | C48) + layout_x=8 + layout_y=8 + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} is not supported, ABORT!" + exit 1 + esac + + export layout_x + export layout_y + + export wtime_aeroanlrun="00:30:00" + npe_aeroanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun + npe_aeroanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_aeroanlrun_gfs + export nth_aeroanlrun=1 + export nth_aeroanlrun_gfs=1 + npe_node_aeroanlrun=$(echo "${npe_node_max} / ${nth_aeroanlrun}" | bc) + export npe_node_aeroanlrun + export is_exclusive=True + +elif [[ "${step}" = "aeroanlfinal" ]]; then + + export wtime_aeroanlfinal="00:10:00" + export npe_aeroanlfinal=1 + export nth_aeroanlfinal=1 + npe_node_aeroanlfinal=$(echo "${npe_node_max} / ${nth_aeroanlfinal}" | bc) + export npe_node_aeroanlfinal + export memory_aeroanlfinal="3072M" + +elif [[ "${step}" = "ocnanalprep" ]]; then + + export wtime_ocnanalprep="00:10:00" + export npe_ocnanalprep=1 + export nth_ocnanalprep=1 + npe_node_ocnanalprep=$(echo "${npe_node_max} / ${nth_ocnanalprep}" | bc) + export npe_node_ocnanalprep + export memory_ocnanalprep="24GB" + +elif [[ "${step}" = "ocnanalbmat" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalbmat="00:30:00" + export npe_ocnanalbmat=${npes} + export nth_ocnanalbmat=1 + export is_exclusive=True + npe_node_ocnanalbmat=$(echo "${npe_node_max} / ${nth_ocnanalbmat}" | bc) + export npe_node_ocnanalbmat + +elif [[ "${step}" = "ocnanalrun" ]]; then + npes=16 + case ${CASE} in + C384) + npes=480 + ;; + C48) + npes=16 + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + + export wtime_ocnanalrun="00:30:00" + export npe_ocnanalrun=${npes} + export nth_ocnanalrun=1 + export is_exclusive=True + npe_node_ocnanalrun=$(echo "${npe_node_max} / ${nth_ocnanalrun}" | bc) + export npe_node_ocnanalrun + +elif [[ "${step}" = "ocnanalchkpt" ]]; then + + export wtime_ocnanalchkpt="00:10:00" + export npe_ocnanalchkpt=1 + export nth_ocnanalchkpt=1 + npe_node_ocnanalchkpt=$(echo "${npe_node_max} / 
${nth_ocnanalchkpt}" | bc) + export npe_node_ocnanalchkpt + case ${CASE} in + C384) + export memory_ocnanalchkpt="128GB" + ;; + C48) + export memory_ocnanalchkpt="32GB" + ;; + *) + echo "FATAL: Resolution not supported'" + exit 1 + esac + +elif [[ "${step}" = "ocnanalpost" ]]; then + + export wtime_ocnanalpost="00:30:00" + export npe_ocnanalpost=${npe_node_max} + export nth_ocnanalpost=1 + npe_node_ocnanalpost=$(echo "${npe_node_max} / ${nth_ocnanalpost}" | bc) + export npe_node_ocnanalpost + +elif [[ "${step}" = "ocnanalvrfy" ]]; then + + export wtime_ocnanalvrfy="00:35:00" + export npe_ocnanalvrfy=1 + export nth_ocnanalvrfy=1 + npe_node_ocnanalvrfy=$(echo "${npe_node_max} / ${nth_ocnanalvrfy}" | bc) + export npe_node_ocnanalvrfy + export memory_ocnanalvrfy="24GB" + +elif [[ "${step}" = "anal" ]]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=5 + export npe_anal_gfs=825 + export nth_anal_gfs=5 + if [[ "${machine}" = "WCOSS2" ]]; then + export nth_anal=8 + export nth_anal_gfs=8 + fi + if [[ "${CASE}" = "C384" ]]; then + export npe_anal=160 + export npe_anal_gfs=160 + export nth_anal=10 + export nth_anal_gfs=10 + if [[ "${machine}" = "S4" ]]; then + #On the S4-s4 partition, this is accomplished by increasing the task + #count to a multiple of 32 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=416 + export npe_anal_gfs=416 + fi + #S4 is small, so run this task with just 1 thread + export nth_anal=1 + export nth_anal_gfs=1 + export wtime_anal="02:00:00" + fi + fi + if [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_anal=84 + export npe_anal_gfs=84 + if [[ "${machine}" = "S4" ]]; then + export nth_anal=4 + export nth_anal_gfs=4 + #Adjust job count for S4 + if [[ "${PARTITION_BATCH}" = "s4" ]]; then + export npe_anal=88 + export npe_anal_gfs=88 + elif [[ "${PARTITION_BATCH}" = "ivy" ]]; then + export npe_anal=90 + export npe_anal_gfs=90 + fi + fi + fi + npe_node_anal=$(echo "${npe_node_max} / ${nth_anal}" | bc) + export npe_node_anal + export nth_cycle=${nth_anal} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "analcalc" ]]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export ntasks="${npe_analcalc}" + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + npe_node_analcalc=$(echo "${npe_node_max} / ${nth_analcalc}" | bc) + export npe_node_analcalc + export is_exclusive=True + +elif [[ "${step}" = "analdiag" ]]; then + + export wtime_analdiag="00:15:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + npe_node_analdiag=$(echo "${npe_node_max} / ${nth_analdiag}" | bc) + export npe_node_analdiag + export memory_analdiag="48GB" + +elif [[ "${step}" = "sfcanl" ]]; then + + export wtime_sfcanl="00:10:00" + export npe_sfcanl=6 + export nth_sfcanl=1 + npe_node_sfcanl=$(echo "${npe_node_max} / ${nth_sfcanl}" | bc) + export npe_node_sfcanl + export is_exclusive=True + +elif [[ "${step}" = "fcst" || "${step}" = "efcs" ]]; then + + export is_exclusive=True + + if [[ "${step}" = "fcst" ]]; then + _CDUMP_LIST=${CDUMP:-"gdas gfs"} + elif [[ "${step}" = "efcs" ]]; then + _CDUMP_LIST=${CDUMP:-"enkfgdas enkfgfs"} + fi + + # During workflow creation, we need resources for all CDUMPs and CDUMP is undefined + for _CDUMP in ${_CDUMP_LIST}; do + if [[ "${_CDUMP}" =~ "gfs" ]]; then + export layout_x=${layout_x_gfs} + export 
layout_y=${layout_y_gfs} + export WRITE_GROUP=${WRITE_GROUP_GFS} + export WRTTASK_PER_GROUP_PER_THREAD=${WRTTASK_PER_GROUP_PER_THREAD_GFS} + ntasks_fv3=${ntasks_fv3_gfs} + ntasks_quilt=${ntasks_quilt_gfs} + nthreads_fv3=${nthreads_fv3_gfs} + fi + + # PETS for the atmosphere dycore + (( FV3PETS = ntasks_fv3 * nthreads_fv3 )) + echo "FV3 using (nthreads, PETS) = (${nthreads_fv3}, ${FV3PETS})" + + # PETS for quilting + if [[ "${QUILTING:-}" = ".true." ]]; then + (( QUILTPETS = ntasks_quilt * nthreads_fv3 )) + (( WRTTASK_PER_GROUP = WRTTASK_PER_GROUP_PER_THREAD )) + export WRTTASK_PER_GROUP + else + QUILTPETS=0 + fi + echo "QUILT using (nthreads, PETS) = (${nthreads_fv3}, ${QUILTPETS})" + + # Total PETS for the atmosphere component + ATMTHREADS=${nthreads_fv3} + (( ATMPETS = FV3PETS + QUILTPETS )) + export ATMPETS ATMTHREADS + echo "FV3ATM using (nthreads, PETS) = (${ATMTHREADS}, ${ATMPETS})" + + # Total PETS for the coupled model (starting w/ the atmosphere) + NTASKS_TOT=${ATMPETS} + + # The mediator PETS can overlap with other components; they usually land on the atmosphere tasks. + # However, it is suggested to limit the mediator PETS to 300, as a larger count may cause slow performance. + # See https://docs.google.com/document/d/1bKpi-52t5jIfv2tuNHmQkYUe3hkKsiG_DG_s6Mnukog/edit + # TODO: Update reference when moved to ufs-weather-model RTD + MEDTHREADS=${nthreads_mediator:-1} + MEDPETS=${MEDPETS:-ATMPETS} + [[ "${MEDPETS}" -gt 300 ]] && MEDPETS=300 + export MEDPETS MEDTHREADS + echo "MEDIATOR using (threads, PETS) = (${MEDTHREADS}, ${MEDPETS})" + + if [[ "${DO_AERO}" = "YES" ]]; then + # GOCART shares the same grid and forecast tasks as FV3 (do not add write grid component tasks). + (( CHMTHREADS = ATMTHREADS )) + (( CHMPETS = FV3PETS )) + # Do not add to NTASKS_TOT + export CHMPETS CHMTHREADS + echo "GOCART using (threads, PETS) = (${CHMTHREADS}, ${CHMPETS})" + fi + + if [[ "${DO_WAVE}" = "YES" ]]; then + (( WAVPETS = ntasks_ww3 * nthreads_ww3 )) + (( WAVTHREADS = nthreads_ww3 )) + export WAVPETS WAVTHREADS + echo "WW3 using (threads, PETS) = (${WAVTHREADS}, ${WAVPETS})" + (( NTASKS_TOT = NTASKS_TOT + WAVPETS )) + fi + + if [[ "${DO_OCN}" = "YES" ]]; then + (( OCNPETS = ntasks_mom6 * nthreads_mom6 )) + (( OCNTHREADS = nthreads_mom6 )) + export OCNPETS OCNTHREADS + echo "MOM6 using (threads, PETS) = (${OCNTHREADS}, ${OCNPETS})" + (( NTASKS_TOT = NTASKS_TOT + OCNPETS )) + fi + + if [[ "${DO_ICE}" = "YES" ]]; then + (( ICEPETS = ntasks_cice6 * nthreads_cice6 )) + (( ICETHREADS = nthreads_cice6 )) + export ICEPETS ICETHREADS + echo "CICE6 using (threads, PETS) = (${ICETHREADS}, ${ICEPETS})" + (( NTASKS_TOT = NTASKS_TOT + ICEPETS )) + fi + + echo "Total PETS for ${_CDUMP} = ${NTASKS_TOT}" + + if [[ "${_CDUMP}" =~ "gfs" ]]; then + declare -x "npe_${step}_gfs"="${NTASKS_TOT}" + declare -x "nth_${step}_gfs"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}_gfs"="${npe_node_max}" + else + declare -x "npe_${step}"="${NTASKS_TOT}" + declare -x "nth_${step}"=1 # ESMF handles threading for the UFS-weather-model + declare -x "npe_node_${step}"="${npe_node_max}" + fi + + done + + case ${CASE} in + "C48" | "C96" | "C192") + declare -x "wtime_${step}"="00:30:00" + declare -x "wtime_${step}_gfs"="03:00:00" + ;; + "C384" | "C768" | "C1152") + declare -x "wtime_${step}"="01:00:00" + #JKHdeclare -x "wtime_${step}_gfs"="06:00:00" + declare -x "wtime_${step}_gfs"="04:00:00" ## JKH - make walltime smaller + ;; + *) + echo "FATAL ERROR: Resolution ${CASE} not supported in ${step}" + exit 1 +
+elif [[ "${step}" = "ocnpost" ]]; then + + export wtime_ocnpost="00:30:00" + export npe_ocnpost=1 + export npe_node_ocnpost=1 + export nth_ocnpost=1 + export memory_ocnpost="96G" + if [[ "${machine}" == "JET" ]]; then + # JET only has 88GB of requestable memory per node + # so a second node is required to meet the requirement + npe_ocnpost=2 + fi + +elif [[ "${step}" = "post" ]]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="00:20:00" ## JKH - make walltime smaller + #JKH export wtime_post_gfs="01:00:00" + #JKH export npe_post=126 + export npe_post=${npe_node_max} ## JKH - change to use 1 node for post + res=$(echo "${CASE}" | cut -c2-) + if (( npe_post > res )); then + export npe_post=${res} + fi + export nth_post=1 + export npe_node_post=${npe_post} + export npe_node_post_gfs=${npe_post} + export npe_node_dwn=${npe_node_max} + if [[ "${npe_node_post}" -gt "${npe_node_max}" ]]; then export npe_node_post=${npe_node_max} ; fi + if [[ "${npe_node_post_gfs}" -gt "${npe_node_max}" ]]; then export npe_node_post_gfs=${npe_node_max} ; fi + export is_exclusive=True + +elif [[ "${step}" = "wafs" ]]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=${npe_wafs} + export nth_wafs=1 + export memory_wafs="1GB" + +elif [[ "${step}" = "wafsgcip" ]]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export nth_wafsgcip=1 + export npe_node_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [[ "${step}" = "wafsgrib2" ]]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export nth_wafsgrib2=1 + npe_node_wafsgrib2=$(echo "${npe_node_max} / ${nth_wafsgrib2}" | bc) + export npe_node_wafsgrib2 + export memory_wafsgrib2="80GB" + +elif [[ "${step}" = "wafsblending" ]]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export nth_wafsblending=1 + npe_node_wafsblending=$(echo "${npe_node_max} / ${nth_wafsblending}" | bc) + export npe_node_wafsblending + export memory_wafsblending="15GB" + +elif [[ "${step}" = "wafsgrib20p25" ]]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export nth_wafsgrib20p25=1 + npe_node_wafsgrib20p25=$(echo "${npe_node_max} / ${nth_wafsgrib20p25}" | bc) + export npe_node_wafsgrib20p25 + export memory_wafsgrib20p25="80GB" + +elif [[ "${step}" = "wafsblending0p25" ]]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export nth_wafsblending0p25=1 + npe_node_wafsblending0p25=$(echo "${npe_node_max} / ${nth_wafsblending0p25}" | bc) + export npe_node_wafsblending0p25 + export memory_wafsblending0p25="15GB" + +elif [[ "${step}" = "vrfy" ]]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + if [[ "${machine}" == "HERA" ]]; then + export memory_vrfy="16384M" + fi + export is_exclusive=True + +elif [[ "${step}" = "fit2obs" ]]; then + + export wtime_fit2obs="00:20:00" + export npe_fit2obs=3 + export nth_fit2obs=1 + export npe_node_fit2obs=1 + export memory_fit2obs="20G" + if [[ "${machine}" == "WCOSS2" ]]; then export npe_node_fit2obs=3 ; fi + +elif [[ "${step}" = "metp" ]]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + export is_exclusive=True + +elif [[ "${step}" = "echgres" ]];
then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=${npe_node_max} + export npe_node_echgres=1 + if [[ "${machine}" = "WCOSS2" ]]; then + export memory_echgres="200GB" + fi + +elif [[ "${step}" = "init" ]]; then + + export wtime_init="00:30:00" + export npe_init=24 + export nth_init=1 + export npe_node_init=6 + export memory_init="70G" + +elif [[ "${step}" = "init_chem" ]]; then + + export wtime_init_chem="00:30:00" + export npe_init_chem=1 + export npe_node_init_chem=1 + export is_exclusive=True + +elif [[ "${step}" = "mom6ic" ]]; then + + export wtime_mom6ic="00:30:00" + export npe_mom6ic=24 + export npe_node_mom6ic=24 + export is_exclusive=True + +elif [[ "${step}" = "arch" || "${step}" = "earc" || "${step}" = "getic" ]]; then + + eval "export wtime_${step}='06:00:00'" + eval "export npe_${step}=1" + eval "export npe_node_${step}=1" + eval "export nth_${step}=1" + eval "export memory_${step}=4096M" + if [[ "${machine}" = "WCOSS2" ]]; then + eval "export memory_${step}=50GB" + fi + +elif [[ "${step}" = "coupled_ic" ]]; then + + export wtime_coupled_ic="00:15:00" + export npe_coupled_ic=1 + export npe_node_coupled_ic=1 + export nth_coupled_ic=1 + export is_exclusive=True + +elif [[ "${step}" = "atmensanlinit" ]]; then + + export wtime_atmensanlinit="00:10:00" + export npe_atmensanlinit=1 + export nth_atmensanlinit=1 + npe_node_atmensanlinit=$(echo "${npe_node_max} / ${nth_atmensanlinit}" | bc) + export npe_node_atmensanlinit + export memory_atmensanlinit="3072M" + +elif [[ "${step}" = "atmensanlrun" ]]; then + + # make below case dependent later + export layout_x=1 + export layout_y=1 + + export wtime_atmensanlrun="00:30:00" + npe_atmensanlrun=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun + npe_atmensanlrun_gfs=$(echo "${layout_x} * ${layout_y} * 6" | bc) + export npe_atmensanlrun_gfs + export nth_atmensanlrun=1 + export nth_atmensanlrun_gfs=${nth_atmensanlrun} + npe_node_atmensanlrun=$(echo "${npe_node_max} / ${nth_atmensanlrun}" | bc) + export npe_node_atmensanlrun + export is_exclusive=True + +elif [[ "${step}" = "atmensanlfinal" ]]; then + + export wtime_atmensanlfinal="00:30:00" + export npe_atmensanlfinal=${npe_node_max} + export nth_atmensanlfinal=1 + npe_node_atmensanlfinal=$(echo "${npe_node_max} / ${nth_atmensanlfinal}" | bc) + export npe_node_atmensanlfinal + export is_exclusive=True + +elif [[ "${step}" = "eobs" || "${step}" = "eomg" ]]; then + + export wtime_eobs="00:15:00" + export wtime_eomg="01:00:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eobs=200 + elif [[ "${CASE}" = "C384" ]]; then + export npe_eobs=100 + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eobs=40 + fi + export npe_eomg=${npe_eobs} + export nth_eobs=2 + export nth_eomg=${nth_eobs} + npe_node_eobs=$(echo "${npe_node_max} / ${nth_eobs}" | bc) + export npe_node_eobs + export npe_node_eomg=${npe_node_eobs} + export is_exclusive=True + #The number of tasks and cores used must be the same for eobs + #For S4, this is accomplished by running 10 tasks/node + if [[ "${machine}" = "S4" ]]; then + export npe_node_eobs=10 + fi + +elif [[ "${step}" = "ediag" ]]; then + + export wtime_ediag="00:15:00" + export npe_ediag=48 + export nth_ediag=1 + npe_node_ediag=$(echo "${npe_node_max} / ${nth_ediag}" | bc) + export npe_node_ediag + export memory_ediag="30GB" + +elif [[ "${step}" = "eupd" ]]; then + + export wtime_eupd="00:30:00" + if [[ "${CASE}" = "C768" ]]; then + export npe_eupd=480 + export nth_eupd=6 + if 
[[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + fi + elif [[ "${CASE}" = "C384" ]]; then + export npe_eupd=270 + export nth_eupd=2 + if [[ "${machine}" = "WCOSS2" ]]; then + export npe_eupd=315 + export nth_eupd=14 + elif [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=8 + elif [[ "${machine}" = "S4" ]]; then + export npe_eupd=160 + export nth_eupd=2 + fi + elif [[ "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then + export npe_eupd=42 + export nth_eupd=2 + if [[ "${machine}" = "HERA" || "${machine}" = "JET" ]]; then + export nth_eupd=4 + fi + fi + npe_node_eupd=$(echo "${npe_node_max} / ${nth_eupd}" | bc) + export npe_node_eupd + export is_exclusive=True + +elif [[ "${step}" = "ecen" ]]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + if [[ "${machine}" = "HERA" ]]; then export nth_ecen=6; fi + if [[ "${CASE}" = "C384" || "${CASE}" = "C192" || "${CASE}" = "C96" || "${CASE}" = "C48" ]]; then export nth_ecen=2; fi + npe_node_ecen=$(echo "${npe_node_max} / ${nth_ecen}" | bc) + export npe_node_ecen + export nth_cycle=${nth_ecen} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export is_exclusive=True + +elif [[ "${step}" = "esfc" ]]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export nth_esfc=1 + npe_node_esfc=$(echo "${npe_node_max} / ${nth_esfc}" | bc) + export npe_node_esfc + export nth_cycle=${nth_esfc} + npe_node_cycle=$(echo "${npe_node_max} / ${nth_cycle}" | bc) + export npe_node_cycle + export memory_esfc="80GB" + +elif [[ "${step}" = "epos" ]]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + if [[ "${machine}" == "HERA" ]]; then + export nth_epos=6 + fi + npe_node_epos=$(echo "${npe_node_max} / ${nth_epos}" | bc) + export npe_node_epos + export is_exclusive=True + +elif [[ "${step}" = "postsnd" ]]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + postsnd_req_cores=$(echo "${npe_node_postsnd} * ${nth_postsnd}" | bc) + if [[ ${postsnd_req_cores} -gt "${npe_node_max}" ]]; then + npe_node_postsnd=$(echo "${npe_node_max} / ${nth_postsnd}" | bc) + export npe_node_postsnd + fi + export is_exclusive=True + +elif [[ "${step}" = "awips" ]]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="3GB" + +elif [[ "${step}" = "gempak" ]]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="4GB" + export memory_gempak_gfs="2GB" + +else + + echo "Invalid step = ${step}, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/parm/config/gfs/config.resources.nco.static b/parm/config/gfs/config.resources.nco.static new file mode 100644 index 0000000000..d98e985b95 --- /dev/null +++ b/parm/config/gfs/config.resources.nco.static @@ -0,0 +1,344 @@ +#! /usr/bin/env bash + +########## config.resources ########## +# Set resource information for job tasks +# e.g. walltime, node, cores per node, memory etc. + +if [ $# -ne 1 ]; then + + echo "Must specify an input task argument to set resource variables!" 
+ echo "argument can be any one of the following:" + echo "anal analcalc analdiag fcst post vrfy metp arch echgres" + echo "eobs ediag eomg eupd ecen esfc efcs epos earc" + echo "waveinit waveprep wavepostsbs wavepostbndpnt wavepostbndpntbll wavepostpnt" + echo "wavegempak waveawipsbulls waveawipsgridded" + echo "postsnd awips gempak" + echo "wafs wafsgrib2 wafsblending wafsgrib20p25 wafsblending0p25 wafsgcip" + exit 1 + +fi + +step=$1 + +echo "BEGIN: config.resources" + +export npe_node_max=128 + +if [ $step = "prep" -o $step = "prepbufr" ]; then + + eval "export wtime_$step='00:45:00'" + eval "export npe_$step=4" + eval "export npe_node_$step=2" + eval "export nth_$step=1" + +elif [ $step = "waveinit" ]; then + + export wtime_waveinit="00:10:00" + export npe_waveinit=11 + export nth_waveinit=1 + export npe_node_waveinit=$npe_waveinit + export NTASKS=$npe_waveinit + export memory_waveinit="2GB" + +elif [ $step = "waveprep" ]; then + + export wtime_waveprep="00:10:00" + export npe_waveprep=5 + export npe_waveprep_gfs=65 + export nth_waveprep=1 + export npe_node_waveprep=$npe_waveprep + export npe_node_waveprep_gfs=$npe_waveprep_gfs + export memory_waveprep="100GB" + export memory_waveprep_gfs="220GB" + export NTASKS=$npe_waveprep + export NTASKS_gfs=$npe_waveprep_gfs + +elif [ $step = "wavepostsbs" ]; then + + export wtime_wavepostsbs="00:20:00" + export wtime_wavepostsbs_gfs="03:00:00" + export npe_wavepostsbs=8 + export nth_wavepostsbs=1 + export npe_node_wavepostsbs=$npe_wavepostsbs + export memory_wavepostsbs="10GB" + export memory_wavepostsbs_gfs="40GB" + export NTASKS=$npe_wavepostsbs + +elif [ $step = "wavepostbndpnt" ]; then + + export wtime_wavepostbndpnt="01:00:00" + export npe_wavepostbndpnt=240 + export nth_wavepostbndpnt=1 + export npe_node_wavepostbndpnt=80 + export NTASKS=$npe_wavepostbndpnt + +elif [ $step = "wavepostbndpntbll" ]; then + + export wtime_wavepostbndpntbll="01:00:00" + export npe_wavepostbndpntbll=448 + export nth_wavepostbndpntbll=1 + export npe_node_wavepostbndpntbll=112 + export NTASKS=$npe_wavepostbndpntbll + +elif [ $step = "wavepostpnt" ]; then + + export wtime_wavepostpnt="01:30:00" + export npe_wavepostpnt=200 + export nth_wavepostpnt=1 + export npe_node_wavepostpnt=50 + export NTASKS=$npe_wavepostpnt + +elif [ $step = "wavegempak" ]; then + + export wtime_wavegempak="02:00:00" + export npe_wavegempak=1 + export nth_wavegempak=1 + export npe_node_wavegempak=$npe_wavegempak + export NTASKS=$npe_wavegempak + export memory_wavegempak="10GB" + +elif [ $step = "waveawipsbulls" ]; then + + export wtime_waveawipsbulls="00:20:00" + export npe_waveawipsbulls=1 + export nth_waveawipsbulls=1 + export npe_node_waveawipsbulls=$(echo "$npe_node_max / $nth_waveawipsbulls" | bc) + export NTASKS=$npe_waveawipsbulls + +elif [ $step = "waveawipsgridded" ]; then + + export wtime_waveawipsgridded="02:00:00" + export npe_waveawipsgridded=1 + export nth_waveawipsgridded=1 + export npe_node_waveawipsgridded=$(echo "$npe_node_max / $nth_waveawipsgridded" | bc) + export NTASKS=$npe_waveawipsgridded + export memory_waveawipsgridded_gfs="2GB" + +elif [ $step = "anal" ]; then + + export wtime_anal="00:50:00" + export wtime_anal_gfs="00:40:00" + export npe_anal=780 + export nth_anal=8 + export npe_anal_gfs=825 + export nth_anal_gfs=8 + export npe_node_anal=15 + export nth_cycle=$npe_node_max + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "analcalc" ]; then + + export wtime_analcalc="00:10:00" + export npe_analcalc=127 + export 
ntasks=$npe_analcalc + export nth_analcalc=1 + export nth_echgres=4 + export nth_echgres_gfs=12 + export npe_node_analcalc=$npe_node_max + +elif [ $step = "analdiag" ]; then + + export wtime_analdiag="00:10:00" + export npe_analdiag=96 # Should be at least twice npe_ediag + export nth_analdiag=1 + export npe_node_analdiag=$npe_analdiag + export memory_analdiag="48GB" + +elif [ $step = "fcst" ]; then + + export wtime_fcst="01:30:00" + export wtime_fcst_gfs="02:30:00" + export npe_fcst=$(echo "$layout_x * $layout_y * 6" | bc) + export npe_fcst_gfs=$(echo "$layout_x_gfs * $layout_y_gfs * 6" | bc) + export nth_fcst=${nth_fv3:-2} + export nth_fcst_gfs=${nth_fv3_gfs:-2} + export npe_node_fcst=32 + export npe_node_fcst_gfs=24 + +elif [ $step = "post" ]; then + + export wtime_post="00:12:00" + export wtime_post_gfs="01:00:00" + export npe_post=126 + export nth_post=1 + export npe_node_post=$npe_post + export npe_node_post_gfs=$npe_post + export npe_node_dwn=$npe_node_max + +elif [ $step = "wafs" ]; then + + export wtime_wafs="00:30:00" + export npe_wafs=1 + export npe_node_wafs=$npe_wafs + export nth_wafs=1 + export memory_wafs="5GB" + +elif [ $step = "wafsgcip" ]; then + + export wtime_wafsgcip="00:30:00" + export npe_wafsgcip=2 + export npe_node_wafsgcip=$npe_wafsgcip + export nth_wafsgcip=1 + export memory_wafsgcip="50GB" + +elif [ $step = "wafsgrib2" ]; then + + export wtime_wafsgrib2="00:30:00" + export npe_wafsgrib2=18 + export npe_node_wafsgrib2=$npe_wafsgrib2 + export nth_wafsgrib2=1 + export memory_wafsgrib2="80GB" + +elif [ $step = "wafsblending" ]; then + + export wtime_wafsblending="00:30:00" + export npe_wafsblending=1 + export npe_node_wafsblending=$npe_wafsblending + export nth_wafsblending=1 + export memory_wafsblending="1GB" + +elif [ $step = "wafsgrib20p25" ]; then + + export wtime_wafsgrib20p25="00:30:00" + export npe_wafsgrib20p25=11 + export npe_node_wafsgrib20p25=$npe_wafsgrib20p25 + export nth_wafsgrib20p25=1 + export memory_wafsgrib20p25="80GB" + +elif [ $step = "wafsblending0p25" ]; then + + export wtime_wafsblending0p25="00:30:00" + export npe_wafsblending0p25=1 + export npe_node_wafsblending0p25=$npe_wafsblending0p25 + export nth_wafsblending0p25=1 + export memory_wafsblending0p25="15GB" + +elif [ $step = "vrfy" ]; then + + export wtime_vrfy="03:00:00" + export wtime_vrfy_gfs="06:00:00" + export npe_vrfy=3 + export nth_vrfy=1 + export npe_node_vrfy=1 + export npe_vrfy_gfs=1 + export npe_node_vrfy_gfs=1 + +elif [ $step = "metp" ]; then + + export nth_metp=1 + export wtime_metp="03:00:00" + export npe_metp=4 + export npe_node_metp=4 + export wtime_metp_gfs="06:00:00" + export npe_metp_gfs=4 + export npe_node_metp_gfs=4 + +elif [ $step = "echgres" ]; then + + export wtime_echgres="00:10:00" + export npe_echgres=3 + export nth_echgres=1 + export npe_node_echgres=3 + export memory_echgres="200GB" + +elif [ $step = "arch" -o $step = "earc" -o $step = "getic" ]; then + + eval "export wtime_$step='06:00:00'" + eval "export npe_$step=1" + eval "export npe_node_$step=1" + eval "export nth_$step=1" + eval "export memory_$step=50GB" + +elif [ $step = "eobs" -o $step = "eomg" ]; then + + + export wtime_eobs="00:10:00" + export wtime_eomg="01:00:00" + export npe_eobs=480 + export nth_eobs=3 + export npe_node_eobs=40 + +elif [ $step = "ediag" ]; then + + export wtime_ediag="00:06:00" + export npe_ediag=48 + export nth_ediag=1 + export npe_node_ediag=$npe_node_max + export memory_ediag="28GB" + +elif [ $step = "eupd" ]; then + + export wtime_eupd="00:30:00" + export npe_eupd=315 + 
export nth_eupd=14 + export npe_node_eupd=$(echo "$npe_node_max / $nth_eupd" | bc) + +elif [ $step = "ecen" ]; then + + export wtime_ecen="00:10:00" + export npe_ecen=80 + export nth_ecen=4 + export npe_node_ecen=$(echo "$npe_node_max / $nth_ecen" | bc) + export nth_cycle=$nth_ecen + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + +elif [ $step = "esfc" ]; then + + export wtime_esfc="00:06:00" + export npe_esfc=80 + export npe_node_esfc=$npe_esfc + export nth_esfc=1 + export nth_cycle=$nth_esfc + export npe_node_cycle=$(echo "$npe_node_max / $nth_cycle" | bc) + export memory_esfc="80GB" + +elif [ $step = "efcs" ]; then + + export wtime_efcs="00:40:00" + export npe_efcs=$(echo "$layout_x * $layout_y * 6" | bc) + export nth_efcs=${nth_fv3:-2} + export npe_node_efcs=$(echo "$npe_node_max / $nth_efcs" | bc) + +elif [ $step = "epos" ]; then + + export wtime_epos="00:15:00" + export npe_epos=80 + export nth_epos=4 + export npe_node_epos=$(echo "$npe_node_max / $nth_epos" | bc) + +elif [ $step = "postsnd" ]; then + + export wtime_postsnd="02:00:00" + export npe_postsnd=40 + export nth_postsnd=8 + export npe_node_postsnd=10 + export npe_postsndcfp=9 + export npe_node_postsndcfp=1 + +elif [ $step = "awips" ]; then + + export wtime_awips="03:30:00" + export npe_awips=1 + export npe_node_awips=1 + export nth_awips=1 + export memory_awips="10GB" + +elif [ $step = "gempak" ]; then + + export wtime_gempak="03:00:00" + export npe_gempak=2 + export npe_gempak_gfs=28 + export npe_node_gempak=2 + export npe_node_gempak_gfs=28 + export nth_gempak=1 + export memory_gempak="20GB" + export memory_gempak_gfs="200GB" + +else + + echo "Invalid step = $step, ABORT!" + exit 2 + +fi + +echo "END: config.resources" diff --git a/parm/config/gfs/config.sfcanl b/parm/config/gfs/config.sfcanl new file mode 100644 index 0000000000..9592fb77c9 --- /dev/null +++ b/parm/config/gfs/config.sfcanl @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.sfcanl ########## +# GFS surface analysis specific + +echo "BEGIN: config.sfcanl" + +# Get task specific resources +. $EXPDIR/config.resources sfcanl + +echo "END: config.sfcanl" diff --git a/parm/config/gfs/config.ufs b/parm/config/gfs/config.ufs new file mode 100644 index 0000000000..0f3d381cf7 --- /dev/null +++ b/parm/config/gfs/config.ufs @@ -0,0 +1,380 @@ +#! /usr/bin/env bash + +########## config.ufs ########## +# UFS model resolution specific parameters +# e.g. time-step, processor layout, physics and dynamics parameters +# This config sets default variables for FV3, MOM6, CICE6 for their resolutions +# User can over-ride after sourcing this config file + +echo "BEGIN: config.ufs" + +if [ $# -le 1 ]; then + + echo "Must specify an input resolution argument to set variables!" + echo "argument can be any one of the following:" + echo "--fv3 C48|C96|C192|C384|C768|C1152|C3072" + echo "--mom6 500|100|025" + echo "--cice6 500|100|025" + echo "--ww3 gnh_10m;aoc_9km;gsh_15m|gwes_30m|glo_025|mx025" + + exit 1 + +fi + +# Initialize +skip_mom6=true +skip_cice6=true +skip_ww3=true +skip_mediator=true + +# Loop through named arguments +while [[ $# -gt 0 ]]; do + key="$1" + case "${key}" in + "--fv3") + fv3_res="$2" + ;; + "--mom6") + mom6_res="$2" + skip_mom6=false + ;; + "--cice6") + cice6_res="$2" + skip_cice6=false + ;; + "--ww3") + ww3_res="$2" + skip_ww3=false + ;; + *) # unknown option + echo "FATAL ERROR: Unknown option: ${key}, ABORT!" 
+ exit 1 + ;; + esac + shift + shift +done + +# Mediator is required if any of the non-ATM components are used +if [[ "${skip_mom6}" == "false" ]] || [[ "${skip_cice6}" == "false" ]] || [[ "${skip_ww3}" == "false" ]]; then + skip_mediator=false +fi + +case "${machine}" in + "WCOSS2") + npe_node_max=128 + ;; + "HERA" | "ORION") + npe_node_max=40 + ;; + "JET") + case "${PARTITION_BATCH}" in + "xjet") + npe_node_max=24 + ;; + "vjet" | "sjet") + npe_node_max=16 + ;; + "kjet") + npe_node_max=40 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; + "S4") + case "${PARTITION_BATCH}" in + "s4") + npe_node_max=32 + ;; + "ivy") + npe_node_max=20 + ;; + *) + echo "FATAL ERROR: Unsupported ${machine} PARTITION_BATCH = ${PARTITION_BATCH}, ABORT!" + exit 1 + ;; + esac + ;; +esac +export npe_node_max + +# (Standard) Model resolution dependent variables +case "${fv3_res}" in + "C48") + export DELTIM=1200 + export layout_x=1 + export layout_y=1 + export layout_x_gfs=1 + export layout_y_gfs=1 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.071,2.1,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C96") + export DELTIM=600 + export layout_x=2 + export layout_y=2 + export layout_x_gfs=2 + export layout_y_gfs=2 + export nthreads_fv3=1 + export nthreads_fv3_gfs=1 + export cdmbgwd="0.14,1.8,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=1 + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=1 + ;; + "C192") + export DELTIM=450 + export layout_x=4 + export layout_y=6 + export layout_x_gfs=4 + export layout_y_gfs=6 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="0.23,1.5,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=5 + ;; + "C384") + export DELTIM=300 + export layout_x=6 + export layout_y=8 + export layout_x_gfs=8 + export layout_y_gfs=8 + export nthreads_fv3=1 + export nthreads_fv3_gfs=2 + export cdmbgwd="1.1,0.72,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=8 + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + ;; + "C768") + export DELTIM=150 + export layout_x=8 + export layout_y=12 + export layout_x_gfs=12 + #JKHexport layout_y_gfs=16 + export layout_y_gfs=12 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.15,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 + #JKHexport WRITE_GROUP_GFS=4 + if [[ "${machine}" == "HERA" ]] ; then + export WRITE_GROUP_GFS=1 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=14 + elif [[ "${PARTITION_BATCH}" == "vjet" ]] ; then + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 + elif [[ "${PARTITION_BATCH}" == "xjet" ]] ; then + export WRITE_GROUP_GFS=2 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=12 + fi + ;; + "C1152") + export DELTIM=120 + export layout_x=8 + export layout_y=16 + export layout_x_gfs=8 + export layout_y_gfs=16 + export nthreads_fv3=4 + export 
nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.10,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + "C3072") + export DELTIM=90 + export layout_x=16 + export layout_y=32 + export layout_x_gfs=16 + export layout_y_gfs=32 + export nthreads_fv3=4 + export nthreads_fv3_gfs=4 + export cdmbgwd="4.0,0.05,1.0,1.0" # mountain blocking, ogwd, cgwd, cgwd src scaling + export WRITE_GROUP=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE=10 # TODO: refine these numbers when a case is available + export WRITE_GROUP_GFS=4 + export WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS=10 # TODO: refine these numbers when a case is available + ;; + *) + echo "FATAL ERROR: Unsupported FV3 resolution = ${fv3_res}, ABORT!" + exit 1 + ;; +esac + +(( WRTTASK_PER_GROUP_PER_THREAD = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE * 6 )) +(( WRTTASK_PER_GROUP_PER_THREAD_GFS = WRTTASK_PER_GROUP_PER_THREAD_PER_TILE_GFS * 6 )) +export WRTTASK_PER_GROUP_PER_THREAD +export WRTTASK_PER_GROUP_PER_THREAD_GFS + +(( ntasks_fv3 = layout_x * layout_y * 6 )) +(( ntasks_fv3_gfs = layout_x_gfs * layout_y_gfs * 6 )) +export ntasks_fv3 +export ntasks_fv3_gfs + +(( ntasks_quilt = WRITE_GROUP * WRTTASK_PER_GROUP_PER_THREAD )) +(( ntasks_quilt_gfs = WRITE_GROUP_GFS * WRTTASK_PER_GROUP_PER_THREAD_GFS )) +export ntasks_quilt +export ntasks_quilt_gfs + +# Determine whether to use parallel NetCDF based on resolution +case ${fv3_res} in + "C48" | "C96" | "C192" | "C384") + OUTPUT_FILETYPE_ATM="netcdf" + OUTPUT_FILETYPE_SFC="netcdf" + ;; + "C768" | "C1152" | "C3072") + OUTPUT_FILETYPE_ATM="netcdf_parallel" + OUTPUT_FILETYPE_SFC="netcdf_parallel" + ;; +esac +export OUTPUT_FILETYPE_ATM OUTPUT_FILETYPE_SFC + +# Mediator specific settings +if [[ "${skip_mediator}" == "false" ]]; then + export nthreads_mediator=${nthreads_fv3} # Use same threads as FV3 +fi + +# MOM6 specific settings +if [[ "${skip_mom6}" == "false" ]]; then + nthreads_mom6=1 + case "${mom6_res}" in + "500") + ntasks_mom6=8 + OCNTIM=3600 + NX_GLB=72 + NY_GLB=35 + DT_DYNAM_MOM6='3600' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='r' + MOM6_RIVER_RUNOFF='False' + ;; + "100") + ntasks_mom6=20 + OCNTIM=3600 + NX_GLB=360 + NY_GLB=320 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="" + CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='False' + ;; + "50") + ntasks_mom6=60 + OCNTIM=3600 + NX_GLB=720 + NY_GLB=576 + DT_DYNAM_MOM6='1800' + DT_THERM_MOM6='3600' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RESTART_SETTING='n' + MOM6_RIVER_RUNOFF='True' + ;; + "025") + ntasks_mom6=220 + OCNTIM=1800 + NX_GLB=1440 + NY_GLB=1080 + DT_DYNAM_MOM6='900' + DT_THERM_MOM6='1800' + FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" + CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" + MOM6_RIVER_RUNOFF='True' + MOM6_RESTART_SETTING="r" + ;; + *) + echo "FATAL ERROR: Unsupported MOM6 resolution = ${mom6_res}, ABORT!" 
+ exit 1 + ;; + esac + export nthreads_mom6 ntasks_mom6 + export OCNTIM + export NX_GLB NY_GLB + export DT_DYNAM_MOM6 DT_THERM_MOM6 + export FRUNOFF + export CHLCLIM + export MOM6_RIVER_RUNOFF + export MOM6_RESTART_SETTING +fi + +# CICE6 specific settings +if [[ "${skip_cice6}" == "false" ]]; then + # Ensure we sourced the MOM6 section + if [[ "${skip_mom6}" == "true" ]]; then + echo "FATAL ERROR: CICE6 cannot be configured without MOM6, ABORT!" + exit 1 + fi + nthreads_cice6=${nthreads_mom6} # CICE6 needs to run on same threads as MOM6 + case "${cice6_res}" in + "500") + ntasks_cice6=4 + cice6_processor_shape="slenderX1" + ;; + "100") + ntasks_cice6=10 + cice6_processor_shape="slenderX2" + ;; + "050") + ntasks_cice6=30 + cice6_processor_shape="slenderX2" + ;; + "025") + ntasks_cice6=120 + cice6_processor_shape="slenderX2" + ;; + *) + echo "FATAL ERROR: Unsupported CICE6 resolution = ${cice6_res}, ABORT!" + exit 1 + ;; + esac + # NX_GLB and NY_GLB are set in the MOM6 section above + # CICE6 runs on the same domain decomposition as MOM6 + export nthreads_cice6 ntasks_cice6 + export cice6_processor_shape +fi + +# WW3 specific settings +if [[ "${skip_ww3}" == "false" ]]; then + nthreads_ww3=2 + case "${ww3_res}" in + "gnh_10m;aoc_9km;gsh_15m") + ntasks_ww3=140 + ;; + "gwes_30m") + ntasks_ww3=100 + ;; + "glo_025") + ntasks_ww3=262 + ;; + "mx025") + ntasks_ww3=80 + ;; + *) + echo "FATAL ERROR: Unsupported WW3 resolution = ${ww3_res}, ABORT!" + exit 1 + ;; + esac + export ntasks_ww3 nthreads_ww3 +fi + +echo "END: config.ufs" diff --git a/parm/config/gfs/config.vrfy b/parm/config/gfs/config.vrfy new file mode 100644 index 0000000000..3953cac0c1 --- /dev/null +++ b/parm/config/gfs/config.vrfy @@ -0,0 +1,111 @@ +#! /usr/bin/env bash + +########## config.vrfy ########## +# Verification step specific + +echo "BEGIN: config.vrfy" + +# Get task specific resources +. 
"${EXPDIR}/config.resources" vrfy + +export CDFNL="gdas" # Scores verification against GDAS/GFS analysis +export MKPGB4PRCP="NO" # Make 0.25-deg pgb files in ARCDIR for precip verification ## JKH +export VRFYRAD="NO" # Radiance data assimilation monitoring ## JKH +export VRFYOZN="NO" # Ozone data assimilation monitoring ## JKH +export VRFYMINMON="NO" # GSI minimization monitoring ## JKH +export VRFYTRAK="YES" # Hurricane track verification +export VRFYGENESIS="NO" # Cyclone genesis verification ## JKH +export VRFYFSU="NO" # Cyclone genesis verification (FSU) +export RUNMOS="NO" # whether to run entire MOS package + +#---------------------------------------------------------- +# Minimization, Radiance and Ozone Monitoring +#---------------------------------------------------------- + +if [[ ${VRFYRAD} = "YES" || ${VRFYMINMON} = "YES" || ${VRFYOZN} = "YES" ]]; then + + export envir="para" + export COM_IN=${ROTDIR} + + # Radiance Monitoring + if [[ "${VRFYRAD}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export RADMON_SUFFIX=${PSLOT} + export TANKverf="${NOSCRUB}/monitor/radmon" + export VRFYRADSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFRAD" + + fi + + # Minimization Monitoring + if [[ "${VRFYMINMON}" = "YES" ]] ; then + + export MINMON_SUFFIX=${PSLOT} + export M_TANKverf="${NOSCRUB}/monitor/minmon" + if [[ "${RUN}" = "gdas" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VMINMON" + elif [[ "${RUN}" = "gfs" ]] ; then + export VRFYMINSH="${HOMEgfs}/jobs/JGFS_ATMOS_VMINMON" + fi + + fi + + # Ozone Monitoring + if [[ "${VRFYOZN}" == "YES" && "${RUN}" == "${CDFNL}" ]] ; then + + export HOMEgfs_ozn="${HOMEgfs}" + export OZNMON_SUFFIX=${PSLOT} + export TANKverf_ozn="${NOSCRUB}/monitor/oznmon" + export VRFYOZNSH="${HOMEgfs}/jobs/JGDAS_ATMOS_VERFOZN" + + fi + +fi + + +#------------------------------------------------- +# Cyclone genesis and cyclone track verification +#------------------------------------------------- + +#JKHexport ens_tracker_ver=feature-GFSv17_com_reorg # TODO - temporary ahead of new tag/version +export ens_tracker_ver=v1.1.15.6 ## JKH +export HOMEens_tracker=$BASE_GIT/TC_tracker/${ens_tracker_ver} + +if [[ "${VRFYTRAK}" = "YES" ]]; then + + export TRACKERSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_TRACKER" + COMINsyn=${COMINsyn:-$(compath.py "${envir}"/com/gfs/"${gfs_ver}")/syndat} + export COMINsyn + if [[ "${RUN}" = "gdas" ]]; then + export FHOUT_CYCLONE=3 + export FHMAX_CYCLONE=${FHMAX} + else + export FHOUT_CYCLONE=6 + FHMAX_CYCLONE=$(( FHMAX_GFS<240 ? FHMAX_GFS : 240 )) + export FHMAX_CYCLONE + fi +fi + + +if [[ "${VRFYGENESIS}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISSH="${HOMEgfs}/jobs/JGFS_ATMOS_CYCLONE_GENESIS" +fi + +if [[ "${VRFYFSU}" == "YES" && "${RUN}" == "gfs" ]]; then + + export GENESISFSU="${HOMEgfs}/jobs/JGFS_ATMOS_FSU_GENESIS" +fi + +if [[ "${RUNMOS}" == "YES" && "${RUN}" == "gfs" ]]; then + + if [[ "${machine}" = "HERA" ]] ; then + export RUNGFSMOSSH="${HOMEgfs}/scripts/run_gfsmos_master.sh.hera" + else + echo "WARNING: MOS package is not enabled on ${machine}!" + export RUNMOS="NO" + export RUNGFSMOSSH="" + fi +fi + + +echo "END: config.vrfy" diff --git a/parm/config/gfs/config.wafs b/parm/config/gfs/config.wafs new file mode 100644 index 0000000000..fe2ba8cae7 --- /dev/null +++ b/parm/config/gfs/config.wafs @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafs ########## + +echo "BEGIN: config.wafs" + +# Get task specific resources +. 
$EXPDIR/config.resources wafs + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafs" diff --git a/parm/config/gfs/config.wafsblending b/parm/config/gfs/config.wafsblending new file mode 100644 index 0000000000..e49ffbdb88 --- /dev/null +++ b/parm/config/gfs/config.wafsblending @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending ########## + +echo "BEGIN: config.wafsblending" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending" diff --git a/parm/config/gfs/config.wafsblending0p25 b/parm/config/gfs/config.wafsblending0p25 new file mode 100644 index 0000000000..947baab2bb --- /dev/null +++ b/parm/config/gfs/config.wafsblending0p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsblending0p25 ########## + +echo "BEGIN: config.wafsblending0p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsblending0p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsblending0p25" diff --git a/parm/config/gfs/config.wafsgcip b/parm/config/gfs/config.wafsgcip new file mode 100644 index 0000000000..4909795c30 --- /dev/null +++ b/parm/config/gfs/config.wafsgcip @@ -0,0 +1,17 @@ +#! /usr/bin/env bash + +########## config.wafsgcip ########## + +echo "BEGIN: config.wafsgcip" + +# Get task specific resources +. $EXPDIR/config.resources wafsgcip + +export COMIN=$COMINatmos +export COMINgfs=$COMIN +export COMOUT=$COMOUTatmos +export SENDCOM="YES" +export COMLISTROOT="$UTILROOT/save/config" +export COMDATEROOT="N/A" + +echo "END: config.wafsgcip" diff --git a/parm/config/gfs/config.wafsgrib2 b/parm/config/gfs/config.wafsgrib2 new file mode 100644 index 0000000000..0d657788e0 --- /dev/null +++ b/parm/config/gfs/config.wafsgrib2 @@ -0,0 +1,15 @@ +#! /usr/bin/env bash + +########## config.wafsgrib2 ########## +# Post specific + +echo "BEGIN: config.wafsgrib2" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib2 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib2" diff --git a/parm/config/gfs/config.wafsgrib20p25 b/parm/config/gfs/config.wafsgrib20p25 new file mode 100644 index 0000000000..40cf80df22 --- /dev/null +++ b/parm/config/gfs/config.wafsgrib20p25 @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.wafsgrib20p25 ########## + +echo "BEGIN: config.wafsgrib20p25" + +# Get task specific resources +. $EXPDIR/config.resources wafsgrib20p25 + +export COMIN=$COMINatmos +export COMOUT=$COMOUTatmos +export SENDCOM="YES" + +echo "END: config.wafsgrib20p25" diff --git a/parm/config/gfs/config.wave b/parm/config/gfs/config.wave new file mode 100644 index 0000000000..ba7b7ad259 --- /dev/null +++ b/parm/config/gfs/config.wave @@ -0,0 +1,193 @@ +#! 
/usr/bin/env bash + +########## config.wave ########## +# Wave steps specific + +echo "BEGIN: config.wave" + +# Parameters that are common to all wave model steps + +# System and version +export wave_sys_ver=v1.0.0 + +export EXECwave="$HOMEgfs/exec" +export FIXwave="$HOMEgfs/fix/wave" +export PARMwave="$HOMEgfs/parm/wave" +export USHwave="$HOMEgfs/ush" + +# This config contains variables/parameters used in the fcst step +# Some others are also used across the workflow in wave component scripts + +# General runtime labels +export CDUMPwave="${RUN}wave" + +# In GFS/GDAS, restart files are generated/read from gdas runs +export CDUMPRSTwave="gdas" + +# Grids for wave model +export waveGRD=${waveGRD:-'mx025'} + +#grid dependent variable defaults +export waveGRDN='1' # grid number for ww3_multi +export waveGRDG='10' # grid group for ww3_multi +export USE_WAV_RMP='NO' # YES/NO rmp grid remapping pre-processed coefficients +export waveMULTIGRID='.false.' # .true./.false. for multi or shel +export MESH_WAV="mesh.${waveGRD}.nc" # Mesh grid for wave model for CMEPS +export waveesmfGRD=' ' # input grid for multigrid + +#Grid dependent variables for various grids +case "${waveGRD}" in + "gnh_10m;aoc_9km;gsh_15m") + #GFSv16 settings: + export waveGRDN='1 2 3' + export waveGRDG='10 20 30' + export USE_WAV_RMP='YES' + export waveMULTIGRID='.true.' + export IOSRV='3' + export MESH_WAV=' ' + export waveesmfGRD='glox_10m' + export waveuoutpGRD='points' + export waveinterpGRD='glo_15mxt at_10m ep_10m wc_10m glo_30m' + export wavepostGRD='gnh_10m aoc_9km gsh_15m' + ;; + "gwes_30m") + #Grid used for P8 + export waveinterpGRD=' ' + export wavepostGRD='gwes_30m' + ;; + "mx025") + #Grid used for HR1 (tripolar 1/4 deg) + export waveinterpGRD='reg025' + export wavepostGRD=' ' + ;; + "glo_025") + #GEFSv13 regular lat/lon 1/4 deg grid + export waveinterpGRD=' ' + export wavepostGRD='glo_025' + ;; + *) + echo "No grid specific wave config values" + ;; +esac + +# Grids for input wind fields +export WAVEWND_DID= +export WAVEWND_FID= + +# Grids for output fields (used in all steps) +export waveuoutpGRD=${waveuoutpGRD:-${waveGRD}} #unified point output grid +export waveinterpGRD=${waveinterpGRD:-'glo_15mxt at_10m ep_10m wc_10m glo_30m'} # Grids that need to be interpolated from native + # in POST will generate grib unless gribOK not set +export wavepostGRD=${wavepostGRD:-${waveGRD}} # Native computational grids that will be post-processed (grib2) + + +# The start time reflects the number of hindcast hours prior to the cycle initial time +if [ "$CDUMP" = "gdas" ]; then + export FHMAX_WAV=${FHMAX:-9} +else + export FHMAX_WAV=$FHMAX_GFS +fi +export WAVHINDH=${WAVHINDH:-0} +export FHMIN_WAV=${FHMIN_WAV:-0} +export FHOUT_WAV=${FHOUT_WAV:-3} +export FHMAX_HF_WAV=${FHMAX_HF_WAV:-120} +export FHOUT_HF_WAV=${FHOUT_HF_WAV:-1} +export FHMAX_WAV_IBP=180 +if (( FHMAX_WAV < FHMAX_WAV_IBP )); then export FHMAX_WAV_IBP=${FHMAX_GFS} ; fi + +# gridded and point output rate +export DTFLD_WAV=$(expr $FHOUT_HF_WAV \* 3600) +export DTPNT_WAV=${DTPNT_WAV:-3600} +export FHINCP_WAV=$(expr $DTPNT_WAV / 3600) + +# Selected output parameters (gridded) +export OUTPARS_WAV=${OUTPARS_WAV:-"WND HS FP DP PHS PTP PDIR"} + +# Restart file config +if [ "$CDUMP" = "gdas" ]; then + export WAVNCYC=4 + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-48} # RTOFS forecasts only out to 8 days +elif [ ${gfs_cyc} -ne 0 ]; then + export WAVHCYC=${assim_freq:-6} + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days 
+else + export WAVHCYC=0 + export FHMAX_WAV_CUR=${FHMAX_WAV_CUR:-192} # RTOFS forecasts only out to 8 days +fi + +# Restart timing business + +export RSTTYPE_WAV='T' # generate second tier of restart files +if [ "${CDUMP}" != gfs ]; then # Setting is valid for GDAS and GEFS + export DT_1_RST_WAV=10800 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=43200 # restart stride for checkpointing restart + export RSTIOFF_WAV=0 # first restart file offset relative to model start +else # This is a GFS run + rst_dt_gfs=$(( restart_interval_gfs * 3600 )) + if [ $rst_dt_gfs -gt 0 ]; then + export DT_1_RST_WAV=${rst_dt_gfs:-0} # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_gfs:-0} # restart stride for checkpointing restart + else + rst_dt_fhmax=$(( FHMAX_WAV * 3600 )) + export DT_1_RST_WAV=0 # time between restart files, set to DTRST=1 for a single restart file + export DT_2_RST_WAV=${rst_dt_fhmax:-0} # use checkpoint restart file name for creating restart at end of run + fi + export RSTIOFF_WAV=0 # first restart file offset relative to model start +fi +# +# Set runmember to default value if not GEFS cpl run +# (for a GFS coupled run, RUNMEM would be unset, so this should default to -1) +export RUNMEM=${RUNMEM:--1} +# Set wave model member tags if ensemble run +# -1: no suffix, deterministic; xxxNN: extract two last digits to make ofilename prefix=gwesNN +if [ $RUNMEM = -1 ]; then +# No suffix added to model ID in case of deterministic run + export waveMEMB= +else +# Extract member number only + export waveMEMB=$(echo $RUNMEM | grep -o '..$') +fi + +# Determine if wave component needs input and/or is coupled +export WW3ATMINP='CPL' +if [[ $DO_ICE == "YES" ]]; then + export WW3ICEINP='CPL' + export WAVEICE_FID= +else + export WW3ICEINP='YES' + export WAVEICE_FID=glix_10m +fi + +export WAVECUR_DID=rtofs +if [[ $DO_OCN == "YES" ]]; then + export WW3CURINP='CPL' + export WAVECUR_FID= +else + export WW3CURINP='YES' + export WAVECUR_FID=glix_10m +fi + +# Determine if input is from perturbed ensemble (T) or single input file (F) for all members +export WW3ATMIENS='F' +export WW3ICEIENS='F' +export WW3CURIENS='F' + +export GOFILETYPE=1 # GOFILETYPE=1 one gridded file per output step +export POFILETYPE=1 # POFILETYPE=1 one point file per output step + +# Parameters for ww3_multi/shel.inp +# Unified output T or F +export FUNIPNT='T' +# Output server type (see ww3_shel/multi.inp in WW3 repo) +export IOSRV=${IOSRV:-'1'} +# Flag for dedicated output process for unified points +export FPNTPROC='T' +# Flag for grids sharing dedicated output processes +export FGRDPROC='F' +# Flag for masking computation in two-way nesting +export FLAGMASKCOMP="F" +# Flag for masking at printout time. +export FLAGMASKOUT="F" + +echo "END: config.wave" diff --git a/parm/config/gfs/config.waveawipsbulls b/parm/config/gfs/config.waveawipsbulls new file mode 100644 index 0000000000..fd21869355 --- /dev/null +++ b/parm/config/gfs/config.waveawipsbulls @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsbulls ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsbulls" + +# Get task specific resources +. 
$EXPDIR/config.resources waveawipsbulls + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsbulls" diff --git a/parm/config/gfs/config.waveawipsgridded b/parm/config/gfs/config.waveawipsgridded new file mode 100644 index 0000000000..6896ec8bd2 --- /dev/null +++ b/parm/config/gfs/config.waveawipsgridded @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveawipsgridded ########## +# Wave steps specific + +echo "BEGIN: config.waveawipsgridded" + +# Get task specific resources +. $EXPDIR/config.resources waveawipsgridded + +export DBNROOT=/dev/null +export SENDCOM="YES" + +echo "END: config.waveawipsgridded" diff --git a/parm/config/gfs/config.wavegempak b/parm/config/gfs/config.wavegempak new file mode 100644 index 0000000000..da76c364ce --- /dev/null +++ b/parm/config/gfs/config.wavegempak @@ -0,0 +1,13 @@ +#! /usr/bin/env bash + +########## config.wavegempak ########## +# Wave steps specific + +echo "BEGIN: config.wavegempak" + +# Get task specific resources +. $EXPDIR/config.resources wavegempak + +export SENDCOM="YES" + +echo "END: config.wavegempak" diff --git a/parm/config/gfs/config.waveinit b/parm/config/gfs/config.waveinit new file mode 100644 index 0000000000..61715f7f01 --- /dev/null +++ b/parm/config/gfs/config.waveinit @@ -0,0 +1,14 @@ +#! /usr/bin/env bash + +########## config.waveinit ########## +# Wave steps specific + +echo "BEGIN: config.waveinit" + +# Get task specific resources +. $EXPDIR/config.resources waveinit + +# Step label +export sigMODE=${sigMODE:-init} + +echo "END: config.waveinit" diff --git a/parm/config/gfs/config.wavepostbndpnt b/parm/config/gfs/config.wavepostbndpnt new file mode 100644 index 0000000000..dfeddc79b2 --- /dev/null +++ b/parm/config/gfs/config.wavepostbndpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpnt + +echo "END: config.wavepostbndpnt" diff --git a/parm/config/gfs/config.wavepostbndpntbll b/parm/config/gfs/config.wavepostbndpntbll new file mode 100644 index 0000000000..bb7224cc70 --- /dev/null +++ b/parm/config/gfs/config.wavepostbndpntbll @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostbndpntbll ########## +# Wave steps specific + +echo "BEGIN: config.wavepostbndpntbll" + +# Get task specific resources +. $EXPDIR/config.resources wavepostbndpntbll + +echo "END: config.wavepostbndpntbll" diff --git a/parm/config/gfs/config.wavepostpnt b/parm/config/gfs/config.wavepostpnt new file mode 100644 index 0000000000..8befb91760 --- /dev/null +++ b/parm/config/gfs/config.wavepostpnt @@ -0,0 +1,11 @@ +#! /usr/bin/env bash + +########## config.wavepostpnt ########## +# Wave steps specific + +echo "BEGIN: config.wavepostpnt" + +# Get task specific resources +. $EXPDIR/config.resources wavepostpnt + +echo "END: config.wavepostpnt" diff --git a/parm/config/gfs/config.wavepostsbs b/parm/config/gfs/config.wavepostsbs new file mode 100644 index 0000000000..f9f8c81d44 --- /dev/null +++ b/parm/config/gfs/config.wavepostsbs @@ -0,0 +1,28 @@ +#! /usr/bin/env bash + +########## config.wavepostsbs ########## +# Wave steps specific + +echo "BEGIN: config.wavepostsbs" + +# Get task specific resources +. 
$EXPDIR/config.resources wavepostsbs + +# Subgrid info for grib2 encoding +export WAV_SUBGRBSRC="" +export WAV_SUBGRB="" + +# Options for point output (switch on/off boundary point output) +export DOIBP_WAV='NO' # Input boundary points +export DOFLD_WAV='YES' # Field data +export DOPNT_WAV='YES' # Station data +export DOGRB_WAV='YES' # Create grib2 files +if [[ -z ${waveinterpGRD} ]]; then + export DOGRI_WAV='YES' # Create interpolated grids +else + export DOGRI_WAV='NO' # Do not create interpolated grids +fi +export DOSPC_WAV='YES' # Spectral post +export DOBLL_WAV='YES' # Bulletin post + +echo "END: config.wavepostsbs" diff --git a/parm/config/gfs/config.waveprep b/parm/config/gfs/config.waveprep new file mode 100644 index 0000000000..1c9a40c1d8 --- /dev/null +++ b/parm/config/gfs/config.waveprep @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +########## config.waveprep ########## +# Wave steps specific + +echo "BEGIN: config.waveprep" + +# Get task specific resources +. $EXPDIR/config.resources waveprep + +# Step label +export sigMODE=${sigMODE:-prep} + +# Intake currents settings +export WAV_CUR_DT=${WAV_CUR_DT:-3} +export WAV_CUR_HF_DT=${WAV_CUR_HF_DT:-3} +export WAV_CUR_HF_FH=${WAV_CUR_HF_FH:-0} +export WAV_CUR_CDO_SMOOTH="NO" + +# Location of CDO module +export CDO_ROOT=${CDO_ROOT:-/usrx/local/dev/packages/cdo/1.9.8} + +if [ "${WW3ICEINP}" = "YES" ]; then + export WAVICEFILE=${CDUMP}.t${cyc}z.seaice.5min.grib2 +fi + +echo "END: config.waveprep" diff --git a/parm/config/gfs/yaml/defaults.yaml b/parm/config/gfs/yaml/defaults.yaml new file mode 100644 index 0000000000..202efc2270 --- /dev/null +++ b/parm/config/gfs/yaml/defaults.yaml @@ -0,0 +1,25 @@ +base: + DO_JEDIATMVAR: "NO" + DO_JEDIATMENS: "NO" + DO_JEDIOCNVAR: "NO" + DO_JEDILANDDA: "NO" + DO_MERGENSST: "NO" +aeroanl: + IO_LAYOUT_X: 1 + IO_LAYOUT_Y: 1 + +landanl: + IO_LAYOUT_X: 1 + IO_LAYOUT_Y: 1 + +ocnanal: + SOCA_INPUT_FIX_DIR: '/scratch2/NCEPDEV/ocean/Guillaume.Vernieres/data/static/72x35x25' + CASE_ANL: 'C48' + SOCA_OBS_LIST: '' + COMIN_OBS: '/scratch2/NCEPDEV/marineda/r2d2' + SABER_BLOCKS_YAML: '' + SOCA_NINNER: 50 + R2D2_OBS_SRC: 'gdas_marine' + R2D2_OBS_DUMP: 's2s_v1' + NICAS_RESOL: 1 + NICAS_GRID_SIZE: 15000 diff --git a/parm/config/gfs/yaml/test_ci.yaml b/parm/config/gfs/yaml/test_ci.yaml new file mode 100644 index 0000000000..bb9602be59 --- /dev/null +++ b/parm/config/gfs/yaml/test_ci.yaml @@ -0,0 +1,4 @@ +defaults: + !INC {{ HOMEgfs }}/parm/config/gfs/yaml/defaults.yaml +base: + ACCOUNT: "nems" diff --git a/parm/mom6/MOM_input_template_025 b/parm/mom6/MOM_input_template_025 index 3abbf2191b..6c0779f426 100644 --- a/parm/mom6/MOM_input_template_025 +++ b/parm/mom6/MOM_input_template_025 @@ -6,7 +6,6 @@ ! This MOM_input file typically contains only the non-default values that are needed to reproduce this example. ! A full list of parameters for this example can be found in the corresponding MOM_parameter_doc.all file ! which is generated by the model at run-time. - ! === module MOM_domains === TRIPOLAR_N = True ! [Boolean] default = False ! Use tripolar connectivity at the northern edge of the domain. With @@ -406,7 +405,7 @@ GILL_EQUATORIAL_LD = True ! [Boolean] default = False ! radius, otherwise, if false, use Pedlosky's definition. These definitions ! differ by a factor of 2 in front of the beta term in the denominator. Gill's ! is the more appropriate definition. -INTERNAL_WAVE_SPEED_BETTER_EST = False ! [Boolean] default = True +INTERNAL_WAVE_SPEED_BETTER_EST = False ! [Boolean] default = True ! 
If true, use a more robust estimate of the first mode wave speed as the ! starting point for iterations. @@ -510,6 +509,9 @@ USE_LAND_MASK_FOR_HVISC = False ! [Boolean] default = False HMIX_FIXED = 0.5 ! [m] ! The prescribed depth over which the near-surface viscosity and diffusivity are ! elevated when the bulk mixed layer is not used. +KVML = 1.0E-04 ! [m2 s-1] default = 1.0E-04 + ! The kinematic viscosity in the mixed layer. A typical value is ~1e-2 m2 s-1. + ! KVML is not used if BULKMIXEDLAYER is true. The default is set by KV. MAXVEL = 6.0 ! [m s-1] default = 3.0E+08 ! The maximum velocity allowed before the velocity components are truncated. @@ -731,7 +733,7 @@ NSTAR = 0.06 ! [nondim] default = 0.2 ! The portion of the buoyant potential energy imparted by surface fluxes that is ! available to drive entrainment at the base of mixed layer when that energy is ! positive. -EPBL_MLD_BISECTION = True ! [Boolean] default = False +EPBL_MLD_BISECTION = True ! [Boolean] default = False ! If true, use bisection with the iterative determination of the self-consistent ! mixed layer depth. Otherwise use the false position after a maximum and ! minimum bound have been evaluated and the returned value or bisection before @@ -833,6 +835,30 @@ ENERGYSAVEDAYS = 1.00 ! [days] default = 1.0 ! === module ocean_model_init === +! === module MOM_oda_incupd === +ODA_INCUPD = @[ODA_INCUPD] ! [Boolean] default = False + ! If true, oda incremental updates will be applied + ! everywhere in the domain. +ODA_INCUPD_FILE = "mom6_increment.nc" ! The name of the file with the T,S,h increments. + +ODA_TEMPINC_VAR = "Temp" ! default = "ptemp_inc" + ! The name of the potential temperature inc. variable in + ! ODA_INCUPD_FILE. +ODA_SALTINC_VAR = "Salt" ! default = "sal_inc" + ! The name of the salinity inc. variable in + ! ODA_INCUPD_FILE. +ODA_THK_VAR = "h" ! default = "h" + ! The name of the int. depth inc. variable in + ! ODA_INCUPD_FILE. +ODA_INCUPD_UV = false ! +!ODA_UINC_VAR = "u" ! default = "u_inc" + ! The name of the zonal vel. inc. variable in + ! ODA_INCUPD_UV_FILE. +!ODA_VINC_VAR = "v" ! default = "v_inc" + ! The name of the meridional vel. inc. variable in + ! ODA_INCUPD_UV_FILE. +ODA_INCUPD_NHOURS = @[ODA_INCUPD_NHOURS] ! default=3.0 + ! === module MOM_surface_forcing === OCEAN_SURFACE_STAGGER = "A" ! default = "C" ! A case-insensitive character string to indicate the @@ -868,8 +894,8 @@ LIQUID_RUNOFF_FROM_DATA = @[MOM6_RIVER_RUNOFF] ! [Boolean] default = False ! the data_table using the component name 'OCN'. ! === module ocean_stochastics === DO_SPPT = @[DO_OCN_SPPT] ! [Boolean] default = False - ! If true perturb the diabatic tendencies in MOM_diabadic_driver -PERT_EPBL = @[PERT_EPBL] ! [Boolean] default = False + ! If true perturb the diabatic tendencies in MOM_diabatic_driver +PERT_EPBL = @[PERT_EPBL] ! [Boolean] default = False ! If true perturb the KE dissipation and destruction in MOM_energetic_PBL ! === module MOM_restart === RESTART_CHECKSUMS_REQUIRED = False diff --git a/parm/mom6/MOM_input_template_050 b/parm/mom6/MOM_input_template_050 index 4e703a4bfd..4c39198c02 100644 --- a/parm/mom6/MOM_input_template_050 +++ b/parm/mom6/MOM_input_template_050 @@ -6,7 +6,6 @@ ! This MOM_input file typically contains only the non-default values that are needed to reproduce this example. ! A full list of parameters for this example can be found in the corresponding MOM_parameter_doc.all file ! which is generated by the model at run-time. - ! === module MOM_domains === TRIPOLAR_N = True ! 
[Boolean] default = False ! Use tripolar connectivity at the northern edge of the domain. With @@ -419,7 +418,7 @@ GILL_EQUATORIAL_LD = True ! [Boolean] default = False ! radius, otherwise, if false, use Pedlosky's definition. These definitions ! differ by a factor of 2 in front of the beta term in the denominator. Gill's ! is the more appropriate definition. -INTERNAL_WAVE_SPEED_BETTER_EST = False ! [Boolean] default = True +INTERNAL_WAVE_SPEED_BETTER_EST = False ! [Boolean] default = True ! If true, use a more robust estimate of the first mode wave speed as the ! starting point for iterations. @@ -540,6 +539,9 @@ USE_LAND_MASK_FOR_HVISC = False ! [Boolean] default = False HMIX_FIXED = 0.5 ! [m] ! The prescribed depth over which the near-surface viscosity and diffusivity are ! elevated when the bulk mixed layer is not used. +KVML = 1.0E-04 ! [m2 s-1] default = 1.0E-04 + ! The kinematic viscosity in the mixed layer. A typical value is ~1e-2 m2 s-1. + ! KVML is not used if BULKMIXEDLAYER is true. The default is set by KV. MAXVEL = 6.0 ! [m s-1] default = 3.0E+08 ! The maximum velocity allowed before the velocity components are truncated. @@ -757,7 +759,7 @@ MSTAR2_COEF1 = 0.29 ! [nondim] default = 0.3 MSTAR2_COEF2 = 0.152 ! [nondim] default = 0.085 ! Coefficient in computing mstar when only rotation limits the total mixing ! (used if EPBL_MSTAR_SCHEME = OM4) -EPBL_MLD_BISECTION = True ! [Boolean] default = False +EPBL_MLD_BISECTION = True ! [Boolean] default = False ! If true, use bisection with the iterative determination of the self-consistent ! mixed layer depth. Otherwise use the false position after a maximum and ! minimum bound have been evaluated and the returned value or bisection before @@ -858,8 +860,32 @@ USE_NEUTRAL_DIFFUSION = True ! [Boolean] default = False ! If true, enables the neutral diffusion module. ! === module ocean_model_init === - RESTART_CHECKSUMS_REQUIRED = False + +! === module MOM_oda_incupd === +ODA_INCUPD = @[ODA_INCUPD] ! [Boolean] default = False + ! If true, oda incremental updates will be applied + ! everywhere in the domain. +ODA_INCUPD_FILE = "mom6_increment.nc" ! The name of the file with the T,S,h increments. + +ODA_TEMPINC_VAR = "Temp" ! default = "ptemp_inc" + ! The name of the potential temperature inc. variable in + ! ODA_INCUPD_FILE. +ODA_SALTINC_VAR = "Salt" ! default = "sal_inc" + ! The name of the salinity inc. variable in + ! ODA_INCUPD_FILE. +ODA_THK_VAR = "h" ! default = "h" + ! The name of the int. depth inc. variable in + ! ODA_INCUPD_FILE. +ODA_INCUPD_UV = false ! +!ODA_UINC_VAR = "u" ! default = "u_inc" + ! The name of the zonal vel. inc. variable in + ! ODA_INCUPD_UV_FILE. +!ODA_VINC_VAR = "v" ! default = "v_inc" + ! The name of the meridional vel. inc. variable in + ! ODA_INCUPD_UV_FILE. +ODA_INCUPD_NHOURS = @[ODA_INCUPD_NHOURS] ! default=3.0 + ! === module MOM_lateral_boundary_diffusion === ! This module implements lateral diffusion of tracers near boundaries @@ -913,8 +939,8 @@ LIQUID_RUNOFF_FROM_DATA = @[MOM6_RIVER_RUNOFF] ! [Boolean] default = False ! the data_table using the component name 'OCN'. ! === module ocean_stochastics === DO_SPPT = @[DO_OCN_SPPT] ! [Boolean] default = False - ! If true perturb the diabatic tendencies in MOM_diabadic_driver -PERT_EPBL = @[PERT_EPBL] ! [Boolean] default = False + ! If true perturb the diabatic tendencies in MOM_diabatic_driver +PERT_EPBL = @[PERT_EPBL] ! [Boolean] default = False ! If true perturb the KE dissipation and destruction in MOM_energetic_PBL ! 
=== module MOM_restart === diff --git a/parm/mom6/MOM_input_template_100 b/parm/mom6/MOM_input_template_100 index 1716f6fabd..8b616ad27f 100644 --- a/parm/mom6/MOM_input_template_100 +++ b/parm/mom6/MOM_input_template_100 @@ -1,5 +1,4 @@ ! This file was written by the model and records all non-layout or debugging parameters used at run-time. - ! === module MOM === ! === module MOM_unit_scaling === @@ -76,7 +75,7 @@ SAVE_INITIAL_CONDS = False ! [Boolean] default = False ! If true, write the initial conditions to a file given by IC_OUTPUT_FILE. ! === module MOM_oda_incupd === -ODA_INCUPD = @[MOM_IAU] ! [Boolean] default = False +ODA_INCUPD = @[ODA_INCUPD] ! [Boolean] default = False ! If true, oda incremental updates will be applied ! everywhere in the domain. ODA_INCUPD_FILE = "mom6_increment.nc" ! The name of the file with the T,S,h increments. @@ -97,7 +96,7 @@ ODA_UINC_VAR = "u_inc" ! default = "u_inc" ODA_VINC_VAR = "v_inc" ! default = "v_inc" ! The name of the meridional vel. inc. variable in ! ODA_INCUPD_UV_FILE. -ODA_INCUPD_NHOURS = @[MOM_IAU_HRS] ! default=3.0 +ODA_INCUPD_NHOURS = @[ODA_INCUPD_NHOURS] ! default=3.0 ! Number of hours for full update (0=direct insertion). ! === module MOM_domains === @@ -163,7 +162,7 @@ TOPO_CONFIG = "file" ! ! Phillips - ACC-like idealized topography used in the Phillips config. ! dense - Denmark Strait-like dense water formation and overflow. ! USER - call a user modified routine. -TOPO_EDITS_FILE = "topo_edits_011818.nc" ! default = "" +TOPO_EDITS_FILE = "@[TOPOEDITS]" ! default = "" ! The file from which to read a list of i,j,z topography overrides. ALLOW_LANDMASK_CHANGES = @[MOM6_ALLOW_LANDMASK_CHANGES] ! default = "False" ! If true, allow topography overrides to change ocean points to land @@ -430,7 +429,7 @@ VISC_RES_FN_POWER = 2 ! [nondim] default = 100 ! used, although even integers are more efficient to calculate. Setting this ! greater than 100 results in a step-function being used. This function affects ! lateral viscosity, Kh, and not KhTh. -INTERNAL_WAVE_SPEED_BETTER_EST = False ! [Boolean] default = True +INTERNAL_WAVE_SPEED_BETTER_EST = False ! [Boolean] default = True ! If true, use a more robust estimate of the first mode wave speed as the ! starting point for iterations. @@ -531,6 +530,9 @@ USE_KH_BG_2D = True ! [Boolean] default = False HMIX_FIXED = 0.5 ! [m] ! The prescribed depth over which the near-surface viscosity and diffusivity are ! elevated when the bulk mixed layer is not used. +KVML = 1.0E-04 ! [m2 s-1] default = 1.0E-04 + ! The kinematic viscosity in the mixed layer. A typical value is ~1e-2 m2 s-1. + ! KVML is not used if BULKMIXEDLAYER is true. The default is set by KV. MAXVEL = 6.0 ! [m s-1] default = 3.0E+08 ! The maximum velocity allowed before the velocity components are truncated. @@ -829,6 +831,28 @@ ENERGYSAVEDAYS = 0.25 ! [days] default = 1.0 ! other globally summed diagnostics. ! === module ocean_model_init === +ODA_INCUPD = @[ODA_INCUPD] ! [Boolean] default = False + ! If true, oda incremental updates will be applied + ! everywhere in the domain. +ODA_INCUPD_FILE = "mom6_increment.nc" ! The name of the file with the T,S,h increments. + +ODA_TEMPINC_VAR = "Temp" ! default = "ptemp_inc" + ! The name of the potential temperature inc. variable in + ! ODA_INCUPD_FILE. +ODA_SALTINC_VAR = "Salt" ! default = "sal_inc" + ! The name of the salinity inc. variable in + ! ODA_INCUPD_FILE. +ODA_THK_VAR = "h" ! default = "h" + ! The name of the int. depth inc. variable in + ! ODA_INCUPD_FILE. +ODA_INCUPD_UV = false ! 
+!ODA_UINC_VAR = "u" ! default = "u_inc" + ! The name of the zonal vel. inc. variable in + ! ODA_INCUPD_UV_FILE. +!ODA_VINC_VAR = "v" ! default = "v_inc" + ! The name of the meridional vel. inc. variable in + ! ODA_INCUPD_UV_FILE. +ODA_INCUPD_NHOURS = @[ODA_INCUPD_NHOURS] ! default=3.0 ! === module MOM_surface_forcing === OCEAN_SURFACE_STAGGER = "A" ! default = "C" @@ -856,8 +880,8 @@ FIX_USTAR_GUSTLESS_BUG = False ! [Boolean] default = True ! velocity ! === module ocean_stochastics === DO_SPPT = @[DO_OCN_SPPT] ! [Boolean] default = False - ! If true perturb the diabatic tendencies in MOM_diabadic_driver -PERT_EPBL = @[PERT_EPBL] ! [Boolean] default = False + ! If true perturb the diabatic tendencies in MOM_diabatic_driver +PERT_EPBL = @[PERT_EPBL] ! [Boolean] default = False ! If true perturb the KE dissipation and destruction in MOM_energetic_PBL ! === module MOM_restart === diff --git a/parm/mom6/MOM_input_template_500 b/parm/mom6/MOM_input_template_500 new file mode 100644 index 0000000000..5a378caeb0 --- /dev/null +++ b/parm/mom6/MOM_input_template_500 @@ -0,0 +1,541 @@ +! This file was written by the model and records the non-default parameters used at run-time. +! === module MOM === + +! === module MOM_unit_scaling === +! Parameters for doing unit scaling of variables. +USE_REGRIDDING = True ! [Boolean] default = False + ! If True, use the ALE algorithm (regridding/remapping). If False, use the + ! layered isopycnal algorithm. +THICKNESSDIFFUSE = True ! [Boolean] default = False + ! If true, interface heights are diffused with a coefficient of KHTH. +THICKNESSDIFFUSE_FIRST = True ! [Boolean] default = False + ! If true, do thickness diffusion before dynamics. This is only used if + ! THICKNESSDIFFUSE is true. +DT = @[DT_DYNAM_MOM6] ! [s] + ! The (baroclinic) dynamics time step. The time-step that is actually used will + ! be an integer fraction of the forcing time-step (DT_FORCING in ocean-only mode + ! or the coupling timestep in coupled mode.) +DT_THERM = @[DT_THERM_MOM6] ! [s] default = 1800.0 + ! The thermodynamic and tracer advection time step. Ideally DT_THERM should be + ! an integer multiple of DT and less than the forcing or coupling time-step, + ! unless THERMO_SPANS_COUPLING is true, in which case DT_THERM can be an integer + ! multiple of the coupling timestep. By default DT_THERM is set to DT. +THERMO_SPANS_COUPLING = @[MOM6_THERMO_SPAN] ! [Boolean] default = False + ! If true, the MOM will take thermodynamic and tracer timesteps that can be + ! longer than the coupling timestep. The actual thermodynamic timestep that is + ! used in this case is the largest integer multiple of the coupling timestep + ! that is less than or equal to DT_THERM. +HFREEZE = 20.0 ! [m] default = -1.0 + ! If HFREEZE > 0, melt potential will be computed. The actual depth + ! over which melt potential is computed will be min(HFREEZE, OBLD) + ! where OBLD is the boundary layer depth. If HFREEZE <= 0 (default) + ! melt potential will not be computed. +FRAZIL = True ! [Boolean] default = False + ! If true, water freezes if it gets too cold, and the accumulated heat deficit + ! is returned in the surface state. FRAZIL is only used if + ! ENABLE_THERMODYNAMICS is true. +BOUND_SALINITY = True ! [Boolean] default = False + ! If true, limit salinity to being positive. (The sea-ice model may ask for more + ! salt than is available and drive the salinity negative otherwise.) + +! === module MOM_domains === +TRIPOLAR_N = True ! [Boolean] default = False + ! 
Use tripolar connectivity at the northern edge of the domain. With + ! TRIPOLAR_N, NIGLOBAL must be even. +NIGLOBAL = @[NX_GLB] ! + ! The total number of thickness grid points in the x-direction in the physical + ! domain. With STATIC_MEMORY_ this is set in MOM_memory.h at compile time. +NJGLOBAL = @[NY_GLB] ! + ! The total number of thickness grid points in the y-direction in the physical + ! domain. With STATIC_MEMORY_ this is set in MOM_memory.h at compile time. + +! === module MOM_hor_index === +! Sets the horizontal array index types. + +! === module MOM_fixed_initialization === +INPUTDIR = "INPUT" ! default = "." + ! The directory in which input files are found. + +! === module MOM_grid_init === +GRID_CONFIG = "mosaic" ! + ! A character string that determines the method for defining the horizontal + ! grid. Current options are: + ! mosaic - read the grid from a mosaic (supergrid) + ! file set by GRID_FILE. + ! cartesian - use a (flat) Cartesian grid. + ! spherical - use a simple spherical grid. + ! mercator - use a Mercator spherical grid. +GRID_FILE = "ocean_hgrid.nc" ! + ! Name of the file from which to read horizontal grid data. +GRID_ROTATION_ANGLE_BUGS = False ! [Boolean] default = True + ! If true, use an older algorithm to calculate the sine and + ! cosines needed rotate between grid-oriented directions and + ! true north and east. Differences arise at the tripolar fold +USE_TRIPOLAR_GEOLONB_BUG = False ! [Boolean] default = True + ! If true, use older code that incorrectly sets the longitude in some points + ! along the tripolar fold to be off by 360 degrees. +TOPO_CONFIG = "file" ! + ! This specifies how bathymetry is specified: + ! file - read bathymetric information from the file + ! specified by (TOPO_FILE). + ! flat - flat bottom set to MAXIMUM_DEPTH. + ! bowl - an analytically specified bowl-shaped basin + ! ranging between MAXIMUM_DEPTH and MINIMUM_DEPTH. + ! spoon - a similar shape to 'bowl', but with an vertical + ! wall at the southern face. + ! halfpipe - a zonally uniform channel with a half-sine + ! profile in the meridional direction. + ! bbuilder - build topography from list of functions. + ! benchmark - use the benchmark test case topography. + ! Neverworld - use the Neverworld test case topography. + ! DOME - use a slope and channel configuration for the + ! DOME sill-overflow test case. + ! ISOMIP - use a slope and channel configuration for the + ! ISOMIP test case. + ! DOME2D - use a shelf and slope configuration for the + ! DOME2D gravity current/overflow test case. + ! Kelvin - flat but with rotated land mask. + ! seamount - Gaussian bump for spontaneous motion test case. + ! dumbbell - Sloshing channel with reservoirs on both ends. + ! shelfwave - exponential slope for shelfwave test case. + ! Phillips - ACC-like idealized topography used in the Phillips config. + ! dense - Denmark Strait-like dense water formation and overflow. + ! USER - call a user modified routine. +TOPO_FILE = "ocean_topog.nc" ! default = "topog.nc" + ! The file from which the bathymetry is read. +!MAXIMUM_DEPTH = 5801.341919389728 ! [m] + ! The (diagnosed) maximum depth of the ocean. +MINIMUM_DEPTH = 10.0 ! [m] default = 0.0 + ! If MASKING_DEPTH is unspecified, then anything shallower than MINIMUM_DEPTH is + ! assumed to be land and all fluxes are masked out. If MASKING_DEPTH is + ! specified, then all depths shallower than MINIMUM_DEPTH but deeper than + ! MASKING_DEPTH are rounded to MINIMUM_DEPTH. + +! === module MOM_open_boundary === +! 
Controls where open boundaries are located, what kind of boundary condition to impose, and what data to apply, +! if any. +MASKING_DEPTH = 0.0 ! [m] default = -9999.0 + ! The depth below which to mask points as land points, for which all fluxes are + ! zeroed out. MASKING_DEPTH is ignored if negative. + +! === module MOM_verticalGrid === +! Parameters providing information about the vertical grid. +NK = 25 ! [nondim] + ! The number of model layers. + +! === module MOM_tracer_registry === + +! === module MOM_EOS === +TFREEZE_FORM = "MILLERO_78" ! default = "LINEAR" + ! TFREEZE_FORM determines which expression should be used for the freezing + ! point. Currently, the valid choices are "LINEAR", "MILLERO_78", "TEOS10" + +! === module MOM_restart === +RESTART_CHECKSUMS_REQUIRED = False +! === module MOM_tracer_flow_control === + +! === module MOM_coord_initialization === +COORD_CONFIG = "file" ! default = "none" + ! This specifies how layers are to be defined: + ! ALE or none - used to avoid defining layers in ALE mode + ! file - read coordinate information from the file + ! specified by (COORD_FILE). + ! BFB - Custom coords for buoyancy-forced basin case + ! based on SST_S, T_BOT and DRHO_DT. + ! linear - linear based on interfaces not layers + ! layer_ref - linear based on layer densities + ! ts_ref - use reference temperature and salinity + ! ts_range - use range of temperature and salinity + ! (T_REF and S_REF) to determine surface density + ! and GINT calculate internal densities. + ! gprime - use reference density (RHO_0) for surface + ! density and GINT calculate internal densities. + ! ts_profile - use temperature and salinity profiles + ! (read from COORD_FILE) to set layer densities. + ! USER - call a user modified routine. +COORD_FILE = "layer_coord25.nc" ! + ! The file from which the coordinate densities are read. +REGRIDDING_COORDINATE_MODE = "HYCOM1" ! default = "LAYER" + ! Coordinate mode for vertical regridding. Choose among the following + ! possibilities: LAYER - Isopycnal or stacked shallow water layers + ! ZSTAR, Z* - stretched geopotential z* + ! SIGMA_SHELF_ZSTAR - stretched geopotential z* ignoring shelf + ! SIGMA - terrain following coordinates + ! RHO - continuous isopycnal + ! HYCOM1 - HyCOM-like hybrid coordinate + ! SLIGHT - stretched coordinates above continuous isopycnal + ! ADAPTIVE - optimize for smooth neutral density surfaces +BOUNDARY_EXTRAPOLATION = True ! [Boolean] default = False + ! When defined, a proper high-order reconstruction scheme is used within + ! boundary cells rather than PCM. E.g., if PPM is used for remapping, a PPM + ! reconstruction will also be used within boundary cells. +ALE_COORDINATE_CONFIG = "HYBRID:hycom1_25.nc,sigma2,FNC1:5,4000,4.5,.01" ! default = "UNIFORM" + ! Determines how to specify the coordinate + ! resolution. Valid options are: + ! PARAM - use the vector-parameter ALE_RESOLUTION + ! UNIFORM[:N] - uniformly distributed + ! FILE:string - read from a file. The string specifies + ! the filename and variable name, separated + ! by a comma or space, e.g. FILE:lev.nc,dz + ! or FILE:lev.nc,interfaces=zw + ! WOA09[:N] - the WOA09 vertical grid (approximately) + ! FNC1:string - FNC1:dz_min,H_total,power,precision + ! HYBRID:string - read from a file. The string specifies + ! the filename and two variable names, separated + ! by a comma or space, for sigma-2 and dz. e.g. + ! 
HYBRID:vgrid.nc,sigma2,dz +!ALE_RESOLUTION = 2*5.0, 5.01, 5.07, 5.25, 5.68, 6.55, 8.1, 10.66, 14.620000000000001, 20.450000000000003, 28.73, 40.1, 55.32, 75.23, 100.8, 133.09, 173.26, 222.62, 282.56, 354.62, 440.47, 541.87, 660.76, 799.1800000000001 ! [m] + ! The distribution of vertical resolution for the target + ! grid used for Eulerian-like coordinates. For example, + ! in z-coordinate mode, the parameter is a list of level + ! thicknesses (in m). In sigma-coordinate mode, the list + ! is of non-dimensional fractions of the water column. +!TARGET_DENSITIES = 1010.0, 1020.843017578125, 1027.0274658203125, 1029.279541015625, 1030.862548828125, 1032.1572265625, 1033.27978515625, 1034.251953125, 1034.850830078125, 1035.28857421875, 1035.651123046875, 1035.967529296875, 1036.2410888671875, 1036.473876953125, 1036.6800537109375, 1036.8525390625, 1036.9417724609375, 1037.0052490234375, 1037.057373046875, 1037.1065673828125, 1037.15576171875, 1037.2060546875, 1037.26416015625, 1037.3388671875, 1037.4749755859375, 1038.0 ! [m] + ! HYBRID target densities for itnerfaces +REGRID_COMPRESSIBILITY_FRACTION = 0.01 ! [not defined] default = 0.0 + ! When interpolating potential density profiles we can add + ! some artificial compressibility solely to make homogenous + ! regions appear stratified. +MAXIMUM_INT_DEPTH_CONFIG = "FNC1:5,8000.0,1.0,.125" ! default = "NONE" + ! Determines how to specify the maximum interface depths. + ! Valid options are: + ! NONE - there are no maximum interface depths + ! PARAM - use the vector-parameter MAXIMUM_INTERFACE_DEPTHS + ! FILE:string - read from a file. The string specifies + ! the filename and variable name, separated + ! by a comma or space, e.g. FILE:lev.nc,Z + ! FNC1:string - FNC1:dz_min,H_total,power,precision +!MAXIMUM_INT_DEPTHS = 0.0, 5.0, 36.25, 93.75, 177.5, 287.5, 423.75, 586.25, 775.0, 990.0, 1231.25, 1498.75, 1792.5, 2112.5, 2458.75, 2831.25, 3230.0, 3655.0, 4106.25, 4583.75, 5087.5, 5617.5, 6173.75, 6756.25, 7365.0, 8000.0 ! [m] + ! The list of maximum depths for each interface. +MAX_LAYER_THICKNESS_CONFIG = "FNC1:400,31000.0,0.1,.01" ! default = "NONE" + ! Determines how to specify the maximum layer thicknesses. + ! Valid options are: + ! NONE - there are no maximum layer thicknesses + ! PARAM - use the vector-parameter MAX_LAYER_THICKNESS + ! FILE:string - read from a file. The string specifies + ! the filename and variable name, separated + ! by a comma or space, e.g. FILE:lev.nc,Z + ! FNC1:string - FNC1:dz_min,H_total,power,precision +!MAX_LAYER_THICKNESS = 400.0, 1094.2, 1144.02, 1174.81, 1197.42, 1215.4099999999999, 1230.42, 1243.3200000000002, 1254.65, 1264.78, 1273.94, 1282.31, 1290.02, 1297.17, 1303.85, 1310.1, 1316.0, 1321.5700000000002, 1326.85, 1331.87, 1336.67, 1341.25, 1345.6399999999999, 1349.85, 1353.88 ! [m] + ! The list of maximum thickness for each layer. +REMAPPING_SCHEME = "PPM_H4" ! default = "PLM" + ! This sets the reconstruction scheme used for vertical remapping for all + ! variables. It can be one of the following schemes: PCM (1st-order + ! accurate) + ! PLM (2nd-order accurate) + ! PPM_H4 (3rd-order accurate) + ! PPM_IH4 (3rd-order accurate) + ! PQM_IH4IH3 (4th-order accurate) + ! PQM_IH6IH5 (5th-order accurate) + +! === module MOM_grid === +! Parameters providing information about the lateral grid. + +! === module MOM_state_initialization === +INIT_LAYERS_FROM_Z_FILE = True ! [Boolean] default = False + ! If true, initialize the layer thicknesses, temperatures, and salinities from a + ! 
Z-space file on a latitude-longitude grid. + +! === module MOM_initialize_layers_from_Z === +TEMP_SALT_Z_INIT_FILE = "" ! default = "temp_salt_z.nc" + ! The name of the z-space input file used to initialize + ! temperatures (T) and salinities (S). If T and S are not + ! in the same file, TEMP_Z_INIT_FILE and SALT_Z_INIT_FILE + ! must be set. +TEMP_Z_INIT_FILE = "woa18_decav_t00_01.nc" ! default = "" + ! The name of the z-space input file used to initialize + ! temperatures, only. +SALT_Z_INIT_FILE = "woa18_decav_s00_01.nc" ! default = "" + ! The name of the z-space input file used to initialize + ! temperatures, only. +Z_INIT_FILE_PTEMP_VAR = "t_an" ! default = "ptemp" + ! The name of the potential temperature variable in + ! TEMP_Z_INIT_FILE. +Z_INIT_FILE_SALT_VAR = "s_an" ! default = "salt" + ! The name of the salinity variable in + ! SALT_Z_INIT_FILE. +Z_INIT_ALE_REMAPPING = True ! [Boolean] default = False + ! If True, then remap straight to model coordinate from file. + +! === module MOM_diag_mediator === + +! === module MOM_MEKE === +USE_MEKE = True ! [Boolean] default = False + ! If true, turns on the MEKE scheme which calculates a sub-grid mesoscale eddy + ! kinetic energy budget. + +! === module MOM_lateral_mixing_coeffs === +USE_VARIABLE_MIXING = True ! [Boolean] default = False + ! If true, the variable mixing code will be called. This allows diagnostics to + ! be created even if the scheme is not used. If KHTR_SLOPE_CFF>0 or + ! KhTh_Slope_Cff>0, this is set to true regardless of what is in the parameter + ! file. +! === module MOM_set_visc === +CHANNEL_DRAG = True ! [Boolean] default = False + ! If true, the bottom drag is exerted directly on each layer proportional to the + ! fraction of the bottom it overlies. +HBBL = 10.0 ! [m] + ! The thickness of a bottom boundary layer with a viscosity of KVBBL if + ! BOTTOMDRAGLAW is not defined, or the thickness over which near-bottom + ! velocities are averaged for the drag law if BOTTOMDRAGLAW is defined but + ! LINEAR_DRAG is not. +KV = 1.0E-04 ! [m2 s-1] + ! The background kinematic viscosity in the interior. The molecular value, ~1e-6 + ! m2 s-1, may be used. + +! === module MOM_continuity === + +! === module MOM_continuity_PPM === + +! === module MOM_CoriolisAdv === +CORIOLIS_SCHEME = "SADOURNY75_ENSTRO" ! default = "SADOURNY75_ENERGY" + ! CORIOLIS_SCHEME selects the discretization for the Coriolis terms. Valid + ! values are: + ! SADOURNY75_ENERGY - Sadourny, 1975; energy cons. + ! ARAKAWA_HSU90 - Arakawa & Hsu, 1990 + ! SADOURNY75_ENSTRO - Sadourny, 1975; enstrophy cons. + ! ARAKAWA_LAMB81 - Arakawa & Lamb, 1981; En. + Enst. + ! ARAKAWA_LAMB_BLEND - A blend of Arakawa & Lamb with + ! Arakawa & Hsu and Sadourny energy +BOUND_CORIOLIS = True ! [Boolean] default = False + ! If true, the Coriolis terms at u-points are bounded by the four estimates of + ! (f+rv)v from the four neighboring v-points, and similarly at v-points. This + ! option would have no effect on the SADOURNY Coriolis scheme if it were + ! possible to use centered difference thickness fluxes. + +! === module MOM_PressureForce === + +! === module MOM_PressureForce_AFV === +MASS_WEIGHT_IN_PRESSURE_GRADIENT = True ! [Boolean] default = False + ! If true, use mass weighting when interpolating T/S for integrals near the + ! bathymetry in AFV pressure gradient calculations. + +! === module MOM_hor_visc === +LAPLACIAN = True ! [Boolean] default = False + ! If true, use a Laplacian horizontal viscosity. +KH_VEL_SCALE = 0.01 ! [m s-1] default = 0.0 + ! 
The velocity scale which is multiplied by the grid spacing to calculate the + ! Laplacian viscosity. The final viscosity is the largest of this scaled + ! viscosity, the Smagorinsky and Leith viscosities, and KH. +KH_SIN_LAT = 2000.0 ! [m2 s-1] default = 0.0 + ! The amplitude of a latitudinally-dependent background viscosity of the form + ! KH_SIN_LAT*(SIN(LAT)**KH_PWR_OF_SINE). +SMAGORINSKY_KH = True ! [Boolean] default = False + ! If true, use a Smagorinsky nonlinear eddy viscosity. +SMAG_LAP_CONST = 0.15 ! [nondim] default = 0.0 + ! The nondimensional Laplacian Smagorinsky constant, often 0.15. +AH_VEL_SCALE = 0.01 ! [m s-1] default = 0.0 + ! The velocity scale which is multiplied by the cube of the grid spacing to + ! calculate the biharmonic viscosity. The final viscosity is the largest of this + ! scaled viscosity, the Smagorinsky and Leith viscosities, and AH. +SMAGORINSKY_AH = True ! [Boolean] default = False + ! If true, use a biharmonic Smagorinsky nonlinear eddy viscosity. +SMAG_BI_CONST = 0.06 ! [nondim] default = 0.0 + ! The nondimensional biharmonic Smagorinsky constant, typically 0.015 - 0.06. +USE_LAND_MASK_FOR_HVISC = True ! [Boolean] default = False + ! If true, use Use the land mask for the computation of thicknesses at velocity + ! locations. This eliminates the dependence on arbitrary values over land or + ! outside of the domain. + +! === module MOM_vert_friction === +HMIX_FIXED = 0.5 ! [m] + ! The prescribed depth over which the near-surface viscosity and diffusivity are + ! elevated when the bulk mixed layer is not used. +KVML = 1.0E-04 ! [m2 s-1] default = 1.0E-04 + ! The kinematic viscosity in the mixed layer. A typical value is ~1e-2 m2 s-1. + ! KVML is not used if BULKMIXEDLAYER is true. The default is set by KV. +MAXVEL = 6.0 ! [m s-1] default = 3.0E+08 + ! The maximum velocity allowed before the velocity components are truncated. + +! === module MOM_barotropic === +BOUND_BT_CORRECTION = True ! [Boolean] default = False + ! If true, the corrective pseudo mass-fluxes into the barotropic solver are + ! limited to values that require less than maxCFL_BT_cont to be accommodated. +BT_PROJECT_VELOCITY = True ! [Boolean] default = False + ! If true, step the barotropic velocity first and project out the velocity + ! tendency by 1+BEBT when calculating the transport. The default (false) is to + ! use a predictor continuity step to find the pressure field, and then to do a + ! corrector continuity step using a weighted average of the old and new + ! velocities, with weights of (1-BEBT) and BEBT. +DYNAMIC_SURFACE_PRESSURE = False ! [Boolean] default = False + ! If true, add a dynamic pressure due to a viscous ice shelf, for instance. +BEBT = 0.2 ! [nondim] default = 0.1 + ! BEBT determines whether the barotropic time stepping uses the forward-backward + ! time-stepping scheme or a backward Euler scheme. BEBT is valid in the range + ! from 0 (for a forward-backward treatment of nonrotating gravity waves) to 1 + ! (for a backward Euler treatment). In practice, BEBT must be greater than about + ! 0.05. +DTBT = -0.9 ! [s or nondim] default = -0.98 + ! The barotropic time step, in s. DTBT is only used with the split explicit time + ! stepping. To set the time step automatically based the maximum stable value + ! use 0, or a negative value gives the fraction of the stable value. Setting + ! DTBT to 0 is the same as setting it to -0.98. The value of DTBT that will + ! actually be used is an integer fraction of DT, rounding down. + +! 
=== module MOM_mixed_layer_restrat === +MIXEDLAYER_RESTRAT = False ! [Boolean] default = False + ! If true, a density-gradient dependent re-stratifying flow is imposed in the + ! mixed layer. Can be used in ALE mode without restriction but in layer mode can + ! only be used if BULKMIXEDLAYER is true. +FOX_KEMPER_ML_RESTRAT_COEF = 60.0 ! [nondim] default = 0.0 + ! A nondimensional coefficient that is proportional to the ratio of the + ! deformation radius to the dominant lengthscale of the submesoscale mixed layer + ! instabilities, times the minimum of the ratio of the mesoscale eddy kinetic + ! energy to the large-scale geostrophic kinetic energy or 1 plus the square of + ! the grid spacing over the deformation radius, as detailed by Fox-Kemper et al. + ! (2010) +MLE_FRONT_LENGTH = 200.0 ! [m] default = 0.0 + ! If non-zero, is the frontal-length scale used to calculate the upscaling of + ! buoyancy gradients that is otherwise represented by the parameter + ! FOX_KEMPER_ML_RESTRAT_COEF. If MLE_FRONT_LENGTH is non-zero, it is recommended + ! to set FOX_KEMPER_ML_RESTRAT_COEF=1.0. +MLE_USE_PBL_MLD = True ! [Boolean] default = False + ! If true, the MLE parameterization will use the mixed-layer depth provided by + ! the active PBL parameterization. If false, MLE will estimate a MLD based on a + ! density difference with the surface using the parameter MLE_DENSITY_DIFF. +MLE_MLD_DECAY_TIME = 2.592E+06 ! [s] default = 0.0 + ! The time-scale for a running-mean filter applied to the mixed-layer depth used + ! in the MLE restratification parameterization. When the MLD deepens below the + ! current running-mean the running-mean is instantaneously set to the current + ! MLD. + +! === module MOM_diabatic_driver === +! The following parameters are used for diabatic processes. +ENERGETICS_SFC_PBL = True ! [Boolean] default = False + ! If true, use an implied energetics planetary boundary layer scheme to + ! determine the diffusivity and viscosity in the surface boundary layer. +EPBL_IS_ADDITIVE = False ! [Boolean] default = True + ! If true, the diffusivity from ePBL is added to all other diffusivities. + ! Otherwise, the larger of kappa-shear and ePBL diffusivities are used. + +! === module MOM_CVMix_KPP === +! This is the MOM wrapper to CVMix:KPP +! See http://cvmix.github.io/ + +! === module MOM_tidal_mixing === +! Vertical Tidal Mixing Parameterization + +! === module MOM_CVMix_conv === +! Parameterization of enhanced mixing due to convection via CVMix + +! === module MOM_set_diffusivity === + +! === module MOM_bkgnd_mixing === +! Adding static vertical background mixing coefficients +KD = 1.5E-05 ! [m2 s-1] default = 0.0 + ! The background diapycnal diffusivity of density in the interior. Zero or the + ! molecular value, ~1e-7 m2 s-1, may be used. +KD_MIN = 2.0E-06 ! [m2 s-1] default = 2.0E-07 + ! The minimum diapycnal diffusivity. +HENYEY_IGW_BACKGROUND = True ! [Boolean] default = False + ! If true, use a latitude-dependent scaling for the near surface background + ! diffusivity, as described in Harrison & Hallberg, JPO 2008. + +! === module MOM_kappa_shear === +! Parameterization of shear-driven turbulence following Jackson, Hallberg and Legg, JPO 2008 +USE_JACKSON_PARAM = True ! [Boolean] default = False + ! If true, use the Jackson-Hallberg-Legg (JPO 2008) shear mixing + ! parameterization. +MAX_RINO_IT = 25 ! [nondim] default = 50 + ! The maximum number of iterations that may be used to estimate the Richardson + ! number driven mixing. + +! === module MOM_CVMix_shear === +! 
Parameterization of shear-driven turbulence via CVMix (various options) + +! === module MOM_CVMix_ddiff === +! Parameterization of mixing due to double diffusion processes via CVMix + +! === module MOM_diabatic_aux === +! The following parameters are used for auxiliary diabatic processes. + +! === module MOM_energetic_PBL === +EPBL_USTAR_MIN = 1.45842E-18 ! [m s-1] + ! The (tiny) minimum friction velocity used within the ePBL code, derived from + ! OMEGA and ANGSTROM.. +USE_LA_LI2016 = @[MOM6_USE_LI2016] ! [nondim] default = False + ! A logical to use the Li et al. 2016 (submitted) formula to determine the + ! Langmuir number. +USE_WAVES = @[MOM6_USE_WAVES] ! [Boolean] default = False + ! If true, enables surface wave modules. + +! === module MOM_regularize_layers === + +! === module MOM_opacity === + +! === module MOM_tracer_advect === +TRACER_ADVECTION_SCHEME = "PPM:H3" ! default = "PLM" + ! The horizontal transport scheme for tracers: + ! PLM - Piecewise Linear Method + ! PPM:H3 - Piecewise Parabolic Method (Huyhn 3rd order) + ! PPM - Piecewise Parabolic Method (Colella-Woodward) + +! === module MOM_tracer_hor_diff === +KHTR = 50.0 ! [m2 s-1] default = 0.0 + ! The background along-isopycnal tracer diffusivity. +CHECK_DIFFUSIVE_CFL = True ! [Boolean] default = False + ! If true, use enough iterations the diffusion to ensure that the diffusive + ! equivalent of the CFL limit is not violated. If false, always use the greater + ! of 1 or MAX_TR_DIFFUSION_CFL iteration. +MAX_TR_DIFFUSION_CFL = 2.0 ! [nondim] default = -1.0 + ! If positive, locally limit the along-isopycnal tracer diffusivity to keep the + ! diffusive CFL locally at or below this value. The number of diffusive + ! iterations is often this value or the next greater integer. + +! === module MOM_neutral_diffusion === +! This module implements neutral diffusion of tracers +USE_NEUTRAL_DIFFUSION = True ! [Boolean] default = False + ! If true, enables the neutral diffusion module. + +! === module MOM_sum_output === +MAXTRUNC = 1000 ! [truncations save_interval-1] default = 0 + ! The run will be stopped, and the day set to a very large value if the velocity + ! is truncated more than MAXTRUNC times between energy saves. Set MAXTRUNC to 0 + ! to stop if there is any truncation of velocities. + +! === module ocean_model_init === + +! === module MOM_oda_incupd === +ODA_INCUPD = @[ODA_INCUPD] ! [Boolean] default = False + ! If true, oda incremental updates will be applied + ! everywhere in the domain. +ODA_INCUPD_FILE = "mom6_increment.nc" ! The name of the file with the T,S,h increments. + +ODA_TEMPINC_VAR = "Temp" ! default = "ptemp_inc" + ! The name of the potential temperature inc. variable in + ! ODA_INCUPD_FILE. +ODA_SALTINC_VAR = "Salt" ! default = "sal_inc" + ! The name of the salinity inc. variable in + ! ODA_INCUPD_FILE. +ODA_THK_VAR = "h" ! default = "h" + ! The name of the int. depth inc. variable in + ! ODA_INCUPD_FILE. +ODA_INCUPD_UV = false ! +!ODA_UINC_VAR = "u" ! default = "u_inc" + ! The name of the zonal vel. inc. variable in + ! ODA_INCUPD_UV_FILE. +!ODA_VINC_VAR = "v" ! default = "v_inc" + ! The name of the meridional vel. inc. variable in + ! ODA_INCUPD_UV_FILE. +ODA_INCUPD_NHOURS = @[ODA_INCUPD_NHOURS] ! default=3.0 + +! === module MOM_surface_forcing === +OCEAN_SURFACE_STAGGER = "A" ! default = "C" + ! A case-insensitive character string to indicate the + ! staggering of the surface velocity field that is + ! returned to the coupler. Valid values include + ! 'A', 'B', or 'C'. + +MAX_P_SURF = 0.0 ! 
[Pa] default = -1.0 + ! The maximum surface pressure that can be exerted by the atmosphere and + ! floating sea-ice or ice shelves. This is needed because the FMS coupling + ! structure does not limit the water that can be frozen out of the ocean and the + ! ice-ocean heat fluxes are treated explicitly. No limit is applied if a + ! negative value is used. +WIND_STAGGER = "A" ! default = "C" + ! A case-insensitive character string to indicate the + ! staggering of the input wind stress field. Valid + ! values are 'A', 'B', or 'C'. +! === module MOM_restart === + +! === module MOM_file_parser === diff --git a/parm/parm_fv3diag/diag_table b/parm/parm_fv3diag/diag_table index e20c71e029..37421f8a4f 100644 --- a/parm/parm_fv3diag/diag_table +++ b/parm/parm_fv3diag/diag_table @@ -1,6 +1,83 @@ "fv3_history", 0, "hours", 1, "hours", "time" "fv3_history2d", 0, "hours", 1, "hours", "time" +"ocn%4yr%2mo%2dy%2hr", 6, "hours", 1, "hours", "time", 6, "hours", "1901 1 1 0 0 0" +"ocn_daily%4yr%2mo%2dy", 1, "days", 1, "days", "time", 1, "days", "1901 1 1 0 0 0" +############## +# Ocean fields +############## +# static fields +"ocean_model", "geolon", "geolon", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "geolat", "geolat", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "geolon_c", "geolon_c", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "geolat_c", "geolat_c", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "geolon_u", "geolon_u", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "geolat_u", "geolat_u", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "geolon_v", "geolon_v", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "geolat_v", "geolat_v", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +# "ocean_model", "depth_ocean", "depth_ocean", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +# "ocean_model", "wet", "wet", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "wet_c", "wet_c", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "wet_u", "wet_u", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "wet_v", "wet_v", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "sin_rot", "sin_rot", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 +"ocean_model", "cos_rot", "cos_rot", "ocn%4yr%2mo%2dy%2hr", "all", .false., "none", 2 + +# ocean output TSUV and others +"ocean_model", "SSH", "SSH", "ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +"ocean_model", "SST", "SST", "ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +"ocean_model", "SSS", "SSS", "ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +"ocean_model", "speed", "speed", "ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +"ocean_model", "SSU", "SSU", "ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +"ocean_model", "SSV", "SSV", "ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +"ocean_model", "frazil", "frazil", "ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +"ocean_model", "ePBL_h_ML", "ePBL", "ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +"ocean_model", "MLD_003", "MLD_003", "ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +"ocean_model", "MLD_0125", "MLD_0125", "ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 + +# Z-Space Fields Provided for CMIP6 (CMOR Names): +"ocean_model_z", "uo", "uo", "ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +"ocean_model_z", "vo", "vo", "ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 +"ocean_model_z", "so", "so", "ocn%4yr%2mo%2dy%2hr", "all", 
.true., "none", 2 +"ocean_model_z", "temp", "temp", "ocn%4yr%2mo%2dy%2hr", "all", .true., "none", 2 + +# forcing +"ocean_model", "taux", "taux", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +"ocean_model", "tauy", "tauy", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +"ocean_model", "latent", "latent", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +"ocean_model", "sensible", "sensible", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +"ocean_model", "SW", "SW", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +"ocean_model", "LW", "LW", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +"ocean_model", "evap", "evap", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +"ocean_model", "lprec", "lprec", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +"ocean_model", "lrunoff", "lrunoff", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +# "ocean_model", "frunoff", "frunoff", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +"ocean_model", "fprec", "fprec", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +"ocean_model", "LwLatSens", "LwLatSens", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 +"ocean_model", "Heat_PmE", "Heat_PmE", "ocn%4yr%2mo%2dy%2hr","all",.true.,"none",2 + +# Daily fields +"ocean_model", "geolon", "geolon", "ocn_daily%4yr%2mo%2dy", "all", .false., "none", 2 +"ocean_model", "geolat", "geolat", "ocn_daily%4yr%2mo%2dy", "all", .false., "none", 2 +"ocean_model", "geolon_c", "geolon_c", "ocn_daily%4yr%2mo%2dy", "all", .false., "none", 2 +"ocean_model", "geolat_c", "geolat_c", "ocn_daily%4yr%2mo%2dy", "all", .false., "none", 2 +"ocean_model", "geolon_u", "geolon_u", "ocn_daily%4yr%2mo%2dy", "all", .false., "none", 2 +"ocean_model", "geolat_u", "geolat_u", "ocn_daily%4yr%2mo%2dy", "all", .false., "none", 2 +"ocean_model", "geolon_v", "geolon_v", "ocn_daily%4yr%2mo%2dy", "all", .false., "none", 2 +"ocean_model", "geolat_v", "geolat_v", "ocn_daily%4yr%2mo%2dy", "all", .false., "none", 2 +"ocean_model", "SST", "sst", "ocn_daily%4yr%2mo%2dy", "all", .true., "none", 2 +"ocean_model", "latent", "latent", "ocn_daily%4yr%2mo%2dy", "all", .true., "none", 2 +"ocean_model", "sensible", "sensible", "ocn_daily%4yr%2mo%2dy", "all", .true., "none", 2 +"ocean_model", "SW", "SW", "ocn_daily%4yr%2mo%2dy", "all", .true., "none", 2 +"ocean_model", "LW", "LW", "ocn_daily%4yr%2mo%2dy", "all", .true., "none", 2 +"ocean_model", "evap", "evap", "ocn_daily%4yr%2mo%2dy", "all", .true., "none", 2 +"ocean_model", "lprec", "lprec", "ocn_daily%4yr%2mo%2dy", "all", .true., "none", 2 +"ocean_model", "taux", "taux", "ocn_daily%4yr%2mo%2dy", "all", .true., "none", 2 +"ocean_model", "tauy", "tauy", "ocn_daily%4yr%2mo%2dy", "all", .true., "none", 2 + +################### +# Atmosphere fields +################### "gfs_dyn", "ucomp", "ugrd", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "vcomp", "vgrd", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "sphum", "spfh", "fv3_history", "all", .false., "none", 2 @@ -22,6 +99,14 @@ "gfs_dyn", "ps", "pressfc", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "hs", "hgtsfc", "fv3_history", "all", .false., "none", 2 +"gfs_phys", "cldfra", "cldfra", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "frzr", "frzr", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "frzrb", "frzrb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "frozr", "frozr", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "frozrb", "frozrb", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "tsnowp", "tsnowp", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "tsnowpb", "tsnowpb", "fv3_history2d", 
"all", .false., "none", 2 +"gfs_phys", "rhonewsn", "rhonewsn", "fv3_history2d", "all", .false., "none", 2 "gfs_phys", "ALBDO_ave", "albdo_ave", "fv3_history2d", "all", .false., "none", 2 "gfs_phys", "cnvprcp_ave", "cprat_ave", "fv3_history2d", "all", .false., "none", 2 "gfs_phys", "cnvprcpb_ave", "cpratb_ave", "fv3_history2d", "all", .false., "none", 2 diff --git a/parm/parm_fv3diag/diag_table_aod b/parm/parm_fv3diag/diag_table_aod index fd8aee1791..0de51b66d8 100644 --- a/parm/parm_fv3diag/diag_table_aod +++ b/parm/parm_fv3diag/diag_table_aod @@ -3,4 +3,4 @@ "gfs_phys", "SU_AOD_550", "su_aod550", "fv3_history2d", "all", .false., "none", 2 "gfs_phys", "BC_AOD_550", "bc_aod550", "fv3_history2d", "all", .false., "none", 2 "gfs_phys", "OC_AOD_550", "oc_aod550", "fv3_history2d", "all", .false., "none", 2 -"gfs_phys", "SS_AOD_550", "ss_aod550", "fv3_history2d", "all", .false., "none", 2 +"gfs_phys", "SS_AOD_550", "ss_aod550", "fv3_history2d", "all", .false., "none", 2 \ No newline at end of file diff --git a/parm/parm_fv3diag/diag_table_da b/parm/parm_fv3diag/diag_table_da index a73ebfea76..cdcc36ee57 100644 --- a/parm/parm_fv3diag/diag_table_da +++ b/parm/parm_fv3diag/diag_table_da @@ -1,5 +1,16 @@ "fv3_history", 0, "hours", 1, "hours", "time" "fv3_history2d", 0, "hours", 1, "hours", "time" +"ocn_da%4yr%2mo%2dy%2hr", 1, "hours", 1, "hours", "time", 1, "hours" + +"ocean_model", "geolon", "geolon", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 +"ocean_model", "geolat", "geolat", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 +"ocean_model", "SSH", "ave_ssh", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 +"ocean_model", "MLD_0125", "MLD", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 +"ocean_model", "u", "u", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 +"ocean_model", "v", "v", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 +"ocean_model", "h", "h", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 +"ocean_model", "salt", "Salt", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 +"ocean_model", "temp", "Temp", "ocn_da%4yr%2mo%2dy%2hr", "all", "none", "none", 2 "gfs_dyn", "ucomp", "ugrd", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "vcomp", "vgrd", "fv3_history", "all", .false., "none", 2 @@ -22,6 +33,7 @@ "gfs_dyn", "ps", "pressfc", "fv3_history", "all", .false., "none", 2 "gfs_dyn", "hs", "hgtsfc", "fv3_history", "all", .false., "none", 2 +"gfs_phys", "cldfra", "cldfra", "fv3_history2d", "all", .false., "none", 2 "gfs_phys", "ALBDO_ave", "albdo_ave", "fv3_history2d", "all", .false., "none", 2 "gfs_phys", "cnvprcp_ave", "cprat_ave", "fv3_history2d", "all", .false., "none", 2 "gfs_phys", "cnvprcpb_ave", "cpratb_ave", "fv3_history2d", "all", .false., "none", 2 diff --git a/parm/parm_fv3diag/diag_table_history b/parm/parm_fv3diag/diag_table_history deleted file mode 100644 index 9a5766c27c..0000000000 --- a/parm/parm_fv3diag/diag_table_history +++ /dev/null @@ -1,89 +0,0 @@ -#"atmos_static", -1, "hours", 1, "hours", "time" -"fv3_history", 0, "hours", 1, "hours", "time" -"fv3_history2d", 0, "hours", 1, "hours", "time" -# -# static data -# "dynamics", "pk", "pk", "atmos_static", "all", .false., "none", 2 -# "dynamics", "bk", "bk", "atmos_static", "all", .false., "none", 2 -# "dynamics", "hyam", "hyam", "atmos_static", "all", .false., "none", 2 -# "dynamics", "hybm", "hybm", "atmos_static", "all", .false., "none", 2 -# "dynamics", "zsurf", "zsurf", "atmos_static", "all", .false., "none", 2 -# -# history files -"gfs_dyn", "ucomp", "ucomp", "fv3_history", "all", 
.false., "none", 2 -"gfs_dyn", "vcomp", "vcomp", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "sphum", "sphum", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "temp", "temp", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "liq_wat", "liq_wat", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "o3mr", "o3mr", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "delp", "delp", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "pfhy", "hypres", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "pfnh", "nhpres", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "w", "vvel", "fv3_history", "all", .false., "none", 2 -"gfs_dyn", "delz", "delz", "fv3_history", "all", .false., "none", 2 -# -"gfs_sfc" "hgtsfc" "hgtsfc" "fv3_history2d" "all" .false. "none" 2 -"gfs_phys" "psurf" "pressfc" "fv3_history2d" "all" .false. "none" 2 -"gfs_phys" "u10m" "u10m" "fv3_history2d" "all" .false. "none" 2 -"gfs_phys" "v10m" "v10m" "fv3_history2d" "all" .false. "none" 2 -"gfs_phys" "soilm" "soilm" "fv3_history2d" "all" .false. "none" 2 -"gfs_phys" "cnvprcp" "cnvprcp" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "tprcp" "tprcp" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "weasd" "weasd" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "f10m" "f10m" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "q2m" "q2m" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "t2m" "t2m" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "tsfc" "tsfc" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "vtype" "vtype" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "stype" "stype" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "slmsksfc" "slmsk" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "vfracsfc" "vfrac" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "zorlsfc" "zorl" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "uustar" "uustar" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "soilt1" "soilt1" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "soilt2" "soilt2" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "soilt3" "soilt3" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "soilt4" "soilt4" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "soilw1" "soilw1" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "soilw2" "soilw2" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "soilw3" "soilw3" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "soilw4" "soilw4" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "slc_1" "slc_1" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "slc_2" "slc_2" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "slc_3" "slc_3" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "slc_4" "slc_4" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "slope" "slope" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "alnsf" "alnsf" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "alnwf" "alnwf" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "alvsf" "alvsf" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "alvwf" "alvwf" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "canopy" "canopy" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "facsf" "facsf" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "facwf" "facwf" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "ffhh" "ffhh" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "ffmm" "ffmm" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "fice" "fice" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "hice" "hice" "fv3_history2d" "all" .false. 
"none" 2 -"gfs_sfc" "snoalb" "snoalb" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "shdmax" "shdmax" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "shdmin" "shdmin" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "snowd" "snowd" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "tg3" "tg3" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "tisfc" "tisfc" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "tref" "tref" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "z_c" "z_c" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "c_0" "c_0" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "c_d" "c_d" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "w_0" "w_0" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "w_d" "w_d" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "xt" "xt" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "xz" "xz" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "dt_cool" "dt_cool" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "xs" "xs" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "xu" "xu" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "xv" "xv" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "xtts" "xtts" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "xzts" "xzts" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "d_conv" "d_conv" "fv3_history2d" "all" .false. "none" 2 -"gfs_sfc" "qrain" "qrain" "fv3_history2d" "all" .false. "none" 2 - diff --git a/parm/parm_fv3diag/field_table_gfdl_progsigma b/parm/parm_fv3diag/field_table_gfdl_progsigma new file mode 100644 index 0000000000..f7668455da --- /dev/null +++ b/parm/parm_fv3diag/field_table_gfdl_progsigma @@ -0,0 +1,42 @@ +# added by FRE: sphum must be present in atmos +# specific humidity for moist runs + "TRACER", "atmos_mod", "sphum" + "longname", "specific humidity" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic cloud water mixing ratio + "TRACER", "atmos_mod", "liq_wat" + "longname", "cloud water mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / + "TRACER", "atmos_mod", "rainwat" + "longname", "rain mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / + "TRACER", "atmos_mod", "ice_wat" + "longname", "cloud ice mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / + "TRACER", "atmos_mod", "snowwat" + "longname", "snow mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / + "TRACER", "atmos_mod", "graupel" + "longname", "graupel mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic ozone mixing ratio tracer + "TRACER", "atmos_mod", "o3mr" + "longname", "ozone mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognotsitc sigmab tracer + "TRACER", "atmos_mod", "sigmab" + "longname", "sigma fraction" + "units", "fraction" + "profile_type", "fixed", "surface_value=0.0" / +# non-prognostic cloud amount + "TRACER", "atmos_mod", "cld_amt" + "longname", "cloud amount" + "units", "1" + "profile_type", "fixed", "surface_value=1.e30" / diff --git a/parm/parm_fv3diag/field_table_gfdl_satmedmf_progsigma b/parm/parm_fv3diag/field_table_gfdl_satmedmf_progsigma new file mode 100644 index 0000000000..edc5389839 --- /dev/null +++ b/parm/parm_fv3diag/field_table_gfdl_satmedmf_progsigma @@ -0,0 +1,47 @@ +# added by FRE: sphum must be present in atmos +# specific humidity for moist runs + "TRACER", "atmos_mod", "sphum" + "longname", "specific humidity" + "units", 
"kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic cloud water mixing ratio + "TRACER", "atmos_mod", "liq_wat" + "longname", "cloud water mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / + "TRACER", "atmos_mod", "rainwat" + "longname", "rain mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / + "TRACER", "atmos_mod", "ice_wat" + "longname", "cloud ice mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / + "TRACER", "atmos_mod", "snowwat" + "longname", "snow mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / + "TRACER", "atmos_mod", "graupel" + "longname", "graupel mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic ozone mixing ratio tracer + "TRACER", "atmos_mod", "o3mr" + "longname", "ozone mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic subgrid scale turbulent kinetic energy + "TRACER", "atmos_mod", "sgs_tke" + "longname", "subgrid scale turbulent kinetic energy" + "units", "m2/s2" + "profile_type", "fixed", "surface_value=0.0" / +# prognotsitc sigmab tracer + "TRACER", "atmos_mod", "sigmab" + "longname", "sigma fraction" + "units", "fraction" + "profile_type", "fixed", "surface_value=0.0" / +# non-prognostic cloud amount + "TRACER", "atmos_mod", "cld_amt" + "longname", "cloud amount" + "units", "1" + "profile_type", "fixed", "surface_value=1.e30" / diff --git a/parm/parm_fv3diag/field_table_thompson_aero_tke_progsigma b/parm/parm_fv3diag/field_table_thompson_aero_tke_progsigma new file mode 100644 index 0000000000..fcb719acd6 --- /dev/null +++ b/parm/parm_fv3diag/field_table_thompson_aero_tke_progsigma @@ -0,0 +1,70 @@ +# added by FRE: sphum must be present in atmos +# specific humidity for moist runs + "TRACER", "atmos_mod", "sphum" + "longname", "specific humidity" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=3.e-6" / +# prognostic cloud water mixing ratio + "TRACER", "atmos_mod", "liq_wat" + "longname", "cloud water mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic ice water mixing ratio + "TRACER", "atmos_mod", "ice_wat" + "longname", "cloud ice mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic rain water mixing ratio + "TRACER", "atmos_mod", "rainwat" + "longname", "rain water mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic snow water mixing ratio + "TRACER", "atmos_mod", "snowwat" + "longname", "snow water mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic Grau water mixing ratio + "TRACER", "atmos_mod", "graupel" + "longname", "graupel mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic cloud water number concentration + "TRACER", "atmos_mod", "water_nc" + "longname", "cloud liquid water number concentration" + "units", "/kg" + "profile_type", "fixed", "surface_value=0.0" / +# prognostic cloud ice number concentration + "TRACER", "atmos_mod", "ice_nc" + "longname", "cloud ice water number concentration" + "units", "/kg" + "profile_type", "fixed", "surface_value=0.0" / +# prognostic rain number concentration + "TRACER", "atmos_mod", "rain_nc" + "longname", "rain number concentration" + "units", "/kg" + "profile_type", "fixed", "surface_value=0.0" / +# prognostic ozone mixing ratio 
tracer + "TRACER", "atmos_mod", "o3mr" + "longname", "ozone mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# water- and ice-friendly aerosols (Thompson) + "TRACER", "atmos_mod", "liq_aero" + "longname", "water-friendly aerosol number concentration" + "units", "/kg" + "profile_type", "fixed", "surface_value=0.0" / + "TRACER", "atmos_mod", "ice_aero" + "longname", "ice-friendly aerosol number concentration" + "units", "/kg" + "profile_type", "fixed", "surface_value=0.0" / +# prognostic subgrid scale turbulent kinetic energy + "TRACER", "atmos_mod", "sgs_tke" + "longname", "subgrid scale turbulent kinetic energy" + "units", "m2/s2" + "profile_type", "fixed", "surface_value=0.0" / +# prognotsitc sigmab tracer + "TRACER", "atmos_mod", "sigmab" + "longname", "sigma fraction" + "units", "fraction" + "profile_type", "fixed", "surface_value=0.0" / diff --git a/parm/parm_fv3diag/field_table_thompson_noaero_tke_progsigma b/parm/parm_fv3diag/field_table_thompson_noaero_tke_progsigma new file mode 100644 index 0000000000..f424eb0d21 --- /dev/null +++ b/parm/parm_fv3diag/field_table_thompson_noaero_tke_progsigma @@ -0,0 +1,70 @@ +# added by FRE: sphum must be present in atmos +# specific humidity for moist runs + "TRACER", "atmos_mod", "sphum" + "longname", "specific humidity" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=3.e-6" / +# prognostic cloud water mixing ratio + "TRACER", "atmos_mod", "liq_wat" + "longname", "cloud water mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic ice water mixing ratio + "TRACER", "atmos_mod", "ice_wat" + "longname", "cloud ice mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic rain water mixing ratio + "TRACER", "atmos_mod", "rainwat" + "longname", "rain water mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic snow water mixing ratio + "TRACER", "atmos_mod", "snowwat" + "longname", "snow water mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic Grau water mixing ratio + "TRACER", "atmos_mod", "graupel" + "longname", "graupel mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic cloud water number concentration - not for non-aerosol runs +# "TRACER", "atmos_mod", "water_nc" +# "longname", "cloud liquid water number concentration" +# "units", "/kg" +# "profile_type", "fixed", "surface_value=0.0" / +# prognostic cloud ice number concentration + "TRACER", "atmos_mod", "ice_nc" + "longname", "cloud ice water number concentration" + "units", "/kg" + "profile_type", "fixed", "surface_value=0.0" / +# prognostic rain number concentration + "TRACER", "atmos_mod", "rain_nc" + "longname", "rain number concentration" + "units", "/kg" + "profile_type", "fixed", "surface_value=0.0" / +# prognostic ozone mixing ratio tracer + "TRACER", "atmos_mod", "o3mr" + "longname", "ozone mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# water- and ice-friendly aerosols (Thompson) - not for non-aerosol runs +# "TRACER", "atmos_mod", "liq_aero" +# "longname", "water-friendly aerosol number concentration" +# "units", "/kg" +# "profile_type", "fixed", "surface_value=0.0" / +# "TRACER", "atmos_mod", "ice_aero" +# "longname", "ice-friendly aerosol number concentration" +# "units", "/kg" +# "profile_type", "fixed", "surface_value=0.0" / +# prognostic subgrid scale turbulent kinetic energy 
+ "TRACER", "atmos_mod", "sgs_tke" + "longname", "subgrid scale turbulent kinetic energy" + "units", "m2/s2" + "profile_type", "fixed", "surface_value=0.0" / +# prognotsitc sigmab tracer + "TRACER", "atmos_mod", "sigmab" + "longname", "sigma fraction" + "units", "fraction" + "profile_type", "fixed", "surface_value=0.0" / \ No newline at end of file diff --git a/parm/parm_fv3diag/field_table_wsm6_progsigma b/parm/parm_fv3diag/field_table_wsm6_progsigma new file mode 100644 index 0000000000..3bc52e1296 --- /dev/null +++ b/parm/parm_fv3diag/field_table_wsm6_progsigma @@ -0,0 +1,38 @@ +# added by FRE: sphum must be present in atmos +# specific humidity for moist runs + "TRACER", "atmos_mod", "sphum" + "longname", "specific humidity" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=3.e-6" / +# prognostic cloud water mixing ratio + "TRACER", "atmos_mod", "liq_wat" + "longname", "cloud water mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / + "TRACER", "atmos_mod", "ice_wat" + "longname", "ice water mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=3.e-6" / +# prognostic cloud water mixing ratio + "TRACER", "atmos_mod", "rainwat" + "longname", "rain water mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / + "TRACER", "atmos_mod", "snowwat" + "longname", "snow water mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / + "TRACER", "atmos_mod", "graupel" + "longname", "graupel mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic ozone mixing ratio tracer + "TRACER", "atmos_mod", "o3mr" + "longname", "ozone mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognotsitc sigmab tracer + "TRACER", "atmos_mod", "sigmab" + "longname", "sigma fraction" + "units", "fraction" + "profile_type", "fixed", "surface_value=0.0" / diff --git a/parm/parm_fv3diag/field_table_wsm6_satmedmf_progsigma b/parm/parm_fv3diag/field_table_wsm6_satmedmf_progsigma new file mode 100644 index 0000000000..a73d13dbbf --- /dev/null +++ b/parm/parm_fv3diag/field_table_wsm6_satmedmf_progsigma @@ -0,0 +1,43 @@ +# added by FRE: sphum must be present in atmos +# specific humidity for moist runs + "TRACER", "atmos_mod", "sphum" + "longname", "specific humidity" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=3.e-6" / +# prognostic cloud water mixing ratio + "TRACER", "atmos_mod", "liq_wat" + "longname", "cloud water mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / + "TRACER", "atmos_mod", "ice_wat" + "longname", "ice water mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=3.e-6" / +# prognostic cloud water mixing ratio + "TRACER", "atmos_mod", "rainwat" + "longname", "rain water mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / + "TRACER", "atmos_mod", "snowwat" + "longname", "snow water mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / + "TRACER", "atmos_mod", "graupel" + "longname", "graupel mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic ozone mixing ratio tracer + "TRACER", "atmos_mod", "o3mr" + "longname", "ozone mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic subgrid scale turbulent kinetic energy + "TRACER", "atmos_mod", "sgs_tke" + "longname", "subgrid scale turbulent kinetic 
energy" + "units", "m2/s2" + "profile_type", "fixed", "surface_value=0.0" / +# prognotsitc sigmab tracer + "TRACER", "atmos_mod", "sigmab" + "longname", "sigma fraction" + "units", "fraction" + "profile_type", "fixed", "surface_value=0.0" / diff --git a/parm/parm_fv3diag/field_table_zhaocarr_progsigma b/parm/parm_fv3diag/field_table_zhaocarr_progsigma new file mode 100644 index 0000000000..9a1a1abf5d --- /dev/null +++ b/parm/parm_fv3diag/field_table_zhaocarr_progsigma @@ -0,0 +1,21 @@ +# added by FRE: sphum must be present in atmos +# specific humidity for moist runs + "TRACER", "atmos_mod", "sphum" + "longname", "specific humidity" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=3.e-6" / +# prognostic cloud water mixing ratio + "TRACER", "atmos_mod", "liq_wat" + "longname", "cloud water mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic ozone mixing ratio tracer + "TRACER", "atmos_mod", "o3mr" + "longname", "ozone mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognotsitc sigmab tracer + "TRACER", "atmos_mod", "sigmab" + "longname", "sigma fraction" + "units", "fraction" + "profile_type", "fixed", "surface_value=0.0" / diff --git a/parm/parm_fv3diag/field_table_zhaocarr_satmedmf_progsigma b/parm/parm_fv3diag/field_table_zhaocarr_satmedmf_progsigma new file mode 100644 index 0000000000..5b29a4375d --- /dev/null +++ b/parm/parm_fv3diag/field_table_zhaocarr_satmedmf_progsigma @@ -0,0 +1,26 @@ +# added by FRE: sphum must be present in atmos +# specific humidity for moist runs + "TRACER", "atmos_mod", "sphum" + "longname", "specific humidity" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=3.e-6" / +# prognostic cloud water mixing ratio + "TRACER", "atmos_mod", "liq_wat" + "longname", "cloud water mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic ozone mixing ratio tracer + "TRACER", "atmos_mod", "o3mr" + "longname", "ozone mixing ratio" + "units", "kg/kg" + "profile_type", "fixed", "surface_value=1.e30" / +# prognostic subgrid scale turbulent kinetic energy + "TRACER", "atmos_mod", "sgs_tke" + "longname", "subgrid scale turbulent kinetic energy" + "units", "m2/s2" + "profile_type", "fixed", "surface_value=0.0" / +# prognotsitc sigmab tracer + "TRACER", "atmos_mod", "sigmab" + "longname", "sigma fraction" + "units", "fraction" + "profile_type", "fixed", "surface_value=0.0" / \ No newline at end of file diff --git a/parm/parm_gdas/aero_crtm_coeff.yaml b/parm/parm_gdas/aero_crtm_coeff.yaml new file mode 100644 index 0000000000..d310ff6d31 --- /dev/null +++ b/parm/parm_gdas/aero_crtm_coeff.yaml @@ -0,0 +1,13 @@ +mkdir: +- $(DATA)/crtm/ +copy: +- [$(FV3JEDI_FIX)/crtm/$(crtm_VERSION)/AerosolCoeff.bin, $(DATA)/crtm/] +- [$(FV3JEDI_FIX)/crtm/$(crtm_VERSION)/CloudCoeff.bin, $(DATA)/crtm/] +- [$(FV3JEDI_FIX)/crtm/$(crtm_VERSION)/v.viirs-m_npp.SpcCoeff.bin, $(DATA)/crtm/] +- [$(FV3JEDI_FIX)/crtm/$(crtm_VERSION)/v.viirs-m_npp.TauCoeff.bin, $(DATA)/crtm/] +- [$(FV3JEDI_FIX)/crtm/$(crtm_VERSION)/v.viirs-m_j1.SpcCoeff.bin, $(DATA)/crtm/] +- [$(FV3JEDI_FIX)/crtm/$(crtm_VERSION)/v.viirs-m_j1.TauCoeff.bin, $(DATA)/crtm/] +- [$(FV3JEDI_FIX)/crtm/$(crtm_VERSION)/NPOESS.VISice.EmisCoeff.bin, $(DATA)/crtm/] +- [$(FV3JEDI_FIX)/crtm/$(crtm_VERSION)/NPOESS.VISland.EmisCoeff.bin, $(DATA)/crtm/] +- [$(FV3JEDI_FIX)/crtm/$(crtm_VERSION)/NPOESS.VISsnow.EmisCoeff.bin, $(DATA)/crtm/] +- [$(FV3JEDI_FIX)/crtm/$(crtm_VERSION)/NPOESS.VISwater.EmisCoeff.bin, 
$(DATA)/crtm/] diff --git a/parm/parm_gdas/aero_jedi_fix.yaml b/parm/parm_gdas/aero_jedi_fix.yaml new file mode 100644 index 0000000000..31ece4ff8f --- /dev/null +++ b/parm/parm_gdas/aero_jedi_fix.yaml @@ -0,0 +1,11 @@ +mkdir: +- !ENV ${DATA}/fv3jedi +copy: +- - !ENV ${FV3JEDI_FIX}/fv3jedi/fv3files/akbk$(npz).nc4 + - !ENV ${DATA}/fv3jedi/akbk.nc4 +- - !ENV ${FV3JEDI_FIX}/fv3jedi/fv3files/fmsmpp.nml + - !ENV ${DATA}/fv3jedi/fmsmpp.nml +- - !ENV ${FV3JEDI_FIX}/fv3jedi/fv3files/field_table_gfdl + - !ENV ${DATA}/fv3jedi/field_table +- - !ENV ${FV3JEDI_FIX}/fv3jedi/fieldmetadata/gfs-aerosol.yaml + - !ENV ${DATA}/fv3jedi/gfs-restart.yaml diff --git a/parm/parm_gdas/aeroanl_inc_vars.yaml b/parm/parm_gdas/aeroanl_inc_vars.yaml new file mode 100644 index 0000000000..298373d6e2 --- /dev/null +++ b/parm/parm_gdas/aeroanl_inc_vars.yaml @@ -0,0 +1 @@ +incvars: ['dust1', 'dust2', 'dust3', 'dust4', 'dust5', 'seas1', 'seas2', 'seas3', 'seas4', 'so4', 'oc1', 'oc2', 'bc1', 'bc2'] diff --git a/parm/parm_gdas/atm_crtm_coeff.yaml b/parm/parm_gdas/atm_crtm_coeff.yaml new file mode 100644 index 0000000000..8e8d433b06 --- /dev/null +++ b/parm/parm_gdas/atm_crtm_coeff.yaml @@ -0,0 +1,178 @@ +mkdir: +- $(DATA)/crtm +copy: +# Emissivity files +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/NPOESS.VISice.EmisCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/NPOESS.VISland.EmisCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/NPOESS.VISsnow.EmisCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/NPOESS.VISwater.EmisCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/NPOESS.IRice.EmisCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/NPOESS.IRland.EmisCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/NPOESS.IRsnow.EmisCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/Nalli.IRwater.EmisCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/FASTEM6.MWwater.EmisCoeff.bin, $(DATA)/crtm] +# Aerosol and Cloud files +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/AerosolCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/CloudCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/CloudCoeff.GFDLFV3.-109z-1.bin, $(DATA)/crtm] +# Satellite_Sensor specific Tau and Spc coefficient files +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/abi_g16.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/abi_g16.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/abi_g17.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/abi_g17.TauCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/abi_g18.SpcCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/abi_g18.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ahi_himawari8.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ahi_himawari8.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ahi_himawari9.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ahi_himawari9.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/airs_aqua.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/airs_aqua.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsr2_gcom-w1.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsr2_gcom-w1.TauCoeff.bin, $(DATA)/crtm] +- 
[$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsre_aqua.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsre_aqua.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_aqua.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_aqua.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_metop-a.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_metop-a.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_metop-b.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_metop-b.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_metop-c.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_metop-c.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_n15.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_n15.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_n18.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_n18.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_n19.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_n19.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsub_n17.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsub_n17.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/atms_n20.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/atms_n20.TauCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/atms_n21.SpcCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/atms_n21.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/atms_npp.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/atms_npp.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_metop-a.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_metop-a.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_metop-b.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_metop-b.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_metop-c.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_metop-c.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_n18.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_n18.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_n19.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/avhrr3_n19.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/cris-fsr_n20.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/cris-fsr_n20.TauCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/cris-fsr_n21.SpcCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/cris-fsr_n21.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/cris-fsr_npp.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/cris-fsr_npp.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/gmi_gpm.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/gmi_gpm.TauCoeff.bin, 
$(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/hirs3_n17.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/hirs3_n17.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/hirs4_metop-a.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/hirs4_metop-a.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/hirs4_metop-b.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/hirs4_metop-b.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/hirs4_n19.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/hirs4_n19.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/iasi_metop-a.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/iasi_metop-a.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/iasi_metop-b.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/iasi_metop-b.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/iasi_metop-c.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/iasi_metop-c.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g11.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g11.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g12.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g12.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g13.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g13.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g14.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g14.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g15.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/imgr_g15.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_metop-a.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_metop-a.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_metop-b.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_metop-b.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_metop-c.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_metop-c.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_n18.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_n18.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_n19.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/mhs_n19.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/saphir_meghat.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/saphir_meghat.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/seviri_m08.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/seviri_m08.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/seviri_m09.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/seviri_m09.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/seviri_m10.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/seviri_m10.TauCoeff.bin, $(DATA)/crtm] +- 
[$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/seviri_m11.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/seviri_m11.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g11.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g11.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g12.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g12.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g13.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g13.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g14.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g14.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g15.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD1_g15.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g11.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g11.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g12.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g12.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g13.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g13.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g14.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g14.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g15.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD2_g15.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g11.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g11.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g12.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g12.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g13.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g13.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g14.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g14.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g15.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD3_g15.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g11.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g11.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g12.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g12.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g13.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g13.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g14.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g14.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g15.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/sndrD4_g15.TauCoeff.bin, $(DATA)/crtm] +- 
[$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmi_f15.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmi_f15.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f16.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f16.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f17.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f17.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f18.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f18.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f19.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f19.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f20.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/ssmis_f20.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/viirs-m_j1.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/viirs-m_j1.TauCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/viirs-m_j2.SpcCoeff.bin, $(DATA)/crtm] +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/viirs-m_j2.TauCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/viirs-m_npp.SpcCoeff.bin, $(DATA)/crtm] +- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/viirs-m_npp.TauCoeff.bin, $(DATA)/crtm] +# Special Spc files +##- [$(HOMEgfs)/fix/gdas/crtm/$(crtm_VERSION)/amsua_metop-a_v2.SpcCoeff.bin, $(DATA)/crtm] diff --git a/parm/parm_gdas/atm_jedi_fix.yaml b/parm/parm_gdas/atm_jedi_fix.yaml new file mode 100644 index 0000000000..07b0fe49f1 --- /dev/null +++ b/parm/parm_gdas/atm_jedi_fix.yaml @@ -0,0 +1,7 @@ +mkdir: +- $(DATA)/fv3jedi +copy: +- [$(HOMEgfs)/fix/gdas/fv3jedi/fv3files/akbk$(npz).nc4, $(DATA)/fv3jedi/akbk.nc4] +- [$(HOMEgfs)/fix/gdas/fv3jedi/fv3files/fmsmpp.nml, $(DATA)/fv3jedi/fmsmpp.nml] +- [$(HOMEgfs)/fix/gdas/fv3jedi/fv3files/field_table_gfdl, $(DATA)/fv3jedi/field_table] +- [$(HOMEgfs)/fix/gdas/fv3jedi/fieldmetadata/gfs-restart.yaml, $(DATA)/fv3jedi/gfs-restart.yaml] diff --git a/parm/parm_gdas/atmanl_inc_vars.yaml b/parm/parm_gdas/atmanl_inc_vars.yaml new file mode 100644 index 0000000000..cb6718ce9f --- /dev/null +++ b/parm/parm_gdas/atmanl_inc_vars.yaml @@ -0,0 +1 @@ +incvars: ['ua', 'va', 't', 'sphum', 'liq_wat', 'ice_wat', 'o3mr'] diff --git a/parm/transfer_gdas_1a.list b/parm/transfer/transfer_gdas_1a.list similarity index 96% rename from parm/transfer_gdas_1a.list rename to parm/transfer/transfer_gdas_1a.list index 01e67f6dd0..00c5306f3b 100644 --- a/parm/transfer_gdas_1a.list +++ b/parm/transfer/transfer_gdas_1a.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-com/gfs/_ENVIR_/gdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/*atmf000* @@ -39,7 +39,7 @@ com/gfs/_ENVIR_/gdas._PDY_/ B 100 -com/gfs/_ENVIR_/gdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/*atmf000* diff --git a/parm/transfer_gdas_1b.list b/parm/transfer/transfer_gdas_1b.list similarity index 96% rename from parm/transfer_gdas_1b.list rename to parm/transfer/transfer_gdas_1b.list index ef4829740b..998c71d843 100644 --- a/parm/transfer_gdas_1b.list +++ b/parm/transfer/transfer_gdas_1b.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -com/gfs/_ENVIR_/gdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/*atmf007* @@ -37,7 +37,7 @@ com/gfs/_ENVIR_/gdas._PDY_/ B 100 -com/gfs/_ENVIR_/gdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/*atmf007* diff --git a/parm/transfer_gdas_1c.list b/parm/transfer/transfer_gdas_1c.list similarity index 96% rename from parm/transfer_gdas_1c.list rename to parm/transfer/transfer_gdas_1c.list index 7c8ff99481..42b1bd4f64 100644 --- a/parm/transfer_gdas_1c.list +++ b/parm/transfer/transfer_gdas_1c.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -com/gfs/_ENVIR_/gdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gdas._PDY_/ - /??/atmos/*atmf000* - /??/atmos/*atmf001* - /??/atmos/*atmf002* @@ -44,7 +44,7 @@ com/gfs/_ENVIR_/gdas._PDY_/ B 100 -com/gfs/_ENVIR_/gdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gdas._PDYm1_/ - /??/atmos/*atmf000* - /??/atmos/*atmf001* - /??/atmos/*atmf002* diff --git a/parm/transfer_gdas_enkf_enkf_05.list b/parm/transfer/transfer_gdas_enkf_enkf_05.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_05.list rename to parm/transfer/transfer_gdas_enkf_enkf_05.list index 518636342c..fe1be06e8c 100644 --- a/parm/transfer_gdas_enkf_enkf_05.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_05.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem001/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem001/ diff --git a/parm/transfer_gdas_enkf_enkf_10.list b/parm/transfer/transfer_gdas_enkf_enkf_10.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_10.list rename to parm/transfer/transfer_gdas_enkf_enkf_10.list index db2b5c2568..b2298be8af 100644 --- a/parm/transfer_gdas_enkf_enkf_10.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_10.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem006/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem006/ diff --git a/parm/transfer_gdas_enkf_enkf_15.list b/parm/transfer/transfer_gdas_enkf_enkf_15.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_15.list rename to parm/transfer/transfer_gdas_enkf_enkf_15.list index 30e10b51cc..435de61bba 100644 --- a/parm/transfer_gdas_enkf_enkf_15.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_15.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem011/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem011/ diff --git a/parm/transfer_gdas_enkf_enkf_20.list b/parm/transfer/transfer_gdas_enkf_enkf_20.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_20.list rename to parm/transfer/transfer_gdas_enkf_enkf_20.list index 493bb2cc52..e329d227bd 100644 --- a/parm/transfer_gdas_enkf_enkf_20.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_20.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem016/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem016/ diff --git a/parm/transfer_gdas_enkf_enkf_25.list b/parm/transfer/transfer_gdas_enkf_enkf_25.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_25.list rename to parm/transfer/transfer_gdas_enkf_enkf_25.list index 8e91b1af34..fb6d964369 100644 --- a/parm/transfer_gdas_enkf_enkf_25.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_25.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem021/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem021/ diff --git a/parm/transfer_gdas_enkf_enkf_30.list b/parm/transfer/transfer_gdas_enkf_enkf_30.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_30.list rename to parm/transfer/transfer_gdas_enkf_enkf_30.list index d29b79e871..bce22494a4 100644 --- a/parm/transfer_gdas_enkf_enkf_30.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_30.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem026/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem026/ diff --git a/parm/transfer_gdas_enkf_enkf_35.list b/parm/transfer/transfer_gdas_enkf_enkf_35.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_35.list rename to parm/transfer/transfer_gdas_enkf_enkf_35.list index 60e69aaeb6..6397c6693e 100644 --- a/parm/transfer_gdas_enkf_enkf_35.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_35.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem031/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem031/ diff --git a/parm/transfer_gdas_enkf_enkf_40.list b/parm/transfer/transfer_gdas_enkf_enkf_40.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_40.list rename to parm/transfer/transfer_gdas_enkf_enkf_40.list index 1ce4d8e3d9..d8a85e529c 100644 --- a/parm/transfer_gdas_enkf_enkf_40.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_40.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem036/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem036/ diff --git a/parm/transfer_gdas_enkf_enkf_45.list b/parm/transfer/transfer_gdas_enkf_enkf_45.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_45.list rename to parm/transfer/transfer_gdas_enkf_enkf_45.list index 50b2c35d6f..fd8b1d5299 100644 --- a/parm/transfer_gdas_enkf_enkf_45.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_45.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem041/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem041/ diff --git a/parm/transfer_gdas_enkf_enkf_50.list b/parm/transfer/transfer_gdas_enkf_enkf_50.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_50.list rename to parm/transfer/transfer_gdas_enkf_enkf_50.list index 69a895bffb..dd7721505b 100644 --- a/parm/transfer_gdas_enkf_enkf_50.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_50.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem046/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem046/ diff --git a/parm/transfer_gdas_enkf_enkf_55.list b/parm/transfer/transfer_gdas_enkf_enkf_55.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_55.list rename to parm/transfer/transfer_gdas_enkf_enkf_55.list index a1cbd458e7..4606feb727 100644 --- a/parm/transfer_gdas_enkf_enkf_55.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_55.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem051/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem051/ diff --git a/parm/transfer_gdas_enkf_enkf_60.list b/parm/transfer/transfer_gdas_enkf_enkf_60.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_60.list rename to parm/transfer/transfer_gdas_enkf_enkf_60.list index 494b8003a3..e5764082dc 100644 --- a/parm/transfer_gdas_enkf_enkf_60.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_60.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem056/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem056/ diff --git a/parm/transfer_gdas_enkf_enkf_65.list b/parm/transfer/transfer_gdas_enkf_enkf_65.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_65.list rename to parm/transfer/transfer_gdas_enkf_enkf_65.list index 63d5af0575..15e12a0660 100644 --- a/parm/transfer_gdas_enkf_enkf_65.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_65.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem061/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem061/ diff --git a/parm/transfer_gdas_enkf_enkf_70.list b/parm/transfer/transfer_gdas_enkf_enkf_70.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_70.list rename to parm/transfer/transfer_gdas_enkf_enkf_70.list index b5d484407f..88accbffb9 100644 --- a/parm/transfer_gdas_enkf_enkf_70.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_70.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem066/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem066/ diff --git a/parm/transfer_gdas_enkf_enkf_75.list b/parm/transfer/transfer_gdas_enkf_enkf_75.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_75.list rename to parm/transfer/transfer_gdas_enkf_enkf_75.list index f66df06e64..3db4f41a03 100644 --- a/parm/transfer_gdas_enkf_enkf_75.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_75.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem071/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem071/ diff --git a/parm/transfer_gdas_enkf_enkf_80.list b/parm/transfer/transfer_gdas_enkf_enkf_80.list similarity index 92% rename from parm/transfer_gdas_enkf_enkf_80.list rename to parm/transfer/transfer_gdas_enkf_enkf_80.list index cbf1a8a9cd..58ca72515d 100644 --- a/parm/transfer_gdas_enkf_enkf_80.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_80.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/mem076/ @@ -42,8 +41,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/mem076/ diff --git a/parm/transfer_gdas_enkf_enkf_misc.list b/parm/transfer/transfer_gdas_enkf_enkf_misc.list similarity index 91% rename from parm/transfer_gdas_enkf_enkf_misc.list rename to parm/transfer/transfer_gdas_enkf_enkf_misc.list index 14d052889e..141b4af1da 100644 --- a/parm/transfer_gdas_enkf_enkf_misc.list +++ b/parm/transfer/transfer_gdas_enkf_enkf_misc.list @@ -24,8 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-#com/gfs/_ENVIR_/enkfgdas._PDY_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/* @@ -33,8 +32,7 @@ B 100 -#com/gfs/_ENVIR_/enkfgdas._PDYm1_/ -/gpfs/dell1/nco/ops/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/* diff --git a/parm/transfer_gdas_misc.list b/parm/transfer/transfer_gdas_misc.list similarity index 89% rename from parm/transfer_gdas_misc.list rename to parm/transfer/transfer_gdas_misc.list index 6687262a45..3d7fe47a88 100644 --- a/parm/transfer_gdas_misc.list +++ b/parm/transfer/transfer_gdas_misc.list @@ -30,30 +30,30 @@ #B 4500000 -com/gfs/_ENVIR_/syndat/ +_COMROOT_/gfs/_SHORTVER_/syndat/ B 180 -com/gfs/_ENVIR_/gdascounts/ +_COMROOT_/gfs/_SHORTVER_/gdascounts/ + /data_counts._MONPREV_/*** - * B 16000000 -com/gfs/_ENVIR_/gdascounts/ +_COMROOT_/gfs/_SHORTVER_/gdascounts/ + /data_counts._MONCUR_/*** - * B 16000000 -com/gfs/_ENVIR_/gdascounts/ +_COMROOT_/gfs/_SHORTVER_/gdascounts/ + /satcounts._MONPREV_/*** - * B 16000000 -com/gfs/_ENVIR_/gdascounts/ +_COMROOT_/gfs/_SHORTVER_/gdascounts/ + /satcounts._MONCUR_/*** - * B 16000000 -com/gfs/_ENVIR_/sdm_rtdm/ +_COMROOT_/gfs/_SHORTVER_/sdm_rtdm/ + /obcount_30day/ + /obcount_30day/gdas/ + /obcount_30day/gdas/gdas._PDYm1_/*** @@ -61,14 +61,14 @@ com/gfs/_ENVIR_/sdm_rtdm/ - * B 2000000 -com/gfs/_ENVIR_/sdm_rtdm/ +_COMROOT_/gfs/_SHORTVER_/sdm_rtdm/ + /avgdata/ + /avgdata/obcount_30davg.gdas._MONPREV_ + /avgdata/obcount_30davg.gdas.current - * B 256000 -com/gfs/_ENVIR_/gdascounts/ +_COMROOT_/gfs/_SHORTVER_/gdascounts/ + /index.shtml + /index_backup.shtml - * diff --git a/parm/transfer_gfs_1.list b/parm/transfer/transfer_gfs_1.list similarity index 96% rename from parm/transfer_gfs_1.list rename to parm/transfer/transfer_gfs_1.list index ce70343214..84e852ff82 100644 --- a/parm/transfer_gfs_1.list +++ b/parm/transfer/transfer_gfs_1.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ - /??/atmos/gfs.t??z.atmf???.nc @@ -36,7 +36,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ - /??/wave/* B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/atmos/ - /??/atmos/gfs.t??z.atmf???.nc diff --git a/parm/transfer_gfs_10a.list b/parm/transfer/transfer_gfs_10a.list similarity index 96% rename from parm/transfer_gfs_10a.list rename to parm/transfer/transfer_gfs_10a.list index a938573073..3beb6d15fd 100644 --- a/parm/transfer_gfs_10a.list +++ b/parm/transfer/transfer_gfs_10a.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.master.*1 @@ -35,7 +35,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ - * B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.master.*1 diff --git a/parm/transfer_gfs_10b.list b/parm/transfer/transfer_gfs_10b.list similarity index 96% rename from parm/transfer_gfs_10b.list rename to parm/transfer/transfer_gfs_10b.list index 83467d6d25..34522d1e2f 100644 --- a/parm/transfer_gfs_10b.list +++ b/parm/transfer/transfer_gfs_10b.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.master.*0 @@ -35,7 +35,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ - * B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.master.*0 diff --git a/parm/transfer_gfs_2.list b/parm/transfer/transfer_gfs_2.list similarity index 98% rename from parm/transfer_gfs_2.list rename to parm/transfer/transfer_gfs_2.list index 373ebfae05..f0ea9bc9c5 100644 --- a/parm/transfer_gfs_2.list +++ b/parm/transfer/transfer_gfs_2.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf000.nc @@ -61,7 +61,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf000.nc diff --git a/parm/transfer_gfs_3.list b/parm/transfer/transfer_gfs_3.list similarity index 98% rename from parm/transfer_gfs_3.list rename to parm/transfer/transfer_gfs_3.list index a5218e198a..636077381e 100644 --- a/parm/transfer_gfs_3.list +++ b/parm/transfer/transfer_gfs_3.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf001.nc @@ -61,7 +61,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf001.nc diff --git a/parm/transfer_gfs_4.list b/parm/transfer/transfer_gfs_4.list similarity index 98% rename from parm/transfer_gfs_4.list rename to parm/transfer/transfer_gfs_4.list index 37acec25ab..b45e4027ff 100644 --- a/parm/transfer_gfs_4.list +++ b/parm/transfer/transfer_gfs_4.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf002.nc @@ -61,7 +61,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf002.nc diff --git a/parm/transfer_gfs_5.list b/parm/transfer/transfer_gfs_5.list similarity index 98% rename from parm/transfer_gfs_5.list rename to parm/transfer/transfer_gfs_5.list index 01e01c2447..21f59df4f8 100644 --- a/parm/transfer_gfs_5.list +++ b/parm/transfer/transfer_gfs_5.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf003.nc @@ -61,7 +61,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf003.nc diff --git a/parm/transfer_gfs_6.list b/parm/transfer/transfer_gfs_6.list similarity index 98% rename from parm/transfer_gfs_6.list rename to parm/transfer/transfer_gfs_6.list index de661359f7..5e90f975fc 100644 --- a/parm/transfer_gfs_6.list +++ b/parm/transfer/transfer_gfs_6.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. 
-com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf004.nc @@ -61,7 +61,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf004.nc diff --git a/parm/transfer_gfs_7.list b/parm/transfer/transfer_gfs_7.list similarity index 98% rename from parm/transfer_gfs_7.list rename to parm/transfer/transfer_gfs_7.list index 841d671944..e3b8dad532 100644 --- a/parm/transfer_gfs_7.list +++ b/parm/transfer/transfer_gfs_7.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf005.nc @@ -61,7 +61,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf005.nc diff --git a/parm/transfer_gfs_8.list b/parm/transfer/transfer_gfs_8.list similarity index 98% rename from parm/transfer_gfs_8.list rename to parm/transfer/transfer_gfs_8.list index 744ef24e70..df146fd207 100644 --- a/parm/transfer_gfs_8.list +++ b/parm/transfer/transfer_gfs_8.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf006.nc @@ -60,7 +60,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.atmf006.nc diff --git a/parm/transfer_gfs_9a.list b/parm/transfer/transfer_gfs_9a.list similarity index 96% rename from parm/transfer_gfs_9a.list rename to parm/transfer/transfer_gfs_9a.list index 2f3c34aaaa..44d316c81e 100644 --- a/parm/transfer_gfs_9a.list +++ b/parm/transfer/transfer_gfs_9a.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.sfcf??0.nc @@ -36,7 +36,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.sfcf??0.nc diff --git a/parm/transfer_gfs_9b.list b/parm/transfer/transfer_gfs_9b.list similarity index 96% rename from parm/transfer_gfs_9b.list rename to parm/transfer/transfer_gfs_9b.list index fd87ee88d4..b2571dfb7c 100644 --- a/parm/transfer_gfs_9b.list +++ b/parm/transfer/transfer_gfs_9b.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.sfcf??1.nc @@ -36,7 +36,7 @@ com/gfs/_ENVIR_/gfs._PDY_/ B 100 -com/gfs/_ENVIR_/gfs._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.sfcf??1.nc diff --git a/parm/transfer/transfer_gfs_gempak.list b/parm/transfer/transfer_gfs_gempak.list new file mode 100644 index 0000000000..e491821d69 --- /dev/null +++ b/parm/transfer/transfer_gfs_gempak.list @@ -0,0 +1,45 @@ +# This file specifies the directories to be tranatmfered and, optionally, the files within +# those directories to include or exclude. If one directory is specified per line, it +# will be used as both the source and destination. 
If two directories are specified per +# line, separated by one or more spaces, the first will be used as the source and the +# second the destination. Directories that begin with "com/" will be resolved using +# the compath.py utility. Rules may be placed below each directory or directory pair +# and must begin with one of the following characters: +# - exclude, specifies an exclude pattern +# + include, specifies an include pattern +# . merge, specifies a merge-file to read for more rules +# : dir-merge, specifies a per-directory merge-file +# H hide, specifies a pattern for hiding files from the tranatmfer +# S show, files that match the pattern are not hidden +# P protect, specifies a pattern for protecting files from deletion +# R risk, files that match the pattern are not protected +# ! clear, clears the current include/exclude list (takes no arg) +# B bytes, relative size of the path in relation to the other paths in the list +# D delete, delete extraneous files from destination directories (takes no arg) +# E encrypt, enables data encryption [two cores should be allocated] (takes no arg) +# W whole files, copy whole files rather than use delta-xfer algorithm (takes no arg) (v2.2.3+) +# T two-way syncronization will update both sides with latest changes (takes no arg) +# Z compress data as it is sent, accepts optional compression level argument (1-9) +# Rules higher in the list take precedence over lower ones. By default, all files in a +# directory are included, so if no exclude patterns match that file, it will be +# tranatmferred. + +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ ++ /??/ ++ /??/atmos/ ++ /??/atmos/gempak/ ++ /??/atmos/gempak/* +- * + +B 100 + +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ ++ /??/ ++ /??/atmos/ ++ /??/atmos/gempak/ ++ /??/atmos/gempak/* +- * + +B 100 + + diff --git a/parm/transfer_gfs_misc.list b/parm/transfer/transfer_gfs_misc.list similarity index 96% rename from parm/transfer_gfs_misc.list rename to parm/transfer/transfer_gfs_misc.list index e8448e59cc..32f002d1e7 100644 --- a/parm/transfer_gfs_misc.list +++ b/parm/transfer/transfer_gfs_misc.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # tranatmferred. -com/gfs/_ENVIR_/sdm_rtdm/ +_COMROOT_/gfs/_SHORTVER_/sdm_rtdm/ + /avgdata/ + /avgdata/obcount_30davg.gfs._MONPREV_ + /avgdata/obcount_30davg.gfs.current @@ -32,7 +32,7 @@ com/gfs/_ENVIR_/sdm_rtdm/ B 256000 -com/gfs/_ENVIR_/sdm_rtdm/ +_COMROOT_/gfs/_SHORTVER_/sdm_rtdm/ + /obcount_30day/ + /obcount_30day/gfs/ + /obcount_30day/gfs/gfs._PDYm1_/*** diff --git a/parm/transfer/transfer_gfs_wave_restart1.list b/parm/transfer/transfer_gfs_wave_restart1.list new file mode 100644 index 0000000000..cdac47428a --- /dev/null +++ b/parm/transfer/transfer_gfs_wave_restart1.list @@ -0,0 +1,44 @@ +# This file specifies the directories to be tranatmfered and, optionally, the files within +# those directories to include or exclude. If one directory is specified per line, it +# will be used as both the source and destination. If two directories are specified per +# line, separated by one or more spaces, the first will be used as the source and the +# second the destination. Directories that begin with "com/" will be resolved using +# the compath.py utility. Rules may be placed below each directory or directory pair +# and must begin with one of the following characters: +# - exclude, specifies an exclude pattern +# + include, specifies an include pattern +# . 
merge, specifies a merge-file to read for more rules +# : dir-merge, specifies a per-directory merge-file +# H hide, specifies a pattern for hiding files from the tranatmfer +# S show, files that match the pattern are not hidden +# P protect, specifies a pattern for protecting files from deletion +# R risk, files that match the pattern are not protected +# ! clear, clears the current include/exclude list (takes no arg) +# B bytes, relative size of the path in relation to the other paths in the list +# D delete, delete extraneous files from destination directories (takes no arg) +# E encrypt, enables data encryption [two cores should be allocated] (takes no arg) +# W whole files, copy whole files rather than use delta-xfer algorithm (takes no arg) (v2.2.3+) +# T two-way syncronization will update both sides with latest changes (takes no arg) +# Z compress data as it is sent, accepts optional compression level argument (1-9) +# Rules higher in the list take precedence over lower ones. By default, all files in a +# directory are included, so if no exclude patterns match that file, it will be +# tranatmferred. + +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ ++ /??/ ++ /??/wave/ ++ /??/wave/restart/ ++ /??/wave/restart/*aoc_9km +- * +B 100 + +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ ++ /??/ ++ /??/wave/ ++ /??/wave/restart/ ++ /??/wave/restart/*aoc_9km +- * +B 100 + + + diff --git a/parm/transfer/transfer_gfs_wave_restart2.list b/parm/transfer/transfer_gfs_wave_restart2.list new file mode 100644 index 0000000000..6f4eb289af --- /dev/null +++ b/parm/transfer/transfer_gfs_wave_restart2.list @@ -0,0 +1,44 @@ +# This file specifies the directories to be tranatmfered and, optionally, the files within +# those directories to include or exclude. If one directory is specified per line, it +# will be used as both the source and destination. If two directories are specified per +# line, separated by one or more spaces, the first will be used as the source and the +# second the destination. Directories that begin with "com/" will be resolved using +# the compath.py utility. Rules may be placed below each directory or directory pair +# and must begin with one of the following characters: +# - exclude, specifies an exclude pattern +# + include, specifies an include pattern +# . merge, specifies a merge-file to read for more rules +# : dir-merge, specifies a per-directory merge-file +# H hide, specifies a pattern for hiding files from the tranatmfer +# S show, files that match the pattern are not hidden +# P protect, specifies a pattern for protecting files from deletion +# R risk, files that match the pattern are not protected +# ! clear, clears the current include/exclude list (takes no arg) +# B bytes, relative size of the path in relation to the other paths in the list +# D delete, delete extraneous files from destination directories (takes no arg) +# E encrypt, enables data encryption [two cores should be allocated] (takes no arg) +# W whole files, copy whole files rather than use delta-xfer algorithm (takes no arg) (v2.2.3+) +# T two-way syncronization will update both sides with latest changes (takes no arg) +# Z compress data as it is sent, accepts optional compression level argument (1-9) +# Rules higher in the list take precedence over lower ones. By default, all files in a +# directory are included, so if no exclude patterns match that file, it will be +# tranatmferred. 
+ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ ++ /??/ ++ /??/wave/ ++ /??/wave/restart/ ++ /??/wave/restart/*gsh_15m +- * +B 100 + +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ ++ /??/ ++ /??/wave/ ++ /??/wave/restart/ ++ /??/wave/restart/*gsh_15m +- * +B 100 + + + diff --git a/parm/transfer/transfer_gfs_wave_restart3.list b/parm/transfer/transfer_gfs_wave_restart3.list new file mode 100644 index 0000000000..c8005e53eb --- /dev/null +++ b/parm/transfer/transfer_gfs_wave_restart3.list @@ -0,0 +1,44 @@ +# This file specifies the directories to be tranatmfered and, optionally, the files within +# those directories to include or exclude. If one directory is specified per line, it +# will be used as both the source and destination. If two directories are specified per +# line, separated by one or more spaces, the first will be used as the source and the +# second the destination. Directories that begin with "com/" will be resolved using +# the compath.py utility. Rules may be placed below each directory or directory pair +# and must begin with one of the following characters: +# - exclude, specifies an exclude pattern +# + include, specifies an include pattern +# . merge, specifies a merge-file to read for more rules +# : dir-merge, specifies a per-directory merge-file +# H hide, specifies a pattern for hiding files from the tranatmfer +# S show, files that match the pattern are not hidden +# P protect, specifies a pattern for protecting files from deletion +# R risk, files that match the pattern are not protected +# ! clear, clears the current include/exclude list (takes no arg) +# B bytes, relative size of the path in relation to the other paths in the list +# D delete, delete extraneous files from destination directories (takes no arg) +# E encrypt, enables data encryption [two cores should be allocated] (takes no arg) +# W whole files, copy whole files rather than use delta-xfer algorithm (takes no arg) (v2.2.3+) +# T two-way syncronization will update both sides with latest changes (takes no arg) +# Z compress data as it is sent, accepts optional compression level argument (1-9) +# Rules higher in the list take precedence over lower ones. By default, all files in a +# directory are included, so if no exclude patterns match that file, it will be +# tranatmferred. + +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ ++ /??/ ++ /??/wave/ ++ /??/wave/restart/ ++ /??/wave/restart/*gnh_10m +- * +B 100 + +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ ++ /??/ ++ /??/wave/ ++ /??/wave/restart/ ++ /??/wave/restart/*gnh_10m +- * +B 100 + + + diff --git a/parm/transfer/transfer_gfs_wave_rundata.list b/parm/transfer/transfer_gfs_wave_rundata.list new file mode 100644 index 0000000000..dfacfe48f7 --- /dev/null +++ b/parm/transfer/transfer_gfs_wave_rundata.list @@ -0,0 +1,44 @@ +# This file specifies the directories to be tranatmfered and, optionally, the files within +# those directories to include or exclude. If one directory is specified per line, it +# will be used as both the source and destination. If two directories are specified per +# line, separated by one or more spaces, the first will be used as the source and the +# second the destination. Directories that begin with "com/" will be resolved using +# the compath.py utility. Rules may be placed below each directory or directory pair +# and must begin with one of the following characters: +# - exclude, specifies an exclude pattern +# + include, specifies an include pattern +# . 
merge, specifies a merge-file to read for more rules +# : dir-merge, specifies a per-directory merge-file +# H hide, specifies a pattern for hiding files from the tranatmfer +# S show, files that match the pattern are not hidden +# P protect, specifies a pattern for protecting files from deletion +# R risk, files that match the pattern are not protected +# ! clear, clears the current include/exclude list (takes no arg) +# B bytes, relative size of the path in relation to the other paths in the list +# D delete, delete extraneous files from destination directories (takes no arg) +# E encrypt, enables data encryption [two cores should be allocated] (takes no arg) +# W whole files, copy whole files rather than use delta-xfer algorithm (takes no arg) (v2.2.3+) +# T two-way syncronization will update both sides with latest changes (takes no arg) +# Z compress data as it is sent, accepts optional compression level argument (1-9) +# Rules higher in the list take precedence over lower ones. By default, all files in a +# directory are included, so if no exclude patterns match that file, it will be +# tranatmferred. + +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ ++ /??/ ++ /??/wave/ ++ /??/wave/rundata/ ++ /??/wave/rundata/* +- * + +B 100 + +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ ++ /??/ ++ /??/wave/ ++ /??/wave/rundata/ ++ /??/wave/rundata/* +- * + +B 100 + diff --git a/parm/transfer/transfer_gfs_wave_wave.list b/parm/transfer/transfer_gfs_wave_wave.list new file mode 100644 index 0000000000..03cf074797 --- /dev/null +++ b/parm/transfer/transfer_gfs_wave_wave.list @@ -0,0 +1,51 @@ +# This file specifies the directories to be tranatmfered and, optionally, the files within +# those directories to include or exclude. If one directory is specified per line, it +# will be used as both the source and destination. If two directories are specified per +# line, separated by one or more spaces, the first will be used as the source and the +# second the destination. Directories that begin with "com/" will be resolved using +# the compath.py utility. Rules may be placed below each directory or directory pair +# and must begin with one of the following characters: +# - exclude, specifies an exclude pattern +# + include, specifies an include pattern +# . merge, specifies a merge-file to read for more rules +# : dir-merge, specifies a per-directory merge-file +# H hide, specifies a pattern for hiding files from the tranatmfer +# S show, files that match the pattern are not hidden +# P protect, specifies a pattern for protecting files from deletion +# R risk, files that match the pattern are not protected +# ! clear, clears the current include/exclude list (takes no arg) +# B bytes, relative size of the path in relation to the other paths in the list +# D delete, delete extraneous files from destination directories (takes no arg) +# E encrypt, enables data encryption [two cores should be allocated] (takes no arg) +# W whole files, copy whole files rather than use delta-xfer algorithm (takes no arg) (v2.2.3+) +# T two-way syncronization will update both sides with latest changes (takes no arg) +# Z compress data as it is sent, accepts optional compression level argument (1-9) +# Rules higher in the list take precedence over lower ones. By default, all files in a +# directory are included, so if no exclude patterns match that file, it will be +# tranatmferred. 
+ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ ++ /??/ ++ /??/wave/ ++ /??/wave/gridded/ ++ /??/wave/gridded/* ++ /??/wave/station/ ++ /??/wave/station/* ++ /??/wave/gempak/ ++ /??/wave/gempak/* +- * +B 100 + +_COMROOT_/gfs/_SHORTVER_/gfs._PDYm1_/ ++ /??/ ++ /??/wave/ ++ /??/wave/gridded/ ++ /??/wave/gridded/* ++ /??/wave/station/ ++ /??/wave/station/* ++ /??/wave/gempak/ ++ /??/wave/gempak/* +- * +B 100 + + diff --git a/parm/transfer_rdhpcs_gdas.list b/parm/transfer/transfer_rdhpcs_gdas.list similarity index 94% rename from parm/transfer_rdhpcs_gdas.list rename to parm/transfer/transfer_rdhpcs_gdas.list index e3811d3aa6..a154b022ed 100644 --- a/parm/transfer_rdhpcs_gdas.list +++ b/parm/transfer/transfer_rdhpcs_gdas.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # transferred. -com/gfs/_ENVIR_/gdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/gdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/gdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/gdas._PDYm1_/ + /??/ + /??/atmos/ + /??/atmos/gdas.t??z*tcvitals* @@ -45,7 +45,7 @@ com/gfs/_ENVIR_/gdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/gdas._PDYm1_/ E # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/gdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/gdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/gdas._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gdas.t??z*tcvitals* diff --git a/parm/transfer_rdhpcs_gdas_enkf_enkf_1.list b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_1.list similarity index 92% rename from parm/transfer_rdhpcs_gdas_enkf_enkf_1.list rename to parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_1.list index f924cbd377..aae14dc120 100644 --- a/parm/transfer_rdhpcs_gdas_enkf_enkf_1.list +++ b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_1.list @@ -27,7 +27,7 @@ # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ + /00/ + /00/atmos/ + /00/atmos/mem???/ @@ -37,7 +37,7 @@ com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ E # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ + /00/ + /00/atmos/ + /00/atmos/mem???/ diff --git a/parm/transfer_rdhpcs_gdas_enkf_enkf_2.list b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_2.list similarity index 92% rename from parm/transfer_rdhpcs_gdas_enkf_enkf_2.list rename to parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_2.list index f7b2f03f9e..1cf3b8f5e4 100644 --- a/parm/transfer_rdhpcs_gdas_enkf_enkf_2.list +++ b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_2.list @@ -27,7 +27,7 @@ # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ + /06/ + /06/atmos/ + /06/atmos/mem???/ @@ -37,7 +37,7 @@ com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ E # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ + /06/ + /06/atmos/ + /06/atmos/mem???/ diff --git a/parm/transfer_rdhpcs_gdas_enkf_enkf_3.list 
b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_3.list similarity index 92% rename from parm/transfer_rdhpcs_gdas_enkf_enkf_3.list rename to parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_3.list index f51726923b..ee0dae4c34 100644 --- a/parm/transfer_rdhpcs_gdas_enkf_enkf_3.list +++ b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_3.list @@ -27,7 +27,7 @@ # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ + /12/ + /12/atmos/ + /12/atmos/mem???/ @@ -37,7 +37,7 @@ com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ E # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ + /12/ + /12/atmos/ + /12/atmos/mem???/ diff --git a/parm/transfer_rdhpcs_gdas_enkf_enkf_4.list b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_4.list similarity index 92% rename from parm/transfer_rdhpcs_gdas_enkf_enkf_4.list rename to parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_4.list index 85c541beb8..29f1a601d1 100644 --- a/parm/transfer_rdhpcs_gdas_enkf_enkf_4.list +++ b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_4.list @@ -27,7 +27,7 @@ # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ + /18/ + /18/atmos/ + /18/atmos/mem???/ @@ -37,7 +37,7 @@ com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ E # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ + /18/ + /18/atmos/ + /18/atmos/mem???/ diff --git a/parm/transfer_rdhpcs_gdas_enkf_enkf_5.list b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_5.list similarity index 92% rename from parm/transfer_rdhpcs_gdas_enkf_enkf_5.list rename to parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_5.list index 44bf0f4662..7d1dd9ff6a 100644 --- a/parm/transfer_rdhpcs_gdas_enkf_enkf_5.list +++ b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_5.list @@ -27,7 +27,7 @@ # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ + /00/ + /00/atmos/ + /00/atmos/mem???/ @@ -38,7 +38,7 @@ com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ E # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ + /00/ + /00/atmos/ + /00/atmos/mem???/ diff --git a/parm/transfer_rdhpcs_gdas_enkf_enkf_6.list b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_6.list similarity index 92% rename from parm/transfer_rdhpcs_gdas_enkf_enkf_6.list rename to parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_6.list index 3af2fbae4d..124dbe3aad 100644 --- a/parm/transfer_rdhpcs_gdas_enkf_enkf_6.list +++ b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_6.list @@ -27,7 +27,7 @@ # This directory is a good candidate for compression #Z 
-com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ + /06/ + /06/atmos/ + /06/atmos/mem???/ @@ -38,7 +38,7 @@ com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ E # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ + /06/ + /06/atmos/ + /06/atmos/mem???/ diff --git a/parm/transfer_rdhpcs_gdas_enkf_enkf_7.list b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_7.list similarity index 92% rename from parm/transfer_rdhpcs_gdas_enkf_enkf_7.list rename to parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_7.list index 9a86b20c42..58ff55b5d6 100644 --- a/parm/transfer_rdhpcs_gdas_enkf_enkf_7.list +++ b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_7.list @@ -27,7 +27,7 @@ # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ + /12/ + /12/atmos/ + /12/atmos/mem???/ @@ -38,7 +38,7 @@ com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ E # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ + /12/ + /12/atmos/ + /12/atmos/mem???/ diff --git a/parm/transfer_rdhpcs_gdas_enkf_enkf_8.list b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_8.list similarity index 92% rename from parm/transfer_rdhpcs_gdas_enkf_enkf_8.list rename to parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_8.list index 747be01fcd..99d3de2843 100644 --- a/parm/transfer_rdhpcs_gdas_enkf_enkf_8.list +++ b/parm/transfer/transfer_rdhpcs_gdas_enkf_enkf_8.list @@ -27,7 +27,7 @@ # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ + /18/ + /18/atmos/ + /18/atmos/mem???/ @@ -38,7 +38,7 @@ com/gfs/_ENVIR_/enkfgdas._PDYm1_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDYm1_/ E # This directory is a good candidate for compression #Z -com/gfs/_ENVIR_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ +_COMROOT_/gfs/_SHORTVER_/enkfgdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/enkfgdas._PDY_/ + /18/ + /18/atmos/ + /18/atmos/mem???/ diff --git a/parm/transfer_rdhpcs_gfs.list b/parm/transfer/transfer_rdhpcs_gfs.list similarity index 97% rename from parm/transfer_rdhpcs_gfs.list rename to parm/transfer/transfer_rdhpcs_gfs.list index 34e006e179..78eedd1f24 100644 --- a/parm/transfer_rdhpcs_gfs.list +++ b/parm/transfer/transfer_rdhpcs_gfs.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # transferred. 
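The RDHPCS lists above use the two-directory form described in the list headers: the first path on a line is the local source and the second (under _REMOTEPATH_) is the remote destination, while single-directory lines reuse the same path for both. A small illustrative helper (the function name split_dirs is hypothetical) showing how such a line splits into a source/destination pair:

    def split_dirs(line):
        """Split a transfer-list directory line into (source, destination).

        Per the list header: one directory means source == destination;
        two whitespace-separated directories mean (source, destination).
        """
        parts = line.split()
        return (parts[0], parts[0]) if len(parts) == 1 else (parts[0], parts[1])

    src, dst = split_dirs(
        "_COMROOT_/gfs/_SHORTVER_/gdas._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/gdas._PDY_/"
    )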
-com/gfs/_ENVIR_/gfs._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ _REMOTEPATH_/com/gfs/_ENVIR_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gfs.t??z.*bufr* diff --git a/parm/transfer_rdhpcs_gfs_nawips.list b/parm/transfer/transfer_rdhpcs_gfs_nawips.list similarity index 95% rename from parm/transfer_rdhpcs_gfs_nawips.list rename to parm/transfer/transfer_rdhpcs_gfs_nawips.list index 02d80bac9f..3465d3c360 100644 --- a/parm/transfer_rdhpcs_gfs_nawips.list +++ b/parm/transfer/transfer_rdhpcs_gfs_nawips.list @@ -24,7 +24,7 @@ # directory are included, so if no exclude patterns match that file, it will be # transferred. -com/gfs/_ENVIR_/gfs._PDY_/ _REMOTEPATH_/com/nawips/_ENVIR_/gfs._PDY_/ +_COMROOT_/gfs/_SHORTVER_/gfs._PDY_/ _REMOTEPATH_/_ENVIR_/com/gfs/_SHORTVER_/gfs._PDY_/ + /??/ + /??/atmos/ + /??/atmos/gempak/ diff --git a/parm/ufs/fix/gfs/atmos.fixed_files.yaml b/parm/ufs/fix/gfs/atmos.fixed_files.yaml new file mode 100644 index 0000000000..cc82f7a253 --- /dev/null +++ b/parm/ufs/fix/gfs/atmos.fixed_files.yaml @@ -0,0 +1,85 @@ +copy: + # Atmosphere mosaic file linked as the grid_spec file (atm only) + - [$(FIX_orog)/$(atm_res)/$(atm_res)_mosaic.nc, $(DATA)/INPUT/grid_spec.nc] + + # Atmosphere grid tile files + - [$(FIX_orog)/$(atm_res)/$(atm_res)_grid.tile1.nc, $(DATA)/INPUT/] + - [$(FIX_orog)/$(atm_res)/$(atm_res)_grid.tile2.nc, $(DATA)/INPUT/] + - [$(FIX_orog)/$(atm_res)/$(atm_res)_grid.tile3.nc, $(DATA)/INPUT/] + - [$(FIX_orog)/$(atm_res)/$(atm_res)_grid.tile4.nc, $(DATA)/INPUT/] + - [$(FIX_orog)/$(atm_res)/$(atm_res)_grid.tile5.nc, $(DATA)/INPUT/] + - [$(FIX_orog)/$(atm_res)/$(atm_res)_grid.tile6.nc, $(DATA)/INPUT/] + + # oro_data_ls and oro_data_ss files from FIX_ugwd + - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ls.tile1.nc, $(DATA)/INPUT/oro_data_ls.tile1.nc] + - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ls.tile2.nc, $(DATA)/INPUT/oro_data_ls.tile2.nc] + - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ls.tile3.nc, $(DATA)/INPUT/oro_data_ls.tile3.nc] + - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ls.tile4.nc, $(DATA)/INPUT/oro_data_ls.tile4.nc] + - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ls.tile5.nc, $(DATA)/INPUT/oro_data_ls.tile5.nc] + - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ls.tile6.nc, $(DATA)/INPUT/oro_data_ls.tile6.nc] + - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ss.tile1.nc, $(DATA)/INPUT/oro_data_ss.tile1.nc] + - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ss.tile2.nc, $(DATA)/INPUT/oro_data_ss.tile2.nc] + - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ss.tile3.nc, $(DATA)/INPUT/oro_data_ss.tile3.nc] + - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ss.tile4.nc, $(DATA)/INPUT/oro_data_ss.tile4.nc] + - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ss.tile5.nc, $(DATA)/INPUT/oro_data_ss.tile5.nc] + - [$(FIX_ugwd)/$(atm_res)/$(atm_res)_oro_data_ss.tile6.nc, $(DATA)/INPUT/oro_data_ss.tile6.nc] + + # GWD?? 
+ - [$(FIX_ugwd)/ugwp_limb_tau.nc, $(DATA)/ugwp_limb_tau.nc] + + # CO2 climatology + - [$(FIX_am)/co2monthlycyc.txt, $(DATA)/co2monthlycyc.txt] + - [$(FIX_am)/global_co2historicaldata_glob.txt, $(DATA)/co2historicaldata_glob.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2009.txt, $(DATA)/co2historicaldata_2009.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2010.txt, $(DATA)/co2historicaldata_2010.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2011.txt, $(DATA)/co2historicaldata_2011.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2012.txt, $(DATA)/co2historicaldata_2012.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2013.txt, $(DATA)/co2historicaldata_2013.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2014.txt, $(DATA)/co2historicaldata_2014.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2015.txt, $(DATA)/co2historicaldata_2015.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2016.txt, $(DATA)/co2historicaldata_2016.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2017.txt, $(DATA)/co2historicaldata_2017.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2018.txt, $(DATA)/co2historicaldata_2018.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2019.txt, $(DATA)/co2historicaldata_2019.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2020.txt, $(DATA)/co2historicaldata_2020.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2021.txt, $(DATA)/co2historicaldata_2021.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2022.txt, $(DATA)/co2historicaldata_2022.txt] + - [$(FIX_am)/fix_co2_proj/global_co2historicaldata_2023.txt, $(DATA)/co2historicaldata_2023.txt] + + # FIX_am files + - [$(FIX_am)/global_climaeropac_global.txt, $(DATA)/aerosol.dat] + - [$(FIX_am)/ozprdlos_2015_new_sbuvO3_tclm15_nuchem.f77, $(DATA)/global_o3prdlos.f77] + - [$(FIX_am)/global_h2o_pltc.f77, $(DATA)/global_h2oprdlos.f77] + - [$(FIX_am)/global_glacier.2x2.grb, $(DATA)/global_glacier.2x2.grb] + - [$(FIX_am)/global_maxice.2x2.grb, $(DATA)/global_maxice.2x2.grb] + - [$(FIX_am)/global_snoclim.1.875.grb, $(DATA)/global_snoclim.1.875.grb] + - [$(FIX_am)/global_slmask.t1534.3072.1536.grb, $(DATA)/global_slmask.t1534.3072.1536.grb] + - [$(FIX_am)/global_soilmgldas.statsgo.t1534.3072.1536.grb, $(DATA)/global_soilmgldas.statsgo.t1534.3072.1536.grb] + - [$(FIX_am)/global_solarconstant_noaa_an.txt, $(DATA)/solarconstant_noaa_an.txt] + - [$(FIX_am)/global_sfc_emissivity_idx.txt, $(DATA)/sfc_emissivity_idx.txt] + - [$(FIX_am)/RTGSST.1982.2012.monthly.clim.grb, $(DATA)/RTGSST.1982.2012.monthly.clim.grb] + - [$(FIX_am)/IMS-NIC.blended.ice.monthly.clim.grb, $(DATA)/IMS-NIC.blended.ice.monthly.clim.grb] + + # MERRA2 Aerosol Climatology + - [$(FIX_aer)/merra2.aerclim.2003-2014.m01.nc, $(DATA)/aeroclim.m01.nc] + - [$(FIX_aer)/merra2.aerclim.2003-2014.m02.nc, $(DATA)/aeroclim.m02.nc] + - [$(FIX_aer)/merra2.aerclim.2003-2014.m03.nc, $(DATA)/aeroclim.m03.nc] + - [$(FIX_aer)/merra2.aerclim.2003-2014.m04.nc, $(DATA)/aeroclim.m04.nc] + - [$(FIX_aer)/merra2.aerclim.2003-2014.m05.nc, $(DATA)/aeroclim.m05.nc] + - [$(FIX_aer)/merra2.aerclim.2003-2014.m06.nc, $(DATA)/aeroclim.m06.nc] + - [$(FIX_aer)/merra2.aerclim.2003-2014.m07.nc, $(DATA)/aeroclim.m07.nc] + - [$(FIX_aer)/merra2.aerclim.2003-2014.m08.nc, $(DATA)/aeroclim.m08.nc] + - [$(FIX_aer)/merra2.aerclim.2003-2014.m09.nc, $(DATA)/aeroclim.m09.nc] + - [$(FIX_aer)/merra2.aerclim.2003-2014.m10.nc, $(DATA)/aeroclim.m10.nc] + - 
[$(FIX_aer)/merra2.aerclim.2003-2014.m11.nc, $(DATA)/aeroclim.m11.nc] + - [$(FIX_aer)/merra2.aerclim.2003-2014.m12.nc, $(DATA)/aeroclim.m12.nc] + + # Optical depth + - [$(FIX_lut)/optics_BC.v1_3.dat, $(DATA)/optics_BC.dat] + - [$(FIX_lut)/optics_DU.v15_3.dat, $(DATA)/optics_DU.dat] + - [$(FIX_lut)/optics_OC.v1_3.dat, $(DATA)/optics_OC.dat] + - [$(FIX_lut)/optics_SS.v3_3.dat, $(DATA)/optics_SS.dat] + - [$(FIX_lut)/optics_SU.v1_3.dat, $(DATA)/optics_SU.dat] + + # fd_nems.yaml file + - [$(HOMEgfs)/sorc/ufs_model.fd/tests/parm/fd_nems.yaml, $(DATA)/] diff --git a/parm/ufs/fix/gfs/land.fixed_files.yaml b/parm/ufs/fix/gfs/land.fixed_files.yaml new file mode 100644 index 0000000000..ab93ff27a6 --- /dev/null +++ b/parm/ufs/fix/gfs/land.fixed_files.yaml @@ -0,0 +1,58 @@ +copy: + + # Files from FIX_orog/C??.mx??_frac/fix_sfc + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).facsf.tile1.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).facsf.tile2.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).facsf.tile3.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).facsf.tile4.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).facsf.tile5.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).facsf.tile6.nc, $(DATA)/] + + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).maximum_snow_albedo.tile1.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).maximum_snow_albedo.tile2.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).maximum_snow_albedo.tile3.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).maximum_snow_albedo.tile4.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).maximum_snow_albedo.tile5.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).maximum_snow_albedo.tile6.nc, $(DATA)/] + + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).slope_type.tile1.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).slope_type.tile2.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).slope_type.tile3.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).slope_type.tile4.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).slope_type.tile5.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).slope_type.tile6.nc, $(DATA)/] + + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).snowfree_albedo.tile1.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).snowfree_albedo.tile2.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).snowfree_albedo.tile3.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).snowfree_albedo.tile4.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).snowfree_albedo.tile5.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).snowfree_albedo.tile6.nc, $(DATA)/] + + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).soil_type.tile1.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).soil_type.tile2.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).soil_type.tile3.nc, $(DATA)/] + - 
[$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).soil_type.tile4.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).soil_type.tile5.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).soil_type.tile6.nc, $(DATA)/] + + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).substrate_temperature.tile1.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).substrate_temperature.tile2.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).substrate_temperature.tile3.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).substrate_temperature.tile4.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).substrate_temperature.tile5.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).substrate_temperature.tile6.nc, $(DATA)/] + + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_greenness.tile1.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_greenness.tile2.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_greenness.tile3.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_greenness.tile4.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_greenness.tile5.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_greenness.tile6.nc, $(DATA)/] + + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_type.tile1.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_type.tile2.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_type.tile3.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_type.tile4.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_type.tile5.nc, $(DATA)/] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/fix_sfc/$(atm_res).vegetation_type.tile6.nc, $(DATA)/] diff --git a/parm/ufs/fix/gfs/ocean.fixed_files.yaml b/parm/ufs/fix/gfs/ocean.fixed_files.yaml new file mode 100644 index 0000000000..801f070c49 --- /dev/null +++ b/parm/ufs/fix/gfs/ocean.fixed_files.yaml @@ -0,0 +1,10 @@ +copy: + + # Orography data tile files + # The following are for "frac_grid = .true." 
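The fixed_files YAML lists added above are plain copy manifests: each entry under copy: is a [source, destination] pair, with $(FIX_orog), $(atm_res), $(ocn_res), $(DATA), and similar tokens standing in for resolved paths. A minimal illustrative consumer in Python, assuming those tokens have already been expanded to concrete paths (that expansion is handled elsewhere and is not shown in this patch):

    import shutil
    import yaml  # PyYAML

    # Load a fixed-files manifest and copy each [source, destination] pair.
    # Assumes the $(...) placeholders were already expanded to real paths.
    with open("parm/ufs/fix/gfs/ocean.fixed_files.yaml") as f:
        manifest = yaml.safe_load(f)

    for src, dst in manifest["copy"]:
        # A destination ending in "/" is a directory; shutil.copy keeps the file name.
        shutil.copy(src, dst)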
+ - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/oro_$(atm_res).mx$(ocn_res).tile1.nc, $(DATA)/INPUT/oro_data.tile1.nc] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/oro_$(atm_res).mx$(ocn_res).tile2.nc, $(DATA)/INPUT/oro_data.tile2.nc] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/oro_$(atm_res).mx$(ocn_res).tile3.nc, $(DATA)/INPUT/oro_data.tile3.nc] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/oro_$(atm_res).mx$(ocn_res).tile4.nc, $(DATA)/INPUT/oro_data.tile4.nc] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/oro_$(atm_res).mx$(ocn_res).tile5.nc, $(DATA)/INPUT/oro_data.tile5.nc] + - [$(FIX_orog)/$(atm_res).mx$(ocn_res)_frac/oro_$(atm_res).mx$(ocn_res).tile6.nc, $(DATA)/INPUT/oro_data.tile6.nc] diff --git a/parm/wave/bull_awips_gfswave b/parm/wave/bull_awips_gfswave new file mode 100644 index 0000000000..87aa19fe48 --- /dev/null +++ b/parm/wave/bull_awips_gfswave @@ -0,0 +1,496 @@ +# Gulf of Alaska (AG) Spectral data (4) near S/SW Alaska Anchorage (8) +export b46001=AGGA48_KWBJ_OSBM01 +export b46066=AGGA48_KWBJ_OSBM02 +export b46061=AGGA48_KWBJ_OSBM03 +export b46075=AGGA48_KWBJ_OSBM04 +export b46076=AGGA48_KWBJ_OSBM05 +export b46078=AGGA48_KWBJ_OSBM06 +export b46106=AGGA48_KWBJ_OSBM07 +export b46080=AGGA48_KWBJ_OSBM08 +export b46108=AGGA48_KWBJ_OSBM09 +export b46021=AGGA48_KWBJ_OSBM10 +export b46060=AGGA48_KWBJ_OSBM11 +export b46077=AGGA48_KWBJ_OSBM12 +export b46079=AGGA48_KWBJ_OSBM13 +export b46105=AGGA48_KWBJ_OSBM14 +export b46107=AGGA48_KWBJ_OSBM15 +export b46265=AGGA48_KWBJ_OSBM16 +# Gulf of Alaska (AG) Spectral data (4) near Alaska Panhandle and NBC (7) +export b46004=AGGA47_KWBJ_OSBM01 +export b46184=AGGA47_KWBJ_OSBM02 +export b46082=AGGA47_KWBJ_OSBM03 +export b46083=AGGA47_KWBJ_OSBM04 +export b46084=AGGA47_KWBJ_OSBM05 +export b46085=AGGA47_KWBJ_OSBM06 +export b46205=AGGA47_KWBJ_OSBM07 +export b46145=AGGA47_KWBJ_OSBM08 +export b46147=AGGA47_KWBJ_OSBM09 +export b46183=AGGA47_KWBJ_OSBM10 +export b46185=AGGA47_KWBJ_OSBM11 +export b46204=AGGA47_KWBJ_OSBM12 +export b46207=AGGA47_KWBJ_OSBM13 +export b46208=AGGA47_KWBJ_OSBM14 +export b46138=AGGA47_KWBJ_OSBM15 +# Eastern Pacific (PZ) spectral data (4) near Pacific states and SBC (6) +export b46002=AGPZ46_KWBJ_OSBM01 +export b46006=AGPZ46_KWBJ_OSBM02 +export b46059=AGPZ46_KWBJ_OSBM03 +export b46011=AGPZ46_KWBJ_OSBM04 +export b46012=AGPZ46_KWBJ_OSBM05 +export b46013=AGPZ46_KWBJ_OSBM06 +export b46014=AGPZ46_KWBJ_OSBM07 +export b46022=AGPZ46_KWBJ_OSBM08 +export b46023=AGPZ46_KWBJ_OSBM09 +export b46026=AGPZ46_KWBJ_OSBM10 +export b46027=AGPZ46_KWBJ_OSBM11 +export b46015=AGPZ46_KWBJ_OSBM12 +export b46025=AGPZ46_KWBJ_OSBM13 +export b46028=AGPZ46_KWBJ_OSBM14 +export b46030=AGPZ46_KWBJ_OSBM15 +export b46042=AGPZ46_KWBJ_OSBM16 +export b46047=AGPZ46_KWBJ_OSBM17 +export b46050=AGPZ46_KWBJ_OSBM18 +export b46053=AGPZ46_KWBJ_OSBM19 +export b46054=AGPZ46_KWBJ_OSBM20 +export b46062=AGPZ46_KWBJ_OSBM21 +export b46063=AGPZ46_KWBJ_OSBM22 +export b46069=AGPZ46_KWBJ_OSBM23 +export b46086=AGPZ46_KWBJ_OSBM24 +export b46089=AGPZ46_KWBJ_OSBM25 +export b46213=AGPZ46_KWBJ_OSBM26 +export b46214=AGPZ46_KWBJ_OSBM27 +export b46216=AGPZ46_KWBJ_OSBM28 +export b46217=AGPZ46_KWBJ_OSBM29 +export b46218=AGPZ46_KWBJ_OSBM30 +export b46219=AGPZ46_KWBJ_OSBM31 +export b46221=AGPZ46_KWBJ_OSBM32 +export b46222=AGPZ46_KWBJ_OSBM33 +export b46223=AGPZ46_KWBJ_OSBM34 +export b46224=AGPZ46_KWBJ_OSBM35 +export b46225=AGPZ46_KWBJ_OSBM36 +export b46227=AGPZ46_KWBJ_OSBM37 +export b46229=AGPZ46_KWBJ_OSBM38 +export b46231=AGPZ46_KWBJ_OSBM39 +export b46232=AGPZ46_KWBJ_OSBM40 +export 
b46215=AGPZ46_KWBJ_OSBM41 +export b46236=AGPZ46_KWBJ_OSBM42 +export b46237=AGPZ46_KWBJ_OSBM43 +export b46238=AGPZ46_KWBJ_OSBM44 +export b46239=AGPZ46_KWBJ_OSBM45 +export b46240=AGPZ46_KWBJ_OSBM46 +export b46243=AGPZ46_KWBJ_OSBM47 +export b46244=AGPZ46_KWBJ_OSBM48 +export b46246=AGPZ46_KWBJ_OSBM49 +export b46248=AGPZ46_KWBJ_OSBM50 +export b46024=AGPZ46_KWBJ_OSBM51 +export b46091=AGPZ46_KWBJ_OSBM52 +export b46092=AGPZ46_KWBJ_OSBM53 +export b46093=AGPZ46_KWBJ_OSBM54 +export b46094=AGPZ46_KWBJ_OSBM55 +export b46097=AGPZ46_KWBJ_OSBM56 +export b46098=AGPZ46_KWBJ_OSBM57 +export b46114=AGPZ46_KWBJ_OSBM58 +export b46212=AGPZ46_KWBJ_OSBM59 +export b46226=AGPZ46_KWBJ_OSBM60 +export b46233=AGPZ46_KWBJ_OSBM61 +export b46235=AGPZ46_KWBJ_OSBM62 +export b46242=AGPZ46_KWBJ_OSBM63 +export b46247=AGPZ46_KWBJ_OSBM64 +export b46249=AGPZ46_KWBJ_OSBM65 +export b46250=AGPZ46_KWBJ_OSBM66 +export b46251=AGPZ46_KWBJ_OSBM67 +export b46252=AGPZ46_KWBJ_OSBM68 +export b46253=AGPZ46_KWBJ_OSBM69 +export b46254=AGPZ46_KWBJ_OSBM70 +export b46255=AGPZ46_KWBJ_OSBM71 +export b46256=AGPZ46_KWBJ_OSBM72 +export b46257=AGPZ46_KWBJ_OSBM73 +export b46258=AGPZ46_KWBJ_OSBM74 +export b46259=AGPZ46_KWBJ_OSBM75 +export b46262=AGPZ46_KWBJ_OSBM76 +# Eastern Pacific (PZ) spectral data (4) near Alaska Panhandle and NBC (7) +export b46005=AGPZ47_KWBJ_OSBM01 +export b46036=AGPZ47_KWBJ_OSBM02 +export b46132=AGPZ47_KWBJ_OSBM03 +export b46206=AGPZ47_KWBJ_OSBM04 +export b46029=AGPZ47_KWBJ_OSBM05 +export b46041=AGPZ47_KWBJ_OSBM06 +export b46087=AGPZ47_KWBJ_OSBM07 +export b46211=AGPZ47_KWBJ_OSBM08 +export b46088=AGPZ47_KWBJ_OSBM09 +export b46096=AGPZ47_KWBJ_OSBM10 +export b46099=AGPZ47_KWBJ_OSBM11 +export b46100=AGPZ47_KWBJ_OSBM12 +export b46119=AGPZ47_KWBJ_OSBM13 +export b46127=AGPZ47_KWBJ_OSBM14 +export b46139=AGPZ47_KWBJ_OSBM15 +export b46264=AGPZ47_KWBJ_OSBM16 +# North Pacific and Behring Sea (PN) spectra (4) near S/SW Alaska Anchorage (8) +export b46035=AGPN48_KWBJ_OSBM01 +export b46070=AGPN48_KWBJ_OSBM02 +export b46073=AGPN48_KWBJ_OSBM03 +export b46071=AGPN48_KWBJ_OSBM04 +export b46072=AGPN48_KWBJ_OSBM05 +export b46020=AGPN48_KWBJ_OSBM06 +# Hawaiian waters (HW) spectra (4) in Pacific Ocean and Pacific Isles (0) +export b51001=AGHW40_KWBJ_OSBM01 +export b51002=AGHW40_KWBJ_OSBM02 +export b51003=AGHW40_KWBJ_OSBM03 +export b51004=AGHW40_KWBJ_OSBM04 +export b51201=AGHW40_KWBJ_OSBM05 +export b51202=AGHW40_KWBJ_OSBM06 +export b51000=AGHW40_KWBJ_OSBM07 +export b51100=AGHW40_KWBJ_OSBM08 +export b51101=AGHW40_KWBJ_OSBM09 +export b51203=AGHW40_KWBJ_OSBM10 +export b51204=AGHW40_KWBJ_OSBM11 +export b51205=AGHW40_KWBJ_OSBM12 +export b51206=AGHW40_KWBJ_OSBM13 +export b51207=AGHW40_KWBJ_OSBM14 +export b51028=AGHW40_KWBJ_OSBM15 +export b51200=AGHW40_KWBJ_OSBM16 +export b51208=AGHW40_KWBJ_OSBM17 +export b51209=AGHW40_KWBJ_OSBM18 +export b51210=AGHW40_KWBJ_OSBM19 +export b52212=AGHW40_KWBJ_OSBM20 +export b51211=AGHW40_KWBJ_OSBM21 +export b51212=AGHW40_KWBJ_OSBM22 +export b51213=AGHW40_KWBJ_OSBM23 +# Western Pacific (PW) spectra (4) in Pacific Ocean and Pacific Isles (0) +export b52200=AGPW40_KWBJ_OSBM01 +export b22101=AGPW40_KWBJ_OSBM02 +export b22102=AGPW40_KWBJ_OSBM03 +export b22103=AGPW40_KWBJ_OSBM04 +export b22104=AGPW40_KWBJ_OSBM05 +export b22105=AGPW40_KWBJ_OSBM06 +export b52201=AGPW40_KWBJ_OSBM07 +export b52202=AGPW40_KWBJ_OSBM08 +export b52211=AGPW40_KWBJ_OSBM09 +export b21178=AGPW40_KWBJ_OSBM10 +export b21229=AGPW40_KWBJ_OSBM11 +export b22108=AGPW40_KWBJ_OSBM12 +export b22184=AGPW40_KWBJ_OSBM13 +export b22185=AGPW40_KWBJ_OSBM14 +export 
b22186=AGPW40_KWBJ_OSBM15 +export b22187=AGPW40_KWBJ_OSBM16 +export b22188=AGPW40_KWBJ_OSBM17 +export b22189=AGPW40_KWBJ_OSBM18 +export b22190=AGPW40_KWBJ_OSBM19 +# South Pacific (PS) in Pacific Ocean and Pacific Isles (0) +export b55020=AGPS40_KWBJ_OSBM01 +export b55033=AGPS40_KWBJ_OSBM02 +export b55035=AGPS40_KWBJ_OSBM03 +export b55039=AGPS40_KWBJ_OSBM04 +# Gulf of Mexico (GX) spectra (4) south from NC and Puerto Rico (2) +export b42001=AGGX42_KWBJ_OSBM01 +export b42002=AGGX42_KWBJ_OSBM02 +export b42003=AGGX42_KWBJ_OSBM03 +export b42007=AGGX42_KWBJ_OSBM04 +export b42019=AGGX42_KWBJ_OSBM05 +export b42020=AGGX42_KWBJ_OSBM06 +export b42035=AGGX42_KWBJ_OSBM07 +export b42036=AGGX42_KWBJ_OSBM08 +export b42039=AGGX42_KWBJ_OSBM09 +export b42040=AGGX42_KWBJ_OSBM10 +export b42041=AGGX42_KWBJ_OSBM11 +export b42038=AGGX42_KWBJ_OSBM12 +export b42055=AGGX42_KWBJ_OSBM13 +export b42099=AGGX42_KWBJ_OSBM14 +export b42012=AGGX42_KWBJ_OSBM15 +export b42887=AGGX42_KWBJ_OSBM16 +export b42013=AGGX42_KWBJ_OSBM17 +export b42014=AGGX42_KWBJ_OSBM18 +export b42021=AGGX42_KWBJ_OSBM19 +export b42022=AGGX42_KWBJ_OSBM20 +export b42023=AGGX42_KWBJ_OSBM21 +export b42043=AGGX42_KWBJ_OSBM22 +export b42044=AGGX42_KWBJ_OSBM23 +export b42045=AGGX42_KWBJ_OSBM24 +export b42046=AGGX42_KWBJ_OSBM25 +export b42047=AGGX42_KWBJ_OSBM26 +export b42067=AGGX42_KWBJ_OSBM27 +export b42097=AGGX42_KWBJ_OSBM28 +export b42098=AGGX42_KWBJ_OSBM29 +export b42360=AGGX42_KWBJ_OSBM30 +export b42361=AGGX42_KWBJ_OSBM31 +export b42362=AGGX42_KWBJ_OSBM32 +export b42363=AGGX42_KWBJ_OSBM33 +export b42364=AGGX42_KWBJ_OSBM34 +export b42365=AGGX42_KWBJ_OSBM35 +export b42369=AGGX42_KWBJ_OSBM36 +export b42370=AGGX42_KWBJ_OSBM37 +export b42374=AGGX42_KWBJ_OSBM38 +export b42375=AGGX42_KWBJ_OSBM39 +export b42376=AGGX42_KWBJ_OSBM40 +export b42390=AGGX42_KWBJ_OSBM41 +export b42392=AGGX42_KWBJ_OSBM42 +export b42394=AGGX42_KWBJ_OSBM43 +export b42395=AGGX42_KWBJ_OSBM44 +# Caribbean Sea (CA) spectra (4) south from NC and Puerto Rico (2) +export b42056=AGCA42_KWBJ_OSBM01 +export b42057=AGCA42_KWBJ_OSBM02 +export b42058=AGCA42_KWBJ_OSBM03 +export b42080=AGCA42_KWBJ_OSBM04 +export b42059=AGCA42_KWBJ_OSBM05 +export b32012=AGCA42_KWBJ_OSBM06 +export b42060=AGCA42_KWBJ_OSBM07 +export b41194=AGCA42_KWBJ_OSBM08 +export b42085=AGCA42_KWBJ_OSBM09 +export b42089=AGCA42_KWBJ_OSBM10 +export b41052=AGCA42_KWBJ_OSBM11 +export b41051=AGCA42_KWBJ_OSBM12 +export b41056=AGCA42_KWBJ_OSBM13 +export b41115=AGCA42_KWBJ_OSBM14 +export b41117=AGCA42_KWBJ_OSBM15 +export b42079=AGCA42_KWBJ_OSBM16 +export b42086=AGCA42_KWBJ_OSBM17 +export b42095=AGCA42_KWBJ_OSBM18 +# Western Atlantic (NT) spectra (4) south from NC and Puerto Rico (2) +export b41001=AGNT42_KWBJ_OSBM01 +export b41002=AGNT42_KWBJ_OSBM02 +export b41004=AGNT42_KWBJ_OSBM03 +export b41008=AGNT42_KWBJ_OSBM04 +export b41009=AGNT42_KWBJ_OSBM05 +export b41010=AGNT42_KWBJ_OSBM06 +export b41012=AGNT42_KWBJ_OSBM07 +export b41013=AGNT42_KWBJ_OSBM08 +export b41025=AGNT42_KWBJ_OSBM09 +export b41035=AGNT42_KWBJ_OSBM10 +export b41036=AGNT42_KWBJ_OSBM11 +export b41043=AGNT42_KWBJ_OSBM12 +export b41046=AGNT42_KWBJ_OSBM13 +export b41047=AGNT42_KWBJ_OSBM14 +export b41048=AGNT42_KWBJ_OSBM15 +export b41112=AGNT42_KWBJ_OSBM16 +export b41113=AGNT42_KWBJ_OSBM17 +export b41114=AGNT42_KWBJ_OSBM18 +export b44014=AGNT42_KWBJ_OSBM19 +export b41037=AGNT42_KWBJ_OSBM20 +export b41038=AGNT42_KWBJ_OSBM21 +export b41049=AGNT42_KWBJ_OSBM22 +export b41044=AGNT42_KWBJ_OSBM23 +export b41109=AGNT42_KWBJ_OSBM24 +export b41110=AGNT42_KWBJ_OSBM25 +export 
b41111=AGNT42_KWBJ_OSBM26 +export b41053=AGNT42_KWBJ_OSBM27 +export b41058=AGNT42_KWBJ_OSBM28 +export b41024=AGNT42_KWBJ_OSBM29 +export b41027=AGNT42_KWBJ_OSBM30 +export b41029=AGNT42_KWBJ_OSBM31 +export b41030=AGNT42_KWBJ_OSBM32 +export b41033=AGNT42_KWBJ_OSBM33 +export b41061=AGNT42_KWBJ_OSBM34 +export b41062=AGNT42_KWBJ_OSBM35 +export b41063=AGNT42_KWBJ_OSBM36 +export b41064=AGNT42_KWBJ_OSBM37 +export b41108=AGNT42_KWBJ_OSBM38 +export b41159=AGNT42_KWBJ_OSBM39 +export b44056=AGNT42_KWBJ_OSBM40 +# Western Atlantic (NT) spectra (4) NE states north of VA (1) +export b44138=AGNT41_KWBJ_OSBM01 +export b44011=AGNT41_KWBJ_OSBM02 +export b44141=AGNT41_KWBJ_OSBM03 +export b44142=AGNT41_KWBJ_OSBM04 +export bWRB07=AGNT41_KWBJ_OSBM05 +export b44137=AGNT41_KWBJ_OSBM06 +export b44139=AGNT41_KWBJ_OSBM07 +export b44140=AGNT41_KWBJ_OSBM08 +export b44150=AGNT41_KWBJ_OSBM09 +export b44004=AGNT41_KWBJ_OSBM10 +export b44005=AGNT41_KWBJ_OSBM11 +export b44008=AGNT41_KWBJ_OSBM12 +export b44009=AGNT41_KWBJ_OSBM13 +export b44017=AGNT41_KWBJ_OSBM14 +export b44018=AGNT41_KWBJ_OSBM15 +export b44025=AGNT41_KWBJ_OSBM16 +export b44070=AGNT41_KWBJ_OSBM17 +export b44024=AGNT41_KWBJ_OSBM18 +export b44027=AGNT41_KWBJ_OSBM19 +export b44037=AGNT41_KWBJ_OSBM20 +export b44038=AGNT41_KWBJ_OSBM21 +export b44251=AGNT41_KWBJ_OSBM22 +export b44255=AGNT41_KWBJ_OSBM23 +export b44099=AGNT41_KWBJ_OSBM24 +export b44100=AGNT41_KWBJ_OSBM25 +export b44066=AGNT41_KWBJ_OSBM26 +export b44093=AGNT41_KWBJ_OSBM27 +export b44095=AGNT41_KWBJ_OSBM28 +export b44096=AGNT41_KWBJ_OSBM29 +export b44097=AGNT41_KWBJ_OSBM30 +export b44098=AGNT41_KWBJ_OSBM31 +export b44007=AGNT41_KWBJ_OSBM32 +export b44013=AGNT41_KWBJ_OSBM33 +export b44020=AGNT41_KWBJ_OSBM34 +export b44029=AGNT41_KWBJ_OSBM35 +export b44030=AGNT41_KWBJ_OSBM36 +export b44031=AGNT41_KWBJ_OSBM37 +export b44032=AGNT41_KWBJ_OSBM38 +export b44033=AGNT41_KWBJ_OSBM39 +export b44034=AGNT41_KWBJ_OSBM40 +export b44039=AGNT41_KWBJ_OSBM41 +export b44040=AGNT41_KWBJ_OSBM42 +export b44043=AGNT41_KWBJ_OSBM43 +export b44054=AGNT41_KWBJ_OSBM44 +export b44055=AGNT41_KWBJ_OSBM45 +export b44058=AGNT41_KWBJ_OSBM46 +export b44060=AGNT41_KWBJ_OSBM47 +export b44061=AGNT41_KWBJ_OSBM48 +export b44062=AGNT41_KWBJ_OSBM49 +export b44063=AGNT41_KWBJ_OSBM50 +export b44064=AGNT41_KWBJ_OSBM51 +export b44065=AGNT41_KWBJ_OSBM52 +export b44072=AGNT41_KWBJ_OSBM53 +export b44089=AGNT41_KWBJ_OSBM54 +export b44090=AGNT41_KWBJ_OSBM55 +export b44091=AGNT41_KWBJ_OSBM56 +export b44092=AGNT41_KWBJ_OSBM57 +export b44094=AGNT41_KWBJ_OSBM58 +export b44172=AGNT41_KWBJ_OSBM59 +export b44235=AGNT41_KWBJ_OSBM60 +export b44087=AGNT41_KWBJ_OSBM61 +# Western Atlantic (NT) spectra (4) near South America (3) +export b31201=AGNT43_KWBJ_OSBM01 +export b31052=AGNT43_KWBJ_OSBM02 +export b31260=AGNT43_KWBJ_OSBM03 +export b31374=AGNT43_KWBJ_OSBM04 +export b31051=AGNT43_KWBJ_OSBM05 +export b31053=AGNT43_KWBJ_OSBM06 +export b31375=AGNT43_KWBJ_OSBM07 +# Tropical Belt (XT) spectra (4) near South America (3) +export b41040=AGXT43_KWBJ_OSBM01 +export b41041=AGXT43_KWBJ_OSBM02 +export b41100=AGXT43_KWBJ_OSBM03 +export b41101=AGXT43_KWBJ_OSBM04 +export b41060=AGXT43_KWBJ_OSBM05 +export b42087=AGXT43_KWBJ_OSBM06 +export b42088=AGXT43_KWBJ_OSBM07 +# Tropical Belt (XT) spectra (4) in Pacific Ocean and Pacific Isles (0) +export b43010=AGXT40_KWBJ_OSBM01 +export b52009=AGXT40_KWBJ_OSBM02 +# Eastern Atlantic (ET) spectra (3) near Europe (3) +export b62001=AGET43_KWBJ_OSBM01 +export b62002=AGET43_KWBJ_OSBM02 +export b62029=AGET43_KWBJ_OSBM03 +export 
b62023=AGET43_KWBJ_OSBM04 +export b62052=AGET43_KWBJ_OSBM05 +export b62081=AGET43_KWBJ_OSBM06 +export b62090=AGET43_KWBJ_OSBM07 +export b62091=AGET43_KWBJ_OSBM08 +export b62092=AGET43_KWBJ_OSBM09 +export b62093=AGET43_KWBJ_OSBM10 +export b62094=AGET43_KWBJ_OSBM11 +export b62095=AGET43_KWBJ_OSBM12 +export b62103=AGET43_KWBJ_OSBM13 +export b62105=AGET43_KWBJ_OSBM14 +export b62106=AGET43_KWBJ_OSBM15 +export b62107=AGET43_KWBJ_OSBM16 +export b62108=AGET43_KWBJ_OSBM17 +export b62163=AGET43_KWBJ_OSBM18 +export b62301=AGET43_KWBJ_OSBM19 +export b62303=AGET43_KWBJ_OSBM20 +export b62305=AGET43_KWBJ_OSBM21 +export b62170=AGET43_KWBJ_OSBM22 +export b64045=AGET43_KWBJ_OSBM23 +export b64046=AGET43_KWBJ_OSBM24 +export bTFGSK=AGET43_KWBJ_OSBM25 +export bTFHFN=AGET43_KWBJ_OSBM26 +export bTFSRT=AGET43_KWBJ_OSBM27 +export bLF3F=AGET43_KWBJ_OSBM28 +export b62026=AGET43_KWBJ_OSBM29 +export b62109=AGET43_KWBJ_OSBM30 +export b62111=AGET43_KWBJ_OSBM31 +export b62112=AGET43_KWBJ_OSBM32 +export b62116=AGET43_KWBJ_OSBM33 +export b62117=AGET43_KWBJ_OSBM34 +export b62119=AGET43_KWBJ_OSBM35 +export b62128=AGET43_KWBJ_OSBM36 +export b62132=AGET43_KWBJ_OSBM37 +export b62133=AGET43_KWBJ_OSBM38 +export b62142=AGET43_KWBJ_OSBM39 +export b62143=AGET43_KWBJ_OSBM40 +export b62144=AGET43_KWBJ_OSBM41 +export b62145=AGET43_KWBJ_OSBM42 +export b62152=AGET43_KWBJ_OSBM43 +export b62162=AGET43_KWBJ_OSBM44 +export b62164=AGET43_KWBJ_OSBM45 +export b62304=AGET43_KWBJ_OSBM46 +export b63055=AGET43_KWBJ_OSBM47 +export b63056=AGET43_KWBJ_OSBM48 +export b63057=AGET43_KWBJ_OSBM49 +export b63103=AGET43_KWBJ_OSBM50 +export b63108=AGET43_KWBJ_OSBM51 +export b63110=AGET43_KWBJ_OSBM52 +export b63112=AGET43_KWBJ_OSBM53 +export b63113=AGET43_KWBJ_OSBM54 +export b63115=AGET43_KWBJ_OSBM55 +export bLF3J=AGET43_KWBJ_OSBM56 +export bLF4B=AGET43_KWBJ_OSBM57 +export bLF4H=AGET43_KWBJ_OSBM58 +export bLF4C=AGET43_KWBJ_OSBM59 +export bLF5U=AGET43_KWBJ_OSBM60 +export bEURO=AGET43_KWBJ_OSBM61 +export bK13=AGET43_KWBJ_OSBM62 +export b62024=AGET43_KWBJ_OSBM63 +export b62082=AGET43_KWBJ_OSBM64 +export b62084=AGET43_KWBJ_OSBM65 +export b62085=AGET43_KWBJ_OSBM66 +export b13130=AGET43_KWBJ_OSBM67 +export b13131=AGET43_KWBJ_OSBM68 +export b62118=AGET43_KWBJ_OSBM69 +export b62146=AGET43_KWBJ_OSBM70 +export bBSH01=AGET43_KWBJ_OSBM71 +export bBSH02=AGET43_KWBJ_OSBM72 +export bBSH03=AGET43_KWBJ_OSBM73 +export bBSH04=AGET43_KWBJ_OSBM74 +export bBSH05=AGET43_KWBJ_OSBM75 +# Arctic Ocean (AC) spectra (4) non-descript (3) +export bTFBLK=AGAC43_KWBJ_OSBM01 +export bTFGRS=AGAC43_KWBJ_OSBM02 +export bTFKGR=AGAC43_KWBJ_OSBM03 +export bLF3N=AGAC43_KWBJ_OSBM04 +export bLF5T=AGAC43_KWBJ_OSBM05 +export bLDWR=AGAC43_KWBJ_OSBM06 +export b3FYT=AGAC43_KWBJ_OSBM07 +export bLFB1=AGAC43_KWBJ_OSBM08 +export bLFB2=AGAC43_KWBJ_OSBM09 +export b64071=AGAC43_KWBJ_OSBM10 +export b48012=AGAC43_KWBJ_OSBM11 +export b48114=AGAC43_KWBJ_OSBM12 +export b48211=AGAC43_KWBJ_OSBM13 +export b48212=AGAC43_KWBJ_OSBM14 +export b48213=AGAC43_KWBJ_OSBM15 +export b48214=AGAC43_KWBJ_OSBM16 +export b48216=AGAC43_KWBJ_OSBM17 +# Indian Ocean (I) spectra (4) non-descript (5) +export b23092=AGIO45_KWBJ_OSBM01 +export b23093=AGIO45_KWBJ_OSBM02 +export b23094=AGIO45_KWBJ_OSBM03 +export b23096=AGIO45_KWBJ_OSBM04 +export b23097=AGIO45_KWBJ_OSBM05 +export b23098=AGIO45_KWBJ_OSBM06 +export b23099=AGIO45_KWBJ_OSBM07 +export b23100=AGIO45_KWBJ_OSBM08 +export b23101=AGIO45_KWBJ_OSBM09 +export b23168=AGIO45_KWBJ_OSBM10 +export b23169=AGIO45_KWBJ_OSBM11 +export b23170=AGIO45_KWBJ_OSBM12 +export b23172=AGIO45_KWBJ_OSBM13 +export 
b23173=AGIO45_KWBJ_OSBM14 +export b23174=AGIO45_KWBJ_OSBM15 +export b56002=AGIO45_KWBJ_OSBM16 +export b56005=AGIO45_KWBJ_OSBM17 +export b56006=AGIO45_KWBJ_OSBM18 +export b56007=AGIO45_KWBJ_OSBM19 +export bAGULHAS_FA=AGIO45_KWBJ_OSBM20 +export b56010=AGIO45_KWBJ_OSBM21 +export b56012=AGIO45_KWBJ_OSBM22 +export b23167=AGIO45_KWBJ_OSBM23 +export b23171=AGIO45_KWBJ_OSBM24 +export b23451=AGIO45_KWBJ_OSBM25 +export b23455=AGIO45_KWBJ_OSBM26 +export b23456=AGIO45_KWBJ_OSBM27 +export b23491=AGIO45_KWBJ_OSBM28 +export b23492=AGIO45_KWBJ_OSBM29 +export b23493=AGIO45_KWBJ_OSBM30 +export b23494=AGIO45_KWBJ_OSBM31 +export b23495=AGIO45_KWBJ_OSBM32 diff --git a/parm/wave/grib2_gfswave.ao_9km.f000 b/parm/wave/grib2_gfswave.ao_9km.f000 new file mode 100644 index 0000000000..bd8c07adfa --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f000 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTA88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTA88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATA88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTA88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTA88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTA88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTA88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTA88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTA88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 0 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTA88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 0 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTA88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTA88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 0 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTA88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 0 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTA88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTA88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 0 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPTA88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 0 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f003 b/parm/wave/grib2_gfswave.ao_9km.f003 new file mode 100644 index 0000000000..02a8fae550 --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f003 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTB88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTB88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATB88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTB88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTB88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTB88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTB88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTB88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTB88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 3 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTB88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 3 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTB88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTB88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 3 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTB88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 3 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTB88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTB88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 3 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTB88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 3 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f006 b/parm/wave/grib2_gfswave.ao_9km.f006 new file mode 100644 index 0000000000..9166dac9aa --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f006 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTC88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTC88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATC88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTC88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTC88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTC88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTC88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTC88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTC88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 6 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTC88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 6 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTC88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYTC88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 6 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTC88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 6 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTC88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTC88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 6 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTC88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 6 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f009 b/parm/wave/grib2_gfswave.ao_9km.f009 new file mode 100644 index 0000000000..ad03ea4703 --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f009 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTD88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTD88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATD88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTD88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTD88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTD88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTD88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTD88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTD88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 9 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTD88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 9 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTD88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTD88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 9 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTD88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 9 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTD88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTD88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 9 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPTD88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 9 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f012 b/parm/wave/grib2_gfswave.ao_9km.f012 new file mode 100644 index 0000000000..b7e1b8f637 --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f012 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTE88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTE88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATE88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTE88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTE88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTE88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTE88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTE88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTE88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 12 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTE88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 12 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTE88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTE88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 12 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTE88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 12 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTE88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTE88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 12 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTE88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 12 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f015 b/parm/wave/grib2_gfswave.ao_9km.f015 new file mode 100644 index 0000000000..bebde1b724 --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f015 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTF88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTF88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATF88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTF88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTF88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTF88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTF88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTF88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTF88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 15 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTF88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 15 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTF88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYTF88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 15 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTF88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 15 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTF88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTF88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 15 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTF88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 15 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f018 b/parm/wave/grib2_gfswave.ao_9km.f018 new file mode 100644 index 0000000000..98e94ed3ff --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f018 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTG88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTG88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATG88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTG88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTG88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTG88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTG88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTG88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTG88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 18 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTG88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 18 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTG88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTG88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 18 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTG88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 18 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTG88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTG88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 18 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPTG88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 18 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f021 b/parm/wave/grib2_gfswave.ao_9km.f021 new file mode 100644 index 0000000000..eaedce9ea6 --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f021 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTH88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTH88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATH88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTH88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTH88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTH88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTH88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTH88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTH88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 21 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTH88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 21 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTH88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTH88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 21 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTH88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 21 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTH88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTH88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 21 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTH88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 21 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f024 b/parm/wave/grib2_gfswave.ao_9km.f024 new file mode 100644 index 0000000000..64dfd856b1 --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f024 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTI88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTI88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATI88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTI88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTI88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTI88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTI88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTI88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTI88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 24 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTI88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 24 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTI88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYTI88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 24 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTI88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 24 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTI88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTI88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 24 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTI88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 24 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f027 b/parm/wave/grib2_gfswave.ao_9km.f027 new file mode 100644 index 0000000000..080077a2de --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f027 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTI88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTI88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATI88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTI88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTI88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTI88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTI88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTI88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTI88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 27 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTI88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 27 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTI88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTI88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 27 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTI88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 27 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTI88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTI88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 27 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPTI88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 27 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f030 b/parm/wave/grib2_gfswave.ao_9km.f030 new file mode 100644 index 0000000000..fc7a3a350e --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f030 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTJ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTJ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATJ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTJ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTJ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTJ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTJ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTJ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTJ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 30 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTJ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 30 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTJ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTJ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 30 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTJ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 30 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTJ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTJ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 30 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTJ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 30 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f033 b/parm/wave/grib2_gfswave.ao_9km.f033 new file mode 100644 index 0000000000..505911229f --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f033 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTJ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTJ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATJ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTJ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTJ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTJ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTJ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTJ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTJ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 33 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTJ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 33 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTJ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYTJ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 33 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTJ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 33 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTJ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTJ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 33 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTJ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 33 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f036 b/parm/wave/grib2_gfswave.ao_9km.f036 new file mode 100644 index 0000000000..56a5e0e2f2 --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f036 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTK88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTK88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATK88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTK88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTK88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTK88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTK88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTK88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTK88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 36 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTK88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 36 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTK88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTK88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 36 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTK88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 36 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTK88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTK88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 36 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPTK88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 36 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f039 b/parm/wave/grib2_gfswave.ao_9km.f039 new file mode 100644 index 0000000000..0693f2bc40 --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f039 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTK88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTK88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATK88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTK88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTK88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTK88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTK88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTK88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTK88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 39 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTK88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 39 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTK88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTK88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 39 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTK88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 39 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTK88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTK88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 39 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTK88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 39 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f042 b/parm/wave/grib2_gfswave.ao_9km.f042 new file mode 100644 index 0000000000..cac1f66a6c --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f042 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTL88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTL88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATL88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTL88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTL88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTL88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTL88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTL88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTL88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 42 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTL88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 42 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTL88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYTL88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 42 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTL88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 42 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTL88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTL88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 42 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTL88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 42 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f045 b/parm/wave/grib2_gfswave.ao_9km.f045 new file mode 100644 index 0000000000..f9a99d13bf --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f045 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTL88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTL88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATL88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTL88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTL88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTL88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTL88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTL88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTL88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 45 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTL88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 45 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTL88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTL88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 45 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTL88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 45 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTL88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTL88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 45 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPTL88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 45 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f048 b/parm/wave/grib2_gfswave.ao_9km.f048 new file mode 100644 index 0000000000..b570ab7c4d --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f048 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTM88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTM88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATM88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTM88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTM88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTM88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTM88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTM88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTM88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 48 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTM88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 48 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTM88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTM88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 48 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTM88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 48 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTM88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTM88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 48 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTM88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 48 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f051 b/parm/wave/grib2_gfswave.ao_9km.f051 new file mode 100644 index 0000000000..9c700657ca --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f051 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTM88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTM88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATM88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTM88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTM88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTM88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTM88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTM88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTM88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 51 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTM88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 51 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTM88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYTM88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 51 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTM88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 51 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTM88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTM88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 51 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTM88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 51 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f054 b/parm/wave/grib2_gfswave.ao_9km.f054 new file mode 100644 index 0000000000..4043a5e515 --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f054 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTX88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTX88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATX88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTX88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTX88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTX88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTX88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTX88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTX88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 54 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTX88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 54 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTX88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTX88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 54 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTX88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 54 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTX88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTX88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 54 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPTX88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 54 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f057 b/parm/wave/grib2_gfswave.ao_9km.f057 new file mode 100644 index 0000000000..50f40538e4 --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f057 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTX88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTX88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATX88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTX88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTX88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTX88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTX88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTX88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTX88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 57 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTX88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 57 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTX88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTX88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 57 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTX88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 57 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTX88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTX88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 57 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTX88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 57 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f060 b/parm/wave/grib2_gfswave.ao_9km.f060 new file mode 100644 index 0000000000..e696f31665 --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f060 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTN88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTN88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATN88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTN88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTN88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTN88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTN88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTN88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTN88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 60 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTN88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 60 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTN88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYTN88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 60 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTN88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 60 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTN88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTN88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 60 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTN88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 60 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f063 b/parm/wave/grib2_gfswave.ao_9km.f063 new file mode 100644 index 0000000000..c03ee50a2e --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f063 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTN88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTN88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATN88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTN88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTN88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTN88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTN88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTN88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTN88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 63 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTN88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 63 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTN88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTN88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 63 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTN88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 63 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTN88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTN88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 63 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPTN88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 63 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f066 b/parm/wave/grib2_gfswave.ao_9km.f066 new file mode 100644 index 0000000000..842ebdac19 --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f066 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTY88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTY88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATY88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTY88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTY88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTY88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTY88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTY88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTY88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 66 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTY88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 66 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTY88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTY88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 66 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTY88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 66 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTY88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTY88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 66 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTY88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 66 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f069 b/parm/wave/grib2_gfswave.ao_9km.f069 new file mode 100644 index 0000000000..2c44dd2bc8 --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f069 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTY88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTY88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATY88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTY88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTY88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTY88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTY88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTY88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTY88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 69 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTY88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 69 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTY88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYTY88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 69 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTY88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 69 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTY88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTY88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 69 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTY88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 69 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f072 b/parm/wave/grib2_gfswave.ao_9km.f072 new file mode 100644 index 0000000000..eb75a8f5af --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f072 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTO88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTO88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATO88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTO88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTO88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTO88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTO88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTO88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTO88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 72 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTO88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 72 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTO88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTO88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 72 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTO88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 72 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTO88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTO88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 72 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPTO88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 72 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f078 b/parm/wave/grib2_gfswave.ao_9km.f078 new file mode 100644 index 0000000000..c938a909e0 --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f078 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTO88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTO88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATO88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTO88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTO88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTO88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTO88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTO88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTO88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 78 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTO88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 78 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTO88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTO88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 78 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTO88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 78 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTO88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTO88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 78 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTO88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 78 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f084 b/parm/wave/grib2_gfswave.ao_9km.f084 new file mode 100644 index 0000000000..9f11fc5c18 --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f084 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTP88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTP88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATP88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTP88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTP88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTP88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTP88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTP88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTP88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 84 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTP88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 84 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTP88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYTP88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 84 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTP88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 84 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTP88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTP88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 84 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTP88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 84 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f090 b/parm/wave/grib2_gfswave.ao_9km.f090 new file mode 100644 index 0000000000..f3c52a2171 --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f090 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTP88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTP88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATP88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTP88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTP88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTP88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTP88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTP88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTP88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 90 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTP88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 90 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTP88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTP88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 90 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTP88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 90 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTP88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTP88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 90 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPTP88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 90 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f096 b/parm/wave/grib2_gfswave.ao_9km.f096 new file mode 100644 index 0000000000..df9f5793cd --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f096 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTQ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTQ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATQ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTQ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTQ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTQ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTQ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTQ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTQ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 96 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTQ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 96 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTQ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTQ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 96 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTQ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 96 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTQ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTQ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 96 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTQ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 96 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f102 b/parm/wave/grib2_gfswave.ao_9km.f102 new file mode 100644 index 0000000000..1558071b8f --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f102 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTQ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTQ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATQ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTQ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTQ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTQ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTQ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTQ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTQ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 102 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTQ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 102 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTQ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYTQ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 102 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTQ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 102 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTQ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTQ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 102 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTQ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 102 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f108 b/parm/wave/grib2_gfswave.ao_9km.f108 new file mode 100644 index 0000000000..41543b4d86 --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f108 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTZ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTZ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATZ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTZ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTZ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTZ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTZ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTZ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTZ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 108 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTZ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 108 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTZ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTZ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 108 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTZ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 108 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTZ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTZ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 108 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPTZ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 108 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f114 b/parm/wave/grib2_gfswave.ao_9km.f114 new file mode 100644 index 0000000000..d42dcb3da6 --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f114 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTZ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTZ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATZ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTZ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTZ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTZ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTZ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTZ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTZ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 114 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTZ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 114 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTZ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTZ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 114 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTZ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 114 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTZ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTZ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 114 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTZ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 114 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f120 b/parm/wave/grib2_gfswave.ao_9km.f120 new file mode 100644 index 0000000000..5b0b3538c3 --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f120 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTR88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTR88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATR88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTR88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTR88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTR88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTR88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTR88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTR88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 120 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. 
Of Data ',WMOHEAD='EOTR88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 120 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTR88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTR88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 120 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTR88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 120 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTR88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTR88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 120 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTR88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 120 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f126 b/parm/wave/grib2_gfswave.ao_9km.f126 new file mode 100644 index 0000000000..148f9a9a12 --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f126 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTR88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTR88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATR88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTR88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTR88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTR88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTR88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTR88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTR88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 126 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTR88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 126 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTR88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTR88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 126 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTR88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 126 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTR88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTR88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 126 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPTR88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 126 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f132 b/parm/wave/grib2_gfswave.ao_9km.f132 new file mode 100644 index 0000000000..9daea35eec --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f132 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTS88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTS88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATS88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTS88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTS88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTS88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTS88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTS88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTS88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 132 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTS88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 132 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTS88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTS88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 132 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTS88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 132 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTS88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTS88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 132 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTS88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 132 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f138 b/parm/wave/grib2_gfswave.ao_9km.f138 new file mode 100644 index 0000000000..0b29e8706d --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f138 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTS88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTS88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATS88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTS88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTS88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTS88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTS88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTS88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTS88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 138 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. 
Of Data ',WMOHEAD='EOTS88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 138 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTS88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTS88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 138 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTS88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 138 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTS88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTS88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 138 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTS88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 138 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f144 b/parm/wave/grib2_gfswave.ao_9km.f144 new file mode 100644 index 0000000000..240f35b7ea --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f144 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTT88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTT88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATT88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTT88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTT88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTT88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTT88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTT88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTT88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 144 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTT88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 144 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTT88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTT88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 144 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTT88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 144 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTT88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTT88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 144 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPTT88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 144 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f150 b/parm/wave/grib2_gfswave.ao_9km.f150 new file mode 100644 index 0000000000..25d79d2de0 --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f150 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTT88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTT88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATT88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTT88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTT88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTT88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTT88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTT88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTT88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 150 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTT88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 150 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTT88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTT88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 150 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTT88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 150 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTT88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTT88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 150 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTT88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 150 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f156 b/parm/wave/grib2_gfswave.ao_9km.f156 new file mode 100644 index 0000000000..3f9f9e7cb7 --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f156 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTU88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTU88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATU88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTU88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTU88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTU88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTU88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTU88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTU88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 156 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. 
Of Data ',WMOHEAD='EOTU88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 156 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTU88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTU88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 156 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTU88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 156 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTU88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTU88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 156 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTU88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 156 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f162 b/parm/wave/grib2_gfswave.ao_9km.f162 new file mode 100644 index 0000000000..9948e9d810 --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f162 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTU88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTU88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATU88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTU88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTU88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTU88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTU88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTU88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTU88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 162 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTU88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 162 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTU88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTU88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 162 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTU88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 162 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTU88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTU88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 162 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPTU88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 162 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f168 b/parm/wave/grib2_gfswave.ao_9km.f168 new file mode 100644 index 0000000000..97a4548532 --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f168 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTV88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTV88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATV88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTV88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTV88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTV88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTV88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTV88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTV88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 168 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTV88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 168 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTV88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTV88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 168 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTV88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 168 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTV88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTV88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 168 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTV88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 168 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f174 b/parm/wave/grib2_gfswave.ao_9km.f174 new file mode 100644 index 0000000000..ebc56be0ae --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f174 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTV88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTV88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATV88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTV88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTV88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTV88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTV88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTV88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTV88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 174 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. 
Of Data ',WMOHEAD='EOTV88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 174 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTV88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTV88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 174 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTV88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 174 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTV88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTV88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 174 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTV88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 174 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ao_9km.f180 b/parm/wave/grib2_gfswave.ao_9km.f180 new file mode 100644 index 0000000000..527ec3a760 --- /dev/null +++ b/parm/wave/grib2_gfswave.ao_9km.f180 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQTW88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERTW88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' UGRD Surface ',WMOHEAD='EATW88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' VGRD Surface ',WMOHEAD='EBTW88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECTW88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJTW88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKTW88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELTW88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTW88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 180 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOTW88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 180 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMTW88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTW88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 180 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYTW88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 180 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENTW88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPTW88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 180 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPTW88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 180 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f000 b/parm/wave/grib2_gfswave.at_10m.f000 new file mode 100644 index 0000000000..d477dab5b5 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f000 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBA88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBA88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABA88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBA88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBA88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBA88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBA88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBA88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBA88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 0 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBA88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 0 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBA88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBA88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 0 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBA88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 0 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBA88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBA88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 0 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBA88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 0 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f003 b/parm/wave/grib2_gfswave.at_10m.f003 new file mode 100644 index 0000000000..de559c5259 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f003 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBB88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBB88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABB88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBB88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBB88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBB88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBB88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBB88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBB88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 3 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBB88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 3 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBB88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYBB88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 3 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBB88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 3 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBB88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBB88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 3 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBB88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 3 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f006 b/parm/wave/grib2_gfswave.at_10m.f006 new file mode 100644 index 0000000000..083706ac70 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f006 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBC88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBC88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABC88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBC88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBC88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBC88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBC88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBC88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBC88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 6 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBC88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 6 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBC88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBC88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 6 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBC88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 6 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBC88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBC88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 6 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPBC88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 6 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f009 b/parm/wave/grib2_gfswave.at_10m.f009 new file mode 100644 index 0000000000..a9edacbcc9 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f009 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBD88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBD88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABD88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBD88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBD88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBD88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBD88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBD88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBD88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 9 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBD88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 9 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBD88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBD88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 9 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBD88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 9 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBD88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBD88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 9 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBD88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 9 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f012 b/parm/wave/grib2_gfswave.at_10m.f012 new file mode 100644 index 0000000000..5a99330ec0 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f012 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBE88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBE88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABE88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBE88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBE88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBE88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBE88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBE88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBE88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 12 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBE88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 12 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBE88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYBE88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 12 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBE88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 12 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBE88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBE88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 12 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBE88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 12 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f015 b/parm/wave/grib2_gfswave.at_10m.f015 new file mode 100644 index 0000000000..ca140775e7 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f015 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBF88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBF88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABF88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBF88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBF88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBF88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBF88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBF88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBF88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 15 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBF88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 15 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBF88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBF88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 15 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBF88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 15 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBF88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBF88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 15 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPBF88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 15 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f018 b/parm/wave/grib2_gfswave.at_10m.f018 new file mode 100644 index 0000000000..edad98ca56 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f018 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBG88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBG88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABG88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBG88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBG88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBG88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBG88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBG88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBG88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 18 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBG88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 18 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBG88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBG88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 18 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBG88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 18 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBG88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBG88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 18 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBG88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 18 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f021 b/parm/wave/grib2_gfswave.at_10m.f021 new file mode 100644 index 0000000000..fcd2c2ce53 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f021 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBH88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBH88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABH88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBH88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBH88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBH88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBH88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBH88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBH88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 21 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBH88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 21 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBH88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYBH88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 21 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBH88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 21 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBH88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBH88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 21 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBH88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 21 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f024 b/parm/wave/grib2_gfswave.at_10m.f024 new file mode 100644 index 0000000000..45b9d8a4ec --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f024 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBI88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBI88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABI88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBI88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBI88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBI88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBI88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBI88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBI88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 24 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBI88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 24 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBI88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBI88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 24 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBI88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 24 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBI88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBI88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 24 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPBI88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 24 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f027 b/parm/wave/grib2_gfswave.at_10m.f027 new file mode 100644 index 0000000000..689bf6af9b --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f027 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBI88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBI88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABI88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBI88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBI88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBI88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBI88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBI88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBI88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 27 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBI88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 27 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBI88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBI88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 27 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBI88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 27 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBI88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBI88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 27 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBI88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 27 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f030 b/parm/wave/grib2_gfswave.at_10m.f030 new file mode 100644 index 0000000000..a6ced8144d --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f030 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBJ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBJ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABJ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBJ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBJ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBJ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBJ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBJ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBJ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 30 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBJ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 30 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBJ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYBJ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 30 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBJ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 30 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBJ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBJ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 30 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBJ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 30 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f033 b/parm/wave/grib2_gfswave.at_10m.f033 new file mode 100644 index 0000000000..0a8fc7537c --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f033 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBJ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBJ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABJ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBJ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBJ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBJ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBJ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBJ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBJ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 33 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBJ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 33 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBJ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBJ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 33 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBJ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 33 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBJ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBJ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 33 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPBJ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 33 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f036 b/parm/wave/grib2_gfswave.at_10m.f036 new file mode 100644 index 0000000000..e886e1578d --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f036 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBK88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBK88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABK88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBK88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBK88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBK88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBK88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBK88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBK88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 36 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBK88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 36 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBK88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBK88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 36 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBK88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 36 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBK88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBK88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 36 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBK88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 36 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f039 b/parm/wave/grib2_gfswave.at_10m.f039 new file mode 100644 index 0000000000..30f98c8455 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f039 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBK88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBK88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABK88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBK88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBK88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBK88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBK88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBK88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBK88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 39 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBK88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 39 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBK88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYBK88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 39 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBK88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 39 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBK88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBK88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 39 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBK88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 39 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f042 b/parm/wave/grib2_gfswave.at_10m.f042 new file mode 100644 index 0000000000..a46567d18e --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f042 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBL88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBL88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABL88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBL88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBL88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBL88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBL88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBL88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBL88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 42 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBL88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 42 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBL88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBL88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 42 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBL88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 42 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBL88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBL88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 42 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPBL88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 42 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f045 b/parm/wave/grib2_gfswave.at_10m.f045 new file mode 100644 index 0000000000..b7e34b3160 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f045 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBL88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBL88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABL88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBL88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBL88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBL88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBL88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBL88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBL88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 45 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBL88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 45 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBL88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBL88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 45 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBL88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 45 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBL88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBL88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 45 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBL88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 45 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f048 b/parm/wave/grib2_gfswave.at_10m.f048 new file mode 100644 index 0000000000..8590d97c8f --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f048 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBM88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBM88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABM88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBM88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBM88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBM88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBM88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBM88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBM88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 48 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBM88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 48 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBM88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYBM88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 48 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBM88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 48 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBM88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBM88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 48 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBM88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 48 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f051 b/parm/wave/grib2_gfswave.at_10m.f051 new file mode 100644 index 0000000000..4facc85576 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f051 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBM88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBM88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABM88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBM88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBM88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBM88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBM88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBM88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBM88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 51 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBM88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 51 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBM88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBM88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 51 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBM88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 51 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBM88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBM88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 51 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPBM88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 51 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f054 b/parm/wave/grib2_gfswave.at_10m.f054 new file mode 100644 index 0000000000..56b4b166fc --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f054 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBX88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBX88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABX88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBX88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBX88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBX88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBX88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBX88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBX88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 54 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBX88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 54 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBX88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBX88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 54 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBX88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 54 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBX88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBX88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 54 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBX88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 54 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f057 b/parm/wave/grib2_gfswave.at_10m.f057 new file mode 100644 index 0000000000..62f017e4bb --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f057 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBX88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBX88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABX88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBX88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBX88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBX88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBX88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBX88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBX88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 57 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBX88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 57 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBX88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYBX88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 57 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBX88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 57 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBX88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBX88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 57 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBX88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 57 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f060 b/parm/wave/grib2_gfswave.at_10m.f060 new file mode 100644 index 0000000000..1d36770e68 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f060 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBN88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBN88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABN88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBN88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBN88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBN88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBN88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBN88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBN88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 60 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBN88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 60 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBN88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBN88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 60 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBN88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 60 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBN88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBN88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 60 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPBN88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 60 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f063 b/parm/wave/grib2_gfswave.at_10m.f063 new file mode 100644 index 0000000000..9bf847403e --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f063 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBN88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBN88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABN88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBN88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBN88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBN88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBN88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBN88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBN88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 63 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBN88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 63 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBN88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBN88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 63 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBN88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 63 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBN88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBN88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 63 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBN88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 63 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f066 b/parm/wave/grib2_gfswave.at_10m.f066 new file mode 100644 index 0000000000..45276d44c1 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f066 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBY88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBY88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABY88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBY88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBY88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBY88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBY88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBY88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBY88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 66 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBY88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 66 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBY88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYBY88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 66 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBY88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 66 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBY88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBY88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 66 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBY88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 66 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f069 b/parm/wave/grib2_gfswave.at_10m.f069 new file mode 100644 index 0000000000..8b729955bc --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f069 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBY88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBY88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABY88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBY88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBY88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBY88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBY88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBY88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBY88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 69 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBY88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 69 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBY88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBY88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 69 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBY88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 69 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBY88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBY88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 69 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPBY88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 69 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f072 b/parm/wave/grib2_gfswave.at_10m.f072 new file mode 100644 index 0000000000..1434f76cad --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f072 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBO88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBO88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABO88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBO88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBO88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBO88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBO88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBO88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBO88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 72 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBO88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 72 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBO88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBO88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 72 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBO88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 72 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBO88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBO88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 72 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBO88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 72 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f078 b/parm/wave/grib2_gfswave.at_10m.f078 new file mode 100644 index 0000000000..5d2e7d8bc8 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f078 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBO88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBO88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABO88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBO88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBO88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBO88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBO88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBO88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBO88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 78 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBO88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 78 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBO88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYBO88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 78 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBO88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 78 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBO88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBO88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 78 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBO88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 78 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f084 b/parm/wave/grib2_gfswave.at_10m.f084 new file mode 100644 index 0000000000..7b3b2aa731 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f084 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBP88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBP88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABP88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBP88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBP88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBP88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBP88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBP88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBP88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 84 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBP88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 84 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBP88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBP88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 84 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBP88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 84 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBP88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBP88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 84 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPBP88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 84 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f090 b/parm/wave/grib2_gfswave.at_10m.f090 new file mode 100644 index 0000000000..8ba15ede53 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f090 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBP88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBP88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABP88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBP88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBP88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBP88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBP88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBP88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBP88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 90 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBP88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 90 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBP88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBP88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 90 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBP88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 90 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBP88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBP88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 90 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBP88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 90 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f096 b/parm/wave/grib2_gfswave.at_10m.f096 new file mode 100644 index 0000000000..cc07a2d6f5 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f096 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBQ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBQ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABQ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBQ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBQ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBQ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBQ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBQ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBQ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 96 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBQ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 96 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBQ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYBQ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 96 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBQ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 96 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBQ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBQ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 96 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBQ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 96 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f102 b/parm/wave/grib2_gfswave.at_10m.f102 new file mode 100644 index 0000000000..220c5180a0 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f102 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBQ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBQ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABQ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBQ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBQ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBQ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBQ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBQ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBQ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 102 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBQ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 102 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBQ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBQ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 102 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBQ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 102 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBQ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBQ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 102 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPBQ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 102 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f108 b/parm/wave/grib2_gfswave.at_10m.f108 new file mode 100644 index 0000000000..d84639c5c1 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f108 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBZ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBZ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABZ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBZ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBZ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBZ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBZ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBZ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBZ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 108 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBZ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 108 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBZ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBZ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 108 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBZ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 108 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBZ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBZ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 108 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBZ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 108 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f114 b/parm/wave/grib2_gfswave.at_10m.f114 new file mode 100644 index 0000000000..8503d62ca0 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f114 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBZ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBZ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABZ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBZ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBZ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBZ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBZ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBZ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBZ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 114 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. 
Of Data ',WMOHEAD='EOBZ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 114 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBZ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBZ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 114 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBZ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 114 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBZ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBZ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 114 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBZ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 114 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f120 b/parm/wave/grib2_gfswave.at_10m.f120 new file mode 100644 index 0000000000..9a4331916f --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f120 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBR88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBR88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABR88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBR88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBR88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBR88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBR88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBR88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBR88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 120 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBR88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 120 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBR88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBR88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 120 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBR88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 120 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBR88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBR88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 120 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPBR88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 120 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f126 b/parm/wave/grib2_gfswave.at_10m.f126 new file mode 100644 index 0000000000..83b01f5cdd --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f126 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBR88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBR88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABR88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBR88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBR88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBR88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBR88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBR88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBR88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 126 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBR88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 126 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBR88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBR88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 126 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBR88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 126 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBR88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBR88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 126 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBR88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 126 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f132 b/parm/wave/grib2_gfswave.at_10m.f132 new file mode 100644 index 0000000000..5ac9fc5277 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f132 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBS88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBS88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABS88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBS88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBS88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBS88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBS88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBS88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBS88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 132 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. 
Of Data ',WMOHEAD='EOBS88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 132 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBS88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBS88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 132 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBS88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 132 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBS88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBS88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 132 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBS88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 132 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f138 b/parm/wave/grib2_gfswave.at_10m.f138 new file mode 100644 index 0000000000..bcc102965d --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f138 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBS88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBS88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABS88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBS88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBS88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBS88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBS88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBS88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBS88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 138 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBS88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 138 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBS88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBS88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 138 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBS88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 138 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBS88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBS88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 138 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPBS88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 138 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f144 b/parm/wave/grib2_gfswave.at_10m.f144 new file mode 100644 index 0000000000..144487aad3 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f144 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBT88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBT88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABT88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBT88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBT88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBT88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBT88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBT88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBT88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 144 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBT88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 144 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBT88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBT88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 144 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBT88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 144 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBT88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBT88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 144 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBT88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 144 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f150 b/parm/wave/grib2_gfswave.at_10m.f150 new file mode 100644 index 0000000000..d5d68ae51d --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f150 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBT88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBT88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABT88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBT88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBT88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBT88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBT88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBT88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBT88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 150 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. 
Of Data ',WMOHEAD='EOBT88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 150 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBT88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBT88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 150 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBT88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 150 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBT88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBT88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 150 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBT88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 150 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f156 b/parm/wave/grib2_gfswave.at_10m.f156 new file mode 100644 index 0000000000..05552c9575 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f156 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBU88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBU88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABU88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBU88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBU88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBU88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBU88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBU88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBU88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 156 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBU88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 156 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBU88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBU88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 156 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBU88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 156 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBU88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBU88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 156 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPBU88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 156 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f162 b/parm/wave/grib2_gfswave.at_10m.f162 new file mode 100644 index 0000000000..5aab798d36 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f162 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBU88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBU88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABU88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBU88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBU88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBU88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBU88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBU88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBU88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 162 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBU88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 162 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBU88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBU88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 162 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBU88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 162 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBU88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBU88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 162 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBU88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 162 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f168 b/parm/wave/grib2_gfswave.at_10m.f168 new file mode 100644 index 0000000000..2d660fcc97 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f168 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBV88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBV88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABV88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBV88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBV88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBV88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBV88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBV88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBV88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 168 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. 
Of Data ',WMOHEAD='EOBV88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 168 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBV88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBV88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 168 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBV88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 168 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBV88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBV88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 168 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBV88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 168 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f174 b/parm/wave/grib2_gfswave.at_10m.f174 new file mode 100644 index 0000000000..1acd3d8d5e --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f174 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBV88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBV88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABV88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBV88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBV88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBV88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBV88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBV88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBV88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 174 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBV88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 174 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBV88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBV88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 174 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBV88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 174 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBV88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBV88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 174 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPBV88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 174 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.at_10m.f180 b/parm/wave/grib2_gfswave.at_10m.f180 new file mode 100644 index 0000000000..7166559be0 --- /dev/null +++ b/parm/wave/grib2_gfswave.at_10m.f180 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQBW88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERBW88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EABW88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBBW88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECBW88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJBW88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKBW88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELBW88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBW88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 180 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOBW88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 180 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMBW88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBW88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 180 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYBW88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 180 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENBW88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBW88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 180 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPBW88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 180 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f000 b/parm/wave/grib2_gfswave.ep_10m.f000 new file mode 100644 index 0000000000..f8d065cf4b --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f000 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDA88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDA88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADA88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDA88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDA88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDA88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDA88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDA88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODA88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 0 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODA88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 0 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDA88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYDA88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 0 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDA88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 0 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDA88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDA88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 0 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDA88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 0 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f003 b/parm/wave/grib2_gfswave.ep_10m.f003 new file mode 100644 index 0000000000..115803fd63 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f003 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDB88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDB88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADB88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDB88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDB88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDB88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDB88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDB88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODB88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 3 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODB88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 3 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDB88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDB88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 3 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDB88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 3 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDB88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDB88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 3 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPDB88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 3 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f006 b/parm/wave/grib2_gfswave.ep_10m.f006 new file mode 100644 index 0000000000..065d4288c8 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f006 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDC88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDC88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADC88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDC88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDC88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDC88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDC88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDC88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODC88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 6 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODC88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 6 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDC88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDC88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 6 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDC88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 6 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDC88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDC88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 6 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDC88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 6 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f009 b/parm/wave/grib2_gfswave.ep_10m.f009 new file mode 100644 index 0000000000..d80dc1b7d3 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f009 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDD88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDD88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADD88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDD88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDD88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDD88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDD88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDD88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODD88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 9 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODD88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 9 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDD88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYDD88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 9 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDD88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 9 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDD88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDD88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 9 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDD88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 9 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f012 b/parm/wave/grib2_gfswave.ep_10m.f012 new file mode 100644 index 0000000000..cc3e77a1da --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f012 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDE88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDE88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADE88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDE88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDE88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDE88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDE88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDE88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODE88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 12 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODE88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 12 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDE88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDE88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 12 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDE88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 12 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDE88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDE88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 12 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPDE88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 12 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f015 b/parm/wave/grib2_gfswave.ep_10m.f015 new file mode 100644 index 0000000000..c6d3895bb7 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f015 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDF88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDF88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADF88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDF88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDF88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDF88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDF88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDF88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODF88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 15 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODF88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 15 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDF88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDF88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 15 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDF88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 15 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDF88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDF88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 15 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDF88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 15 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f018 b/parm/wave/grib2_gfswave.ep_10m.f018 new file mode 100644 index 0000000000..5208836607 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f018 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDG88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDG88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADG88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDG88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDG88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDG88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDG88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDG88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODG88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 18 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODG88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 18 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDG88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYDG88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 18 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDG88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 18 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDG88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDG88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 18 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDG88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 18 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f021 b/parm/wave/grib2_gfswave.ep_10m.f021 new file mode 100644 index 0000000000..92e9cd6082 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f021 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDH88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDH88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADH88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDH88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDH88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDH88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDH88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDH88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODH88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 21 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODH88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 21 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDH88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDH88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 21 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDH88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 21 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDH88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDH88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 21 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPDH88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 21 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f024 b/parm/wave/grib2_gfswave.ep_10m.f024 new file mode 100644 index 0000000000..a92bba3c82 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f024 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDI88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDI88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADI88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDI88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDI88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDI88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDI88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDI88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODI88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 24 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODI88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 24 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDI88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDI88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 24 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDI88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 24 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDI88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDI88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 24 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDI88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 24 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f027 b/parm/wave/grib2_gfswave.ep_10m.f027 new file mode 100644 index 0000000000..d406120246 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f027 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDI88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDI88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADI88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDI88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDI88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDI88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDI88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDI88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODI88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 27 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODI88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 27 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDI88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYDI88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 27 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDI88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 27 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDI88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDI88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 27 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDI88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 27 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f030 b/parm/wave/grib2_gfswave.ep_10m.f030 new file mode 100644 index 0000000000..ddd9876470 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f030 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDJ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDJ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADJ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDJ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDJ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDJ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDJ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDJ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODJ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 30 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODJ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 30 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDJ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDJ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 30 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDJ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 30 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDJ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDJ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 30 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPDJ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 30 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f033 b/parm/wave/grib2_gfswave.ep_10m.f033 new file mode 100644 index 0000000000..17b366b526 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f033 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDJ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDJ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADJ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDJ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDJ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDJ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDJ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDJ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODJ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 33 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODJ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 33 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDJ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDJ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 33 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDJ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 33 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDJ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDJ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 33 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDJ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 33 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f036 b/parm/wave/grib2_gfswave.ep_10m.f036 new file mode 100644 index 0000000000..dc07f4c40c --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f036 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDK88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDK88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADK88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDK88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDK88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDK88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDK88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDK88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODK88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 36 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODK88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 36 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDK88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYDK88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 36 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDK88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 36 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDK88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDK88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 36 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDK88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 36 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f039 b/parm/wave/grib2_gfswave.ep_10m.f039 new file mode 100644 index 0000000000..cac056faca --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f039 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDK88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDK88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADK88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDK88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDK88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDK88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDK88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDK88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODK88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 39 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODK88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 39 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDK88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDK88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 39 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDK88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 39 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDK88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDK88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 39 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPDK88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 39 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f042 b/parm/wave/grib2_gfswave.ep_10m.f042 new file mode 100644 index 0000000000..26e25bda57 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f042 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDL88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDL88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADL88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDL88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDL88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDL88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDL88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDL88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODL88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 42 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODL88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 42 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDL88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDL88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 42 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDL88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 42 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDL88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDL88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 42 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDL88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 42 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f045 b/parm/wave/grib2_gfswave.ep_10m.f045 new file mode 100644 index 0000000000..1de3d4f408 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f045 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDL88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDL88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADL88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDL88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDL88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDL88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDL88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDL88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODL88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 45 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODL88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 45 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDL88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYDL88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 45 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDL88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 45 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDL88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDL88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 45 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDL88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 45 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f048 b/parm/wave/grib2_gfswave.ep_10m.f048 new file mode 100644 index 0000000000..085c0ef3a0 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f048 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDM88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDM88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADM88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDM88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDM88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDM88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDM88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDM88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODM88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 48 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODM88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 48 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDM88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDM88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 48 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDM88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 48 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDM88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDM88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 48 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPDM88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 48 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f051 b/parm/wave/grib2_gfswave.ep_10m.f051 new file mode 100644 index 0000000000..e5ad1dba9d --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f051 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDM88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDM88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADM88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDM88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDM88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDM88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDM88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDM88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODM88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 51 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODM88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 51 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDM88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDM88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 51 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDM88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 51 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDM88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDM88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 51 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDM88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 51 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f054 b/parm/wave/grib2_gfswave.ep_10m.f054 new file mode 100644 index 0000000000..a3f52e7d27 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f054 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDX88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDX88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADX88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDX88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDX88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDX88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDX88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDX88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODX88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 54 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODX88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 54 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDX88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYDX88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 54 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDX88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 54 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDX88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDX88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 54 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDX88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 54 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f057 b/parm/wave/grib2_gfswave.ep_10m.f057 new file mode 100644 index 0000000000..3899e47823 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f057 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDX88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDX88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADX88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDX88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDX88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDX88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDX88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDX88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODX88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 57 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODX88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 57 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDX88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDX88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 57 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDX88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 57 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDX88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDX88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 57 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPDX88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 57 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f060 b/parm/wave/grib2_gfswave.ep_10m.f060 new file mode 100644 index 0000000000..a28c999042 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f060 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDN88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDN88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADN88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDN88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDN88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDN88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDN88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDN88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODN88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 60 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODN88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 60 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDN88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDN88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 60 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDN88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 60 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDN88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDN88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 60 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDN88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 60 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f063 b/parm/wave/grib2_gfswave.ep_10m.f063 new file mode 100644 index 0000000000..f13e736383 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f063 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDN88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDN88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADN88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDN88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDN88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDN88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDN88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDN88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODN88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 63 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODN88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 63 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDN88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYDN88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 63 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDN88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 63 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDN88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDN88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 63 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDN88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 63 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f066 b/parm/wave/grib2_gfswave.ep_10m.f066 new file mode 100644 index 0000000000..f598f767a0 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f066 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDY88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDY88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADY88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDY88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDY88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDY88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDY88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDY88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODY88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 66 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODY88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 66 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDY88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDY88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 66 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDY88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 66 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDY88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDY88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 66 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPDY88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 66 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f069 b/parm/wave/grib2_gfswave.ep_10m.f069 new file mode 100644 index 0000000000..3a05f77135 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f069 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDY88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDY88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADY88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDY88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDY88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDY88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDY88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDY88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODY88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 69 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODY88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 69 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDY88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDY88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 69 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDY88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 69 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDY88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDY88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 69 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDY88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 69 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f072 b/parm/wave/grib2_gfswave.ep_10m.f072 new file mode 100644 index 0000000000..482076b5c8 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f072 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDO88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDO88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADO88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDO88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDO88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDO88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDO88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDO88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODO88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 72 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODO88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 72 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDO88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYDO88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 72 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDO88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 72 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDO88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDO88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 72 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDO88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 72 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f078 b/parm/wave/grib2_gfswave.ep_10m.f078 new file mode 100644 index 0000000000..8b1170193f --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f078 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDO88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDO88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADO88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDO88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDO88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDO88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDO88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDO88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODO88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 78 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODO88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 78 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDO88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDO88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 78 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDO88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 78 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDO88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDO88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 78 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPDO88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 78 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f084 b/parm/wave/grib2_gfswave.ep_10m.f084 new file mode 100644 index 0000000000..e566a3a375 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f084 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDP88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDP88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADP88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDP88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDP88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDP88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDP88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDP88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODP88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 84 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODP88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 84 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDP88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDP88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 84 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDP88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 84 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDP88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDP88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 84 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDP88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 84 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f090 b/parm/wave/grib2_gfswave.ep_10m.f090 new file mode 100644 index 0000000000..5a16bed734 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f090 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDP88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDP88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADP88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDP88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDP88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDP88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDP88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDP88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODP88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 90 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODP88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 90 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDP88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYDP88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 90 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDP88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 90 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDP88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDP88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 90 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDP88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 90 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f096 b/parm/wave/grib2_gfswave.ep_10m.f096 new file mode 100644 index 0000000000..7810dd8b21 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f096 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDQ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDQ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADQ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDQ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDQ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDQ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDQ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDQ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODQ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 96 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODQ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 96 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDQ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDQ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 96 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDQ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 96 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDQ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDQ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 96 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPDQ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 96 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f102 b/parm/wave/grib2_gfswave.ep_10m.f102 new file mode 100644 index 0000000000..7e8bdf4ab2 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f102 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDQ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDQ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADQ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDQ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDQ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDQ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDQ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDQ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODQ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 102 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODQ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 102 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDQ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDQ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 102 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDQ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 102 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDQ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDQ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 102 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDQ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 102 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f108 b/parm/wave/grib2_gfswave.ep_10m.f108 new file mode 100644 index 0000000000..0844a51d9b --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f108 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDZ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDZ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADZ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDZ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDZ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDZ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDZ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDZ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODZ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 108 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. 
Of Data ',WMOHEAD='EODZ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 108 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDZ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDZ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 108 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDZ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 108 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDZ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDZ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 108 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDZ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 108 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f114 b/parm/wave/grib2_gfswave.ep_10m.f114 new file mode 100644 index 0000000000..c53b21d622 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f114 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDZ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDZ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADZ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDZ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDZ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDZ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDZ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDZ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODZ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 114 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODZ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 114 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDZ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDZ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 114 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDZ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 114 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDZ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDZ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 114 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPDZ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 114 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f120 b/parm/wave/grib2_gfswave.ep_10m.f120 new file mode 100644 index 0000000000..caa597569b --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f120 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDR88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDR88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADR88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDR88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDR88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDR88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDR88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDR88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODR88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 120 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODR88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 120 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDR88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDR88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 120 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDR88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 120 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDR88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDR88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 120 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDR88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 120 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f126 b/parm/wave/grib2_gfswave.ep_10m.f126 new file mode 100644 index 0000000000..c2bf8697f2 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f126 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDR88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDR88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADR88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDR88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDR88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDR88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDR88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDR88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODR88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 126 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. 
Of Data ',WMOHEAD='EODR88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 126 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDR88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDR88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 126 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDR88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 126 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDR88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDR88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 126 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDR88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 126 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f132 b/parm/wave/grib2_gfswave.ep_10m.f132 new file mode 100644 index 0000000000..f6021aaae1 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f132 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDS88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDS88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADS88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDS88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDS88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDS88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDS88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDS88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODS88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 132 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODS88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 132 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDS88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDS88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 132 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDS88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 132 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDS88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDS88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 132 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPDS88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 132 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f138 b/parm/wave/grib2_gfswave.ep_10m.f138 new file mode 100644 index 0000000000..303f65efd6 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f138 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDS88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDS88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADS88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDS88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDS88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDS88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDS88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDS88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODS88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 138 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODS88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 138 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDS88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDS88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 138 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDS88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 138 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDS88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDS88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 138 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDS88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 138 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f144 b/parm/wave/grib2_gfswave.ep_10m.f144 new file mode 100644 index 0000000000..713fd1ce1a --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f144 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDT88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDT88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADT88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDT88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDT88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDT88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDT88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDT88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODT88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 144 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. 
Of Data ',WMOHEAD='EODT88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 144 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDT88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDT88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 144 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDT88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 144 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDT88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDT88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 144 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDT88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 144 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f150 b/parm/wave/grib2_gfswave.ep_10m.f150 new file mode 100644 index 0000000000..35cd044bc5 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f150 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDT88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDT88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADT88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDT88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDT88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDT88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDT88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDT88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODT88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 150 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODT88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 150 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDT88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDT88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 150 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDT88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 150 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDT88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDT88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 150 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPDT88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 150 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f156 b/parm/wave/grib2_gfswave.ep_10m.f156 new file mode 100644 index 0000000000..a61f769843 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f156 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDU88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDU88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADU88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDU88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDU88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDU88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDU88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDU88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODU88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 156 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODU88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 156 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDU88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDU88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 156 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDU88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 156 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDU88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDU88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 156 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDU88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 156 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f162 b/parm/wave/grib2_gfswave.ep_10m.f162 new file mode 100644 index 0000000000..71eb7d1501 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f162 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDU88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDU88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADU88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDU88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDU88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDU88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDU88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDU88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODU88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 162 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. 
Of Data ',WMOHEAD='EODU88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 162 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDU88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDU88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 162 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDU88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 162 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDU88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDU88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 162 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDU88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 162 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f168 b/parm/wave/grib2_gfswave.ep_10m.f168 new file mode 100644 index 0000000000..343a165fa9 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f168 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDV88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDV88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADV88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDV88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDV88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDV88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDV88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDV88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODV88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 168 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODV88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 168 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDV88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDV88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 168 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDV88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 168 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDV88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDV88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 168 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPDV88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 168 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f174 b/parm/wave/grib2_gfswave.ep_10m.f174 new file mode 100644 index 0000000000..cf57aea145 --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f174 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDV88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDV88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADV88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDV88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDV88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDV88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDV88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDV88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODV88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 174 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODV88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 174 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDV88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDV88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 174 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDV88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 174 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDV88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDV88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 174 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDV88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 174 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.ep_10m.f180 b/parm/wave/grib2_gfswave.ep_10m.f180 new file mode 100644 index 0000000000..7ce0873b6f --- /dev/null +++ b/parm/wave/grib2_gfswave.ep_10m.f180 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQDW88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERDW88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EADW88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBDW88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECDW88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJDW88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKDW88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELDW88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EODW88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 180 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. 
Of Data ',WMOHEAD='EODW88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 180 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMDW88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDW88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 180 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYDW88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 180 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENDW88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDW88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 180 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPDW88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 180 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f000 b/parm/wave/grib2_gfswave.glo_30m.f000 new file mode 100644 index 0000000000..66ff96c803 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f000 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAA88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAA88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAA88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAA88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAA88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAA88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAA88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAA88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAA88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 0 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAA88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 0 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAA88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAA88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 0 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAA88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 0 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAA88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAA88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 0 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPAA88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 0 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f003 b/parm/wave/grib2_gfswave.glo_30m.f003 new file mode 100644 index 0000000000..9b5200fe14 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f003 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAB88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAB88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAB88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAB88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAB88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAB88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAB88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAB88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAB88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 3 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAB88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 3 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAB88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAB88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 3 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAB88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 3 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAB88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAB88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 3 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAB88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 3 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f006 b/parm/wave/grib2_gfswave.glo_30m.f006 new file mode 100644 index 0000000000..b8ea82ce76 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f006 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAC88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAC88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAC88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAC88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAC88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAC88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAC88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAC88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAC88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 6 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAC88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 6 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAC88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYAC88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 6 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAC88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 6 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAC88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAC88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 6 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAC88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 6 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f009 b/parm/wave/grib2_gfswave.glo_30m.f009 new file mode 100644 index 0000000000..57b88e5db6 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f009 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAD88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAD88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAD88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAD88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAD88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAD88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAD88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAD88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAD88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 9 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAD88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 9 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAD88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAD88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 9 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAD88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 9 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAD88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAD88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 9 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPAD88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 9 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f012 b/parm/wave/grib2_gfswave.glo_30m.f012 new file mode 100644 index 0000000000..3e6c098b84 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f012 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAE88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAE88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAE88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAE88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAE88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAE88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAE88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAE88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAE88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 12 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAE88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 12 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAE88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAE88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 12 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAE88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 12 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAE88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAE88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 12 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAE88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 12 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f015 b/parm/wave/grib2_gfswave.glo_30m.f015 new file mode 100644 index 0000000000..28c2420b30 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f015 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAF88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAF88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAF88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAF88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAF88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAF88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAF88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAF88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAF88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 15 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAF88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 15 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAF88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYAF88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 15 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAF88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 15 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAF88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAF88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 15 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAF88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 15 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f018 b/parm/wave/grib2_gfswave.glo_30m.f018 new file mode 100644 index 0000000000..a6ded38ecf --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f018 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAG88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAG88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAG88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAG88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAG88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAG88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAG88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAG88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAG88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 18 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAG88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 18 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAG88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAG88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 18 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAG88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 18 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAG88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAG88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 18 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPAG88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 18 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f021 b/parm/wave/grib2_gfswave.glo_30m.f021 new file mode 100644 index 0000000000..ddaaad80f5 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f021 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAH88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAH88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAH88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAH88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAH88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAH88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAH88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAH88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAH88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 21 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAH88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 21 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAH88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAH88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 21 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAH88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 21 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAH88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAH88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 21 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAH88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 21 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f024 b/parm/wave/grib2_gfswave.glo_30m.f024 new file mode 100644 index 0000000000..f08b512272 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f024 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAI88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAI88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAI88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAI88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAI88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAI88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAI88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAI88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAI88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 24 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAI88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 24 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAI88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYAI88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 24 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAI88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 24 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAI88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAI88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 24 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAI88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 24 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f027 b/parm/wave/grib2_gfswave.glo_30m.f027 new file mode 100644 index 0000000000..926f7db837 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f027 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAI88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAI88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAI88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAI88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAI88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAI88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAI88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAI88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAI88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 27 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAI88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 27 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAI88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAI88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 27 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAI88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 27 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAI88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAI88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 27 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPAI88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 27 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f030 b/parm/wave/grib2_gfswave.glo_30m.f030 new file mode 100644 index 0000000000..4799f6dff4 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f030 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAJ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAJ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAJ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAJ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAJ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAJ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAJ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAJ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAJ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 30 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAJ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 30 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAJ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAJ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 30 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAJ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 30 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAJ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAJ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 30 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAJ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 30 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f033 b/parm/wave/grib2_gfswave.glo_30m.f033 new file mode 100644 index 0000000000..87f867858f --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f033 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAJ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAJ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAJ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAJ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAJ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAJ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAJ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAJ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAJ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 33 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAJ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 33 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAJ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYAJ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 33 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAJ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 33 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAJ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAJ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 33 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAJ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 33 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f036 b/parm/wave/grib2_gfswave.glo_30m.f036 new file mode 100644 index 0000000000..c030fe540f --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f036 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAK88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAK88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAK88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAK88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAK88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAK88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAK88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAK88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAK88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 36 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAK88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 36 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAK88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAK88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 36 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAK88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 36 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAK88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAK88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 36 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPAK88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 36 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f039 b/parm/wave/grib2_gfswave.glo_30m.f039 new file mode 100644 index 0000000000..af21e75e8e --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f039 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAK88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAK88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAK88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAK88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAK88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAK88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAK88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAK88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAK88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 39 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAK88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 39 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAK88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAK88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 39 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAK88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 39 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAK88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAK88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 39 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAK88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 39 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f042 b/parm/wave/grib2_gfswave.glo_30m.f042 new file mode 100644 index 0000000000..6c2ed1db8c --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f042 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAL88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAL88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAL88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAL88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAL88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAL88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAL88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAL88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAL88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 42 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAL88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 42 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAL88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYAL88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 42 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAL88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 42 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAL88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAL88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 42 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAL88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 42 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f045 b/parm/wave/grib2_gfswave.glo_30m.f045 new file mode 100644 index 0000000000..e9af7c48d3 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f045 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAL88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAL88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAL88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAL88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAL88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAL88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAL88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAL88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAL88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 45 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAL88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 45 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAL88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAL88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 45 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAL88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 45 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAL88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAL88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 45 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPAL88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 45 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f048 b/parm/wave/grib2_gfswave.glo_30m.f048 new file mode 100644 index 0000000000..8e6f08ceda --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f048 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAM88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAM88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAM88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAM88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAM88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAM88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAM88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAM88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAM88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 48 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAM88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 48 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAM88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAM88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 48 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAM88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 48 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAM88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAM88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 48 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAM88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 48 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f051 b/parm/wave/grib2_gfswave.glo_30m.f051 new file mode 100644 index 0000000000..7cf17bee6c --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f051 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAM88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAM88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAM88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAM88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAM88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAM88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAM88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAM88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAM88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 51 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAM88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 51 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAM88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYAM88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 51 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAM88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 51 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAM88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAM88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 51 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAM88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 51 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f054 b/parm/wave/grib2_gfswave.glo_30m.f054 new file mode 100644 index 0000000000..83230fbcb6 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f054 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAX88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAX88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAX88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAX88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAX88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAX88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAX88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAX88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAX88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 54 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAX88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 54 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAX88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAX88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 54 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAX88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 54 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAX88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAX88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 54 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPAX88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 54 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f057 b/parm/wave/grib2_gfswave.glo_30m.f057 new file mode 100644 index 0000000000..a16252d1dc --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f057 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAX88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAX88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAX88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAX88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAX88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAX88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAX88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAX88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAX88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 57 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAX88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 57 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAX88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAX88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 57 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAX88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 57 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAX88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAX88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 57 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAX88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 57 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f060 b/parm/wave/grib2_gfswave.glo_30m.f060 new file mode 100644 index 0000000000..8657aaef61 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f060 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAN88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAN88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAN88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAN88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAN88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAN88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAN88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAN88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAN88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 60 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAN88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 60 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAN88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYAN88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 60 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAN88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 60 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAN88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAN88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 60 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAN88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 60 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f063 b/parm/wave/grib2_gfswave.glo_30m.f063 new file mode 100644 index 0000000000..10e770b94e --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f063 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAN88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAN88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAN88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAN88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAN88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAN88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAN88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAN88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAN88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 63 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAN88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 63 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAN88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAN88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 63 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAN88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 63 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAN88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAN88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 63 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPAN88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 63 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f066 b/parm/wave/grib2_gfswave.glo_30m.f066 new file mode 100644 index 0000000000..942497d603 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f066 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAY88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAY88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAY88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAY88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAY88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAY88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAY88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAY88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAY88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 66 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAY88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 66 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAY88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAY88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 66 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAY88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 66 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAY88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAY88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 66 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAY88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 66 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f069 b/parm/wave/grib2_gfswave.glo_30m.f069 new file mode 100644 index 0000000000..839d3fb392 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f069 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAY88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAY88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAY88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAY88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAY88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAY88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAY88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAY88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAY88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 69 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAY88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 69 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAY88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYAY88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 69 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAY88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 69 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAY88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAY88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 69 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAY88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 69 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f072 b/parm/wave/grib2_gfswave.glo_30m.f072 new file mode 100644 index 0000000000..ea2af78e81 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f072 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAO88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAO88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAO88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAO88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAO88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAO88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAO88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAO88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAO88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 72 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAO88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 72 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAO88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAO88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 72 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAO88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 72 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAO88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAO88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 72 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPAO88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 72 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f078 b/parm/wave/grib2_gfswave.glo_30m.f078 new file mode 100644 index 0000000000..3021da6a37 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f078 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAO88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAO88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAO88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAO88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAO88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAO88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAO88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAO88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAO88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 78 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAO88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 78 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAO88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAO88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 78 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAO88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 78 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAO88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAO88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 78 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAO88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 78 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f084 b/parm/wave/grib2_gfswave.glo_30m.f084 new file mode 100644 index 0000000000..4f6ebc8ff0 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f084 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAP88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAP88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAP88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAP88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAP88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAP88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAP88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAP88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAP88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 84 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAP88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 84 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAP88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYAP88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 84 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAP88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 84 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAP88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAP88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 84 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAP88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 84 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f090 b/parm/wave/grib2_gfswave.glo_30m.f090 new file mode 100644 index 0000000000..0045375fb2 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f090 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAP88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAP88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAP88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAP88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAP88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAP88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAP88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAP88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAP88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 90 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAP88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 90 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAP88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAP88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 90 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAP88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 90 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAP88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAP88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 90 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPAP88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 90 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f096 b/parm/wave/grib2_gfswave.glo_30m.f096 new file mode 100644 index 0000000000..28cd75597d --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f096 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAQ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAQ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAQ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAQ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAQ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAQ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAQ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAQ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAQ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 96 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAQ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 96 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAQ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAQ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 96 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAQ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 96 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAQ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAQ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 96 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAQ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 96 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f102 b/parm/wave/grib2_gfswave.glo_30m.f102 new file mode 100644 index 0000000000..b4528fae64 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f102 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAQ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAQ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAQ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAQ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAQ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAQ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAQ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAQ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAQ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 102 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAQ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 102 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAQ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYAQ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 102 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAQ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 102 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAQ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAQ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 102 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAQ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 102 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f108 b/parm/wave/grib2_gfswave.glo_30m.f108 new file mode 100644 index 0000000000..f34717ec73 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f108 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAZ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAZ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAZ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAZ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAZ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAZ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAZ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAZ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAZ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 108 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAZ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 108 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAZ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAZ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 108 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAZ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 108 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAZ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAZ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 108 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPAZ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 108 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f114 b/parm/wave/grib2_gfswave.glo_30m.f114 new file mode 100644 index 0000000000..d595cb13d9 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f114 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAZ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAZ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAZ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAZ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAZ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAZ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAZ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAZ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAZ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 114 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAZ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 114 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAZ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAZ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 114 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAZ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 114 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAZ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAZ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 114 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAZ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 114 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f120 b/parm/wave/grib2_gfswave.glo_30m.f120 new file mode 100644 index 0000000000..cd13fb4123 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f120 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAR88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAR88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAR88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAR88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAR88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAR88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAR88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAR88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAR88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 120 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. 
Of Data ',WMOHEAD='EOAR88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 120 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAR88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAR88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 120 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAR88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 120 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAR88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAR88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 120 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAR88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 120 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f126 b/parm/wave/grib2_gfswave.glo_30m.f126 new file mode 100644 index 0000000000..44e08675da --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f126 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAR88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAR88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAR88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAR88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAR88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAR88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAR88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAR88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAR88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 126 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAR88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 126 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAR88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAR88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 126 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAR88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 126 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAR88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAR88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 126 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPAR88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 126 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f132 b/parm/wave/grib2_gfswave.glo_30m.f132 new file mode 100644 index 0000000000..5268404dee --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f132 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAS88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAS88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAS88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAS88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAS88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAS88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAS88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAS88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAS88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 132 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAS88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 132 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAS88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAS88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 132 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAS88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 132 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAS88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAS88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 132 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAS88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 132 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f138 b/parm/wave/grib2_gfswave.glo_30m.f138 new file mode 100644 index 0000000000..fa38b3221e --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f138 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAS88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAS88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAS88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAS88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAS88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAS88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAS88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAS88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAS88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 138 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. 
Of Data ',WMOHEAD='EOAS88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 138 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAS88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAS88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 138 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAS88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 138 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAS88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAS88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 138 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAS88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 138 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f144 b/parm/wave/grib2_gfswave.glo_30m.f144 new file mode 100644 index 0000000000..d002662383 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f144 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAT88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAT88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAT88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAT88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAT88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAT88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAT88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAT88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAT88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 144 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAT88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 144 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAT88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAT88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 144 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAT88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 144 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAT88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAT88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 144 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPAT88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 144 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f150 b/parm/wave/grib2_gfswave.glo_30m.f150 new file mode 100644 index 0000000000..390306be21 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f150 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAT88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAT88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAT88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAT88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAT88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAT88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAT88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAT88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAT88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 150 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAT88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 150 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAT88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAT88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 150 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAT88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 150 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAT88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAT88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 150 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAT88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 150 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f156 b/parm/wave/grib2_gfswave.glo_30m.f156 new file mode 100644 index 0000000000..4cd17d276d --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f156 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAU88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAU88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAU88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAU88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAU88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAU88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAU88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAU88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAU88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 156 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. 
Of Data ',WMOHEAD='EOAU88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 156 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAU88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAU88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 156 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAU88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 156 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAU88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAU88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 156 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAU88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 156 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f162 b/parm/wave/grib2_gfswave.glo_30m.f162 new file mode 100644 index 0000000000..5d24d74cce --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f162 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAU88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAU88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAU88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAU88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAU88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAU88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAU88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAU88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAU88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 162 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAU88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 162 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAU88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAU88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 162 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAU88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 162 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAU88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAU88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 162 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPAU88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 162 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f168 b/parm/wave/grib2_gfswave.glo_30m.f168 new file mode 100644 index 0000000000..f9d5158852 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f168 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAV88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAV88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAV88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAV88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAV88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAV88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAV88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAV88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAV88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 168 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAV88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 168 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAV88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAV88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 168 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAV88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 168 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAV88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAV88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 168 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAV88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 168 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f174 b/parm/wave/grib2_gfswave.glo_30m.f174 new file mode 100644 index 0000000000..dc7577d1e2 --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f174 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAV88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAV88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAV88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAV88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAV88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAV88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAV88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAV88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAV88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 174 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. 
Of Data ',WMOHEAD='EOAV88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 174 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAV88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAV88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 174 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAV88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 174 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAV88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAV88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 174 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAV88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 174 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.glo_30m.f180 b/parm/wave/grib2_gfswave.glo_30m.f180 new file mode 100644 index 0000000000..9b94c0282f --- /dev/null +++ b/parm/wave/grib2_gfswave.glo_30m.f180 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQAW88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERAW88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EAAW88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBAW88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECAW88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJAW88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKAW88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELAW88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAW88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 180 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOAW88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 180 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMAW88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAW88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 180 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYAW88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 180 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENAW88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPAW88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 180 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPAW88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 180 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f000 b/parm/wave/grib2_gfswave.wc_10m.f000 new file mode 100644 index 0000000000..de854de5fc --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f000 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCA88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCA88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACA88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCA88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCA88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCA88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCA88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCA88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCA88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 0 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCA88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 0 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCA88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCA88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 0 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCA88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 0 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCA88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 0 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCA88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 0 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCA88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 0 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f003 b/parm/wave/grib2_gfswave.wc_10m.f003 new file mode 100644 index 0000000000..617e9e7b4f --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f003 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCB88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCB88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACB88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCB88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCB88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCB88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCB88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCB88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCB88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 3 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCB88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 3 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCB88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYCB88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 3 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCB88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 3 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCB88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 3 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCB88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 3 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCB88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 3 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f006 b/parm/wave/grib2_gfswave.wc_10m.f006 new file mode 100644 index 0000000000..9ce20a63b1 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f006 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCC88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCC88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACC88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCC88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCC88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCC88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCC88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCC88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCC88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 6 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCC88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 6 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCC88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCC88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 6 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCC88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 6 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCC88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 6 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCC88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 6 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPCC88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 6 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f009 b/parm/wave/grib2_gfswave.wc_10m.f009 new file mode 100644 index 0000000000..07b584002d --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f009 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCD88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCD88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACD88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCD88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCD88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCD88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCD88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCD88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCD88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 9 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCD88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 9 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCD88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCD88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 9 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCD88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 9 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCD88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 9 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCD88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 9 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCD88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 9 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f012 b/parm/wave/grib2_gfswave.wc_10m.f012 new file mode 100644 index 0000000000..6a1c38ef68 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f012 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCE88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCE88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACE88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCE88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCE88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCE88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCE88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCE88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCE88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 12 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCE88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 12 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCE88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYCE88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 12 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCE88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 12 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCE88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 12 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCE88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 12 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCE88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 12 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f015 b/parm/wave/grib2_gfswave.wc_10m.f015 new file mode 100644 index 0000000000..0b3333560b --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f015 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCF88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCF88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACF88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCF88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCF88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCF88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCF88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCF88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCF88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 15 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCF88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 15 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCF88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCF88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 15 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCF88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 15 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCF88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 15 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCF88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 15 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPCF88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 15 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f018 b/parm/wave/grib2_gfswave.wc_10m.f018 new file mode 100644 index 0000000000..404773d954 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f018 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCG88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCG88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACG88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCG88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCG88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCG88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCG88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCG88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCG88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 18 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCG88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 18 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCG88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCG88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 18 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCG88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 18 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCG88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 18 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCG88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 18 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCG88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 18 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f021 b/parm/wave/grib2_gfswave.wc_10m.f021 new file mode 100644 index 0000000000..06c297e275 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f021 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCH88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCH88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACH88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCH88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCH88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCH88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCH88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCH88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCH88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 21 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCH88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 21 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCH88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYCH88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 21 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCH88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 21 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCH88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 21 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCH88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 21 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCH88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 21 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f024 b/parm/wave/grib2_gfswave.wc_10m.f024 new file mode 100644 index 0000000000..28e4cfa904 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f024 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCI88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCI88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACI88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCI88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCI88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCI88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCI88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCI88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCI88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 24 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCI88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 24 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCI88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCI88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 24 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCI88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 24 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCI88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 24 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCI88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 24 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPCI88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 24 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f027 b/parm/wave/grib2_gfswave.wc_10m.f027 new file mode 100644 index 0000000000..2f2ddf1d1b --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f027 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCI88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCI88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACI88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCI88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCI88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCI88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCI88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCI88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCI88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 27 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCI88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 27 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCI88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCI88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 27 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCI88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 27 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCI88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 27 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCI88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 27 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCI88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 27 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f030 b/parm/wave/grib2_gfswave.wc_10m.f030 new file mode 100644 index 0000000000..d0725e80b4 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f030 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCJ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCJ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACJ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCJ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCJ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCJ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCJ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCJ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCJ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 30 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCJ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 30 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCJ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYCJ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 30 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCJ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 30 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCJ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 30 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCJ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 30 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCJ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 30 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f033 b/parm/wave/grib2_gfswave.wc_10m.f033 new file mode 100644 index 0000000000..f89ed37542 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f033 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCJ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCJ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACJ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCJ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCJ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCJ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCJ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCJ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCJ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 33 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCJ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 33 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCJ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCJ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 33 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCJ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 33 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCJ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 33 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCJ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 33 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPCJ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 33 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f036 b/parm/wave/grib2_gfswave.wc_10m.f036 new file mode 100644 index 0000000000..88f84d150a --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f036 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCK88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCK88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACK88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCK88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCK88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCK88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCK88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCK88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCK88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 36 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCK88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 36 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCK88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCK88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 36 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCK88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 36 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCK88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 36 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCK88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 36 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCK88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 36 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f039 b/parm/wave/grib2_gfswave.wc_10m.f039 new file mode 100644 index 0000000000..9883f8ad98 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f039 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCK88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCK88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACK88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCK88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCK88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCK88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCK88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCK88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCK88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 39 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCK88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 39 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCK88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYCK88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 39 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCK88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 39 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCK88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 39 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCK88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 39 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCK88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 39 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f042 b/parm/wave/grib2_gfswave.wc_10m.f042 new file mode 100644 index 0000000000..499279984a --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f042 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCL88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCL88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACL88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCL88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCL88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCL88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCL88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCL88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCL88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 42 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCL88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 42 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCL88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCL88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 42 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCL88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 42 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCL88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 42 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCL88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 42 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPCL88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 42 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f045 b/parm/wave/grib2_gfswave.wc_10m.f045 new file mode 100644 index 0000000000..8ac60c51e8 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f045 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCL88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCL88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACL88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCL88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCL88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCL88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCL88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCL88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCL88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 45 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCL88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 45 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCL88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCL88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 45 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCL88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 45 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCL88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 45 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCL88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 45 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCL88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 45 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f048 b/parm/wave/grib2_gfswave.wc_10m.f048 new file mode 100644 index 0000000000..7da32742dc --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f048 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCM88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCM88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACM88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCM88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCM88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCM88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCM88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCM88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCM88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 48 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCM88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 48 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCM88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYCM88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 48 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCM88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 48 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCM88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 48 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCM88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 48 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCM88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 48 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f051 b/parm/wave/grib2_gfswave.wc_10m.f051 new file mode 100644 index 0000000000..fe2762bc94 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f051 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCM88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCM88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACM88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCM88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCM88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCM88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCM88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCM88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCM88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 51 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCM88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 51 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCM88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCM88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 51 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCM88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 51 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCM88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 51 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCM88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 51 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPCM88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 51 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f054 b/parm/wave/grib2_gfswave.wc_10m.f054 new file mode 100644 index 0000000000..d1c9d07a65 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f054 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCX88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCX88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACX88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCX88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCX88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCX88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCX88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCX88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCX88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 54 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCX88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 54 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCX88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCX88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 54 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCX88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 54 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCX88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 54 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCX88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 54 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCX88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 54 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f057 b/parm/wave/grib2_gfswave.wc_10m.f057 new file mode 100644 index 0000000000..d03780335d --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f057 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCX88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCX88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACX88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCX88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCX88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCX88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCX88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCX88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCX88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 57 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCX88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 57 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCX88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYCX88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 57 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCX88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 57 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCX88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 57 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCX88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 57 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCX88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 57 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f060 b/parm/wave/grib2_gfswave.wc_10m.f060 new file mode 100644 index 0000000000..8f87f58223 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f060 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCN88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCN88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACN88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCN88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCN88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCN88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCN88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCN88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCN88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 60 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCN88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 60 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCN88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCN88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 60 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCN88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 60 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCN88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 60 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCN88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 60 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPCN88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 60 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f063 b/parm/wave/grib2_gfswave.wc_10m.f063 new file mode 100644 index 0000000000..bc5ce48621 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f063 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCN88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCN88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACN88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCN88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCN88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCN88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCN88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCN88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCN88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 63 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCN88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 63 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCN88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCN88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 63 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCN88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 63 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCN88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 63 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCN88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 63 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCN88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 63 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f066 b/parm/wave/grib2_gfswave.wc_10m.f066 new file mode 100644 index 0000000000..33e35d5003 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f066 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCY88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCY88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACY88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCY88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCY88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCY88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCY88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCY88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCY88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 66 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCY88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 66 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCY88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYCY88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 66 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCY88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 66 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCY88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 66 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCY88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 66 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCY88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 66 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f069 b/parm/wave/grib2_gfswave.wc_10m.f069 new file mode 100644 index 0000000000..dddb78d455 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f069 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCY88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCY88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACY88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCY88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCY88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCY88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCY88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCY88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCY88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 69 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCY88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 69 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCY88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCY88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 69 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCY88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 69 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCY88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 69 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCY88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 69 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPCY88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 69 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f072 b/parm/wave/grib2_gfswave.wc_10m.f072 new file mode 100644 index 0000000000..3947625510 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f072 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCO88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCO88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACO88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCO88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCO88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCO88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCO88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCO88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCO88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 72 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCO88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 72 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCO88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCO88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 72 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCO88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 72 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCO88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 72 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCO88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 72 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCO88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 72 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f078 b/parm/wave/grib2_gfswave.wc_10m.f078 new file mode 100644 index 0000000000..52d71b8ea0 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f078 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCO88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCO88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACO88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCO88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCO88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCO88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCO88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCO88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCO88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 78 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCO88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 78 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCO88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYCO88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 78 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCO88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 78 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCO88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 78 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCO88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 78 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCO88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 78 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f084 b/parm/wave/grib2_gfswave.wc_10m.f084 new file mode 100644 index 0000000000..e534f1c308 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f084 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCP88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCP88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACP88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCP88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCP88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCP88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCP88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCP88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCP88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 84 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCP88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 84 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCP88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCP88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 84 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCP88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 84 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCP88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 84 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCP88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 84 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPCP88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 84 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f090 b/parm/wave/grib2_gfswave.wc_10m.f090 new file mode 100644 index 0000000000..0b2a1e5198 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f090 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCP88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCP88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACP88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCP88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCP88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCP88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCP88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCP88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCP88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 90 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCP88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 90 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCP88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCP88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 90 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCP88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 90 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCP88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 90 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCP88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 90 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCP88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 90 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f096 b/parm/wave/grib2_gfswave.wc_10m.f096 new file mode 100644 index 0000000000..0e54d38848 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f096 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCQ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCQ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACQ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCQ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCQ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCQ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCQ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCQ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCQ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 96 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCQ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 96 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCQ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. 
Of Data ',WMOHEAD='EYCQ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 96 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCQ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 96 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCQ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 96 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCQ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 96 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCQ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 96 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f102 b/parm/wave/grib2_gfswave.wc_10m.f102 new file mode 100644 index 0000000000..0d5f302fa1 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f102 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCQ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCQ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACQ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCQ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCQ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCQ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCQ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCQ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCQ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 102 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCQ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 102 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCQ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCQ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 102 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCQ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 102 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCQ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 102 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCQ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 102 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPCQ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 102 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f108 b/parm/wave/grib2_gfswave.wc_10m.f108 new file mode 100644 index 0000000000..50ff238485 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f108 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCZ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCZ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACZ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCZ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCZ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCZ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCZ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCZ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCZ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 108 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCZ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 108 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCZ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCZ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 108 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCZ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 108 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCZ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 108 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCZ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 108 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCZ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 108 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f114 b/parm/wave/grib2_gfswave.wc_10m.f114 new file mode 100644 index 0000000000..244e459484 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f114 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCZ88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCZ88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACZ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCZ88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCZ88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCZ88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCZ88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCZ88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCZ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 114 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. 
Of Data ',WMOHEAD='EOCZ88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 114 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCZ88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCZ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 114 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCZ88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 114 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCZ88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 114 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCZ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 114 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCZ88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 114 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f120 b/parm/wave/grib2_gfswave.wc_10m.f120 new file mode 100644 index 0000000000..9b29cdc0d1 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f120 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCR88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCR88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACR88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCR88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCR88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCR88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCR88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCR88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCR88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 120 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCR88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 120 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCR88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCR88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 120 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCR88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 120 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCR88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 120 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCR88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 120 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPCR88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 120 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f126 b/parm/wave/grib2_gfswave.wc_10m.f126 new file mode 100644 index 0000000000..7b67a32560 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f126 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCR88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCR88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACR88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCR88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCR88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCR88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCR88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCR88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCR88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 126 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCR88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 126 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCR88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCR88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 126 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCR88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 126 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCR88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 126 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCR88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 126 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCR88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 126 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f132 b/parm/wave/grib2_gfswave.wc_10m.f132 new file mode 100644 index 0000000000..783bfaf0e2 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f132 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCS88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCS88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACS88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCS88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCS88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCS88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCS88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCS88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCS88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 132 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. 
Of Data ',WMOHEAD='EOCS88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 132 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCS88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCS88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 132 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCS88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 132 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCS88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 132 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCS88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 132 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCS88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 132 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f138 b/parm/wave/grib2_gfswave.wc_10m.f138 new file mode 100644 index 0000000000..292160e70f --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f138 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCS88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCS88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACS88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCS88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCS88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCS88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCS88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCS88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCS88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 138 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCS88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 138 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCS88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCS88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 138 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCS88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 138 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCS88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 138 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCS88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 138 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPCS88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 138 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f144 b/parm/wave/grib2_gfswave.wc_10m.f144 new file mode 100644 index 0000000000..ccfd82dd78 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f144 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCT88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCT88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACT88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCT88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCT88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCT88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCT88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCT88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCT88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 144 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCT88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 144 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCT88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCT88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 144 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCT88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 144 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCT88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 144 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCT88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 144 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCT88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 144 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f150 b/parm/wave/grib2_gfswave.wc_10m.f150 new file mode 100644 index 0000000000..8a4891b48b --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f150 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCT88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCT88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACT88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCT88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCT88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCT88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCT88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCT88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCT88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 150 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. 
Of Data ',WMOHEAD='EOCT88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 150 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCT88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCT88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 150 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCT88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 150 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCT88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 150 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCT88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 150 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCT88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 150 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f156 b/parm/wave/grib2_gfswave.wc_10m.f156 new file mode 100644 index 0000000000..a581cbe253 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f156 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCU88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCU88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACU88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCU88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCU88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCU88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCU88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCU88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCU88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 156 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCU88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 156 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCU88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCU88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 156 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCU88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 156 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCU88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 156 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCU88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 156 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPCU88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 156 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f162 b/parm/wave/grib2_gfswave.wc_10m.f162 new file mode 100644 index 0000000000..c54e1289dd --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f162 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCU88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCU88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACU88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCU88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCU88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCU88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCU88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCU88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCU88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 162 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCU88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 162 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCU88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCU88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 162 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCU88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 162 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCU88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 162 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCU88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 162 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCU88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 162 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f168 b/parm/wave/grib2_gfswave.wc_10m.f168 new file mode 100644 index 0000000000..6bd248c568 --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f168 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCV88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCV88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACV88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCV88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCV88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCV88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCV88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCV88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCV88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 168 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. 
Of Data ',WMOHEAD='EOCV88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 168 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCV88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCV88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 168 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCV88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 168 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCV88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 168 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCV88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 168 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCV88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 168 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f174 b/parm/wave/grib2_gfswave.wc_10m.f174 new file mode 100644 index 0000000000..bd1894388c --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f174 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCV88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCV88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACV88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCV88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCV88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCV88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCV88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCV88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCV88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 174 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCV88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 174 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCV88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCV88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 174 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCV88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 174 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCV88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 174 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCV88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 174 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPCV88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 174 241 0 2 255 0 0 / diff --git a/parm/wave/grib2_gfswave.wc_10m.f180 b/parm/wave/grib2_gfswave.wc_10m.f180 new file mode 100644 index 0000000000..4c8cb145de --- /dev/null +++ b/parm/wave/grib2_gfswave.wc_10m.f180 @@ -0,0 +1,16 @@ +&GRIBIDS DESC=' WIND Surface ',WMOHEAD='EQCW88 KWBJ',PDTN= 0 ,PDT= 2 1 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' WDIR Surface ',WMOHEAD='ERCW88 KWBJ',PDTN= 0 ,PDT= 2 0 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' U GRD Surface ',WMOHEAD='EACW88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 2 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' V GRD Surface ',WMOHEAD='EBCW88 KWBJ',EXTRACT=.true.,PDTN= 0 ,PDT= 2 3 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' HTSGW Surface ',WMOHEAD='ECCW88 KWBJ',PDTN= 0 ,PDT= 0 3 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' PERPW Surface ',WMOHEAD='EJCW88 KWBJ',PDTN= 0 ,PDT= 0 11 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' DIRPW Surface ',WMOHEAD='EKCW88 KWBJ',PDTN= 0 ,PDT= 0 10 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' WVHGT Surface ',WMOHEAD='ELCW88 KWBJ',PDTN= 0 ,PDT= 0 5 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCW88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 180 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWELL Order Seq. Of Data ',WMOHEAD='EOCW88 KWBJ',PDTN= 0 ,PDT= 0 8 2 0 11 0 0 1 180 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVPER Surface ',WMOHEAD='EMCW88 KWBJ',PDTN= 0 ,PDT= 0 6 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCW88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 180 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWPER Order Seq. Of Data ',WMOHEAD='EYCW88 KWBJ',PDTN= 0 ,PDT= 0 9 2 0 11 0 0 1 180 241 0 2 255 0 0 / +&GRIBIDS DESC=' WVDIR Surface ',WMOHEAD='ENCW88 KWBJ',PDTN= 0 ,PDT= 0 4 2 0 11 0 0 1 180 1 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. Of Data ',WMOHEAD='EPCW88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 180 241 0 1 255 0 0 / +&GRIBIDS DESC=' SWDIR Order Seq. 
Of Data ',WMOHEAD='EPCW88 KWBJ',PDTN= 0 ,PDT= 0 7 2 0 11 0 0 1 180 241 0 2 255 0 0 / diff --git a/parm/wave/reg025_interp.inp.tmpl b/parm/wave/reg025_interp.inp.tmpl new file mode 100755 index 0000000000..c65289bbae --- /dev/null +++ b/parm/wave/reg025_interp.inp.tmpl @@ -0,0 +1,12 @@ +$ Input file for interpolation of GLO30m_ext Grid +$------------------------------------------------ +$ Start Time DT NSteps + TIME DT NSTEPS +$ Total number of grids + 2 +$ Grid extensions + 'mx025' + 'reg025' +$ + 0 +$ diff --git a/parm/wave/wave_gfs.buoys b/parm/wave/wave_gfs.buoys deleted file mode 100755 index c7c1aec357..0000000000 --- a/parm/wave/wave_gfs.buoys +++ /dev/null @@ -1,614 +0,0 @@ -$ -$ Global output point data file for multi-grid wave model -$ -$ Key to data in file: -$ -$ LON Longitude, east positive -$ LAT Latitude -$ NAME Output point name C*10, no blanks in name allowed -$ AH Anemometer height, dummy value for none-data points -$ TYPE Buoy type indicator, used for plotting and postprocessing -$ DAT Data point -$ NBY 'Non-NWS Virtual buoy' -$ SOURCE Source of data point -$ ENCAN Environment Canada -$ GOMOOS Gulf of Maine OOS -$ IDT Irish Department of Transportation -$ METFR Meteo France -$ NCEP Boundary and other data points -$ NDBC National Data Buoy Center -$ PRIV Private and incidental data sources -$ SCRIPPS Scripps -$ UKMO UK Met Office -$ PDES Puertos del Estados -$ SHOM Service Hydrographique et Oceanographique de la Marine -$ OCNOR Fugro Oceanor -$ WHOI Woods Hole Oceanographic Institute -$ SKOREA South Korea -$ MVEW Ministerie van Verkeer en Waterstaat -$ CORMP Coastal Ocean Research and Monitoring Program -$ DIMAR Direccion General Maritima (Columbia) -$ BP British Petroleum -$ SCALE Scale indicator for plotting of locations on map -$ Point will only be plotted if SCALE =< DX in our -$ GrADS scripts, DX is width of plot in logitude -$ -$ Notes: -$ -$ - The '$' at the first position identifies comments for WAVEWATCH III -$ input. -$ - The first three data columns are used by the forecats code, the other -$ are used by postprocessing scripts. 
-$ -$ LON LAT NAME AH TYPE SOURCE SCALE -$ --------------------------------------------------------- -$ -$ AWIPS Data section (most actual observational sites) -$ AWIPS code indicated prior and after each AWIPS section -$ -$AGGA48 -$ Gulf of Alaska (AG) Spectral data (4) near S/SW Alaska Anchorage (8) - -148.02 56.31 '46001 ' 5.0 DAT NDBC 360 - -154.98 52.70 '46066 ' 5.0 DAT NDBC 360 - -146.83 60.22 '46061 ' 5.0 DAT NDBC 90 - -160.81 53.93 '46075 ' 5.0 DAT NDBC 360 - -148.00 59.50 '46076 ' 5.0 DAT NDBC 360 - -152.45 56.05 '46078 ' 5.0 DAT NDBC 360 - -152.09 59.76 '46106 ' 999 DAT NDBC 75 - -150.00 58.00 '46080 ' 5.0 DAT NDBC 360 - -151.829 59.597 '46108 ' 5.0 DAT NDBC 45 - -160.000 57.700 '46021 ' 999.0 DAT NDBC 45 - -146.805 60.584 '46060 ' 5.0 DAT NDBC 45 - -154.175 57.910 '46077 ' 5.0 DAT NDBC 45 - -152.230 59.050 '46079 ' 4.9 DAT NDBC 45 - -152.233 59.049 '46105 ' 2.0 DAT NDBC 45 - -147.992 59.925 '46107 ' 2.0 DAT NDBC 45 - -165.446 64.489 '46265 ' 2.0 DAT NDBC 45 -$AGGA48 -$ -$AGGA47 -$ Gulf of Alaska (AG) Spectral data (4) near Alaska Panhandle and NBC (7) - -136.10 50.93 '46004 ' 5.0 DAT ENCAN 360 - -138.85 53.91 '46184 ' 5.0 DAT ENCAN 360 - -143.42 59.69 '46082 ' 5.0 DAT NDBC 360 - -138.00 58.25 '46083 ' 5.0 DAT NDBC 360 - -136.16 56.59 '46084 ' 5.0 DAT NDBC 360 - -142.56 56.85 '46085 ' 5.0 DAT NDBC 360 - -134.28 54.16 '46205 ' 5.0 DAT ENCAN 45 - -132.45 54.38 '46145 ' 5.0 DAT ENCAN 45 - -131.22 51.83 '46147 ' 5.0 DAT ENCAN 90 - -131.10 53.62 '46183 ' 5.0 DAT ENCAN 45 - -129.81 52.42 '46185 ' 5.0 DAT ENCAN 45 - -128.75 51.37 '46204 ' 5.0 DAT ENCAN 45 - -129.92 50.87 '46207 ' 5.0 DAT ENCAN 45 - -132.68 52.52 '46208 ' 5.0 DAT ENCAN 45 - -129.795 52.437 '46138 ' 999.0 DAT NDBC 45 -$AGGA47 -$ -$AGPZ46 -$ Eastern Pacific (PZ) spectral data (4) near Pacific states and SBC (6) - -130.27 42.60 '46002 ' 5.0 DAT NDBC 360 - -137.48 40.80 '46006 ' 5.0 DAT NDBC 360 - -130.00 37.98 '46059 ' 5.0 DAT NDBC 360 - -120.87 34.88 '46011 ' 5.0 DAT NDBC 15 - -122.88 37.36 '46012 ' 5.0 DAT NDBC 45 - -123.32 38.23 '46013 ' 5.0 DAT NDBC 25 - -123.97 39.22 '46014 ' 5.0 DAT NDBC 45 - -124.54 40.78 '46022 ' 5.0 DAT NDBC 25 - -120.97 34.71 '46023 ' 10.0 DAT NDBC 45 - -122.82 37.75 '46026 ' 5.0 DAT NDBC 25 - -124.38 41.85 '46027 ' 5.0 DAT NDBC 45 - -124.85 42.75 '46015 ' 5.0 DAT NDBC 45 - -119.08 33.75 '46025 ' 5.0 DAT NDBC 45 - -121.89 35.74 '46028 ' 5.0 DAT NDBC 45 - -124.53 40.42 '46030 ' 5.0 DAT NDBC 15 - -122.42 36.75 '46042 ' 5.0 DAT NDBC 45 - -119.53 32.43 '46047 ' 5.0 DAT NDBC 45 - -124.53 44.62 '46050 ' 5.0 DAT NDBC 45 - -119.85 34.24 '46053 ' 5.0 DAT NDBC 45 - -120.45 34.27 '46054 ' 10.0 DAT NDBC 25 - -121.01 35.10 '46062 ' 5.0 DAT NDBC 45 - -120.70 34.27 '46063 ' 5.0 DAT NDBC 45 - -120.20 33.65 '46069 ' 5.0 DAT NDBC 45 - -118.00 32.50 '46086 ' 5.0 DAT NDBC 45 - -125.77 45.88 '46089 ' 5.0 DAT NDBC 45 - -124.74 40.29 '46213 ' 999. DAT SCRIPPS 25 - -123.47 37.95 '46214 ' 999. DAT SCRIPPS 45 - -119.80 34.33 '46216 ' 999. DAT SCRIPPS 15 - -119.43 34.17 '46217 ' 999. DAT SCRIPPS 15 - -120.78 34.45 '46218 ' 999. DAT SCRIPPS 25 - -119.88 33.22 '46219 ' 999. DAT SCRIPPS 45 - -118.63 33.85 '46221 ' 999. DAT SCRIPPS 15 - -118.32 33.62 '46222 ' 999. DAT SCRIPPS 15 - -117.77 33.46 '46223 ' 999. DAT SCRIPPS 15 - -117.47 33.18 '46224 ' 999. DAT SCRIPPS 15 - -117.39 32.93 '46225 ' 999. DAT SCRIPPS 15 - -117.44 32.63 '46227 ' 999. DAT SCRIPPS 15 - -124.55 43.77 '46229 ' 999. DAT SCRIPPS 25 - -117.37 32.75 '46231 ' 999. DAT SCRIPPS 15 - -117.421 32.530 '46232 ' 999. DAT SCRIPPS 15 - -120.86 35.20 '46215 ' 999. 
DAT SCRIPPS 45 - -121.95 36.76 '46236 ' 999. DAT SCRIPPS 15 - -122.634 37.787 '46237 ' 999. DAT SCRIPPS 15 - -119.47 33.40 '46238 ' 999. DAT SCRIPPS 15 - -122.10 36.34 '46239 ' 999. DAT SCRIPPS 15 - -121.91 36.62 '46240 ' 999. DAT SCRIPPS 15 - -124.13 46.22 '46243 ' 999. DAT SCRIPPS 45 - -124.36 40.89 '46244 ' 999. DAT SCRIPPS 45 - -145.20 50.033 '46246 ' 999. DAT SCRIPPS 45 - -124.67 46.13 '46248 ' 999. DAT SCRIPPS 45 - -119.200 33.000 '46024 ' 10.0 DAT NDBC 45 - -121.899 36.835 '46091 ' 4.0 DAT NDBC 45 - -122.030 36.750 '46092 ' 4.0 DAT NDBC 45 - -122.410 36.690 '46093 ' 4.0 DAT NDBC 45 - -124.300 44.642 '46094 ' 3.0 DAT NDBC 45 - -124.304 44.639 '46097 ' 4.5 DAT NDBC 45 - -124.956 44.381 '46098 ' 4.5 DAT NDBC 45 - -122.351 36.723 '46114 ' 999.0 DAT NDBC 45 - -124.313 40.753 '46212 ' 999.0 DAT NDBC 45 - -117.353 32.848 '46226 ' 999.0 DAT NDBC 45 - -117.320 32.936 '46233 ' 3.0 DAT NDBC 45 - -117.167 32.572 '46235 ' 999.0 DAT NDBC 45 - -117.439 33.220 '46242 ' 999.0 DAT NDBC 45 - -122.833 37.753 '46247 ' 999.0 DAT NDBC 45 - -119.708 33.821 '46249 ' 999.0 DAT NDBC 45 - -119.090 34.034 '46250 ' 999.0 DAT NDBC 45 - -119.550 33.760 '46251 ' 999.0 DAT NDBC 45 - -119.257 33.953 '46252 ' 999.0 DAT NDBC 45 - -118.181 33.576 '46253 ' 999.0 DAT NDBC 45 - -117.267 32.868 '46254 ' 999.0 DAT NDBC 45 - -119.651 33.400 '46255 ' 999.0 DAT NDBC 45 - -118.201 33.700 '46256 ' 999.0 DAT NDBC 45 - -120.766 34.439 '46257 ' 999.0 DAT NDBC 45 - -117.500 32.750 '46258 ' 999.0 DAT NDBC 45 - -121.497 34.767 '46259 ' 999.0 DAT NDBC 45 - -119.004 33.704 '46262 ' 999.0 DAT NDBC 45 -$AGPZ46 -$ -$AGPZ47 -$ Eastern Pacific (PZ) spectral data (4) near Alaska Panhandle and NBC (7) - -131.02 46.05 '46005 ' 5.0 DAT NDBC 360 - -133.94 48.35 '46036 ' 5.0 DAT ENCAN 360 - -127.93 49.74 '46132 ' 5.0 DAT ENCAN 90 - -126.00 48.84 '46206 ' 5.0 DAT ENCAN 45 - -124.51 46.12 '46029 ' 5.0 DAT NDBC 45 - -124.75 47.34 '46041 ' 5.0 DAT NDBC 45 - -124.73 48.49 '46087 ' 5.0 DAT NDBC 45 - -124.24 46.86 '46211 ' 999. DAT SCRIPPS 25 - -123.165 48.334 '46088 ' 5.0 DAT NDBC 45 - -124.127 46.173 '46096 ' 3.0 DAT NDBC 45 - -124.566 46.986 '46099 ' 4.5 DAT NDBC 45 - -124.972 46.851 '46100 ' 4.5 DAT NDBC 45 - -124.950 47.967 '46119 ' 3.7 DAT NDBC 45 - -124.063 46.215 '46127 ' 3.0 DAT NDBC 45 - -126.010 48.844 '46139 ' 999.0 DAT NDBC 45 - -151.700 57.480 '46264 ' 999.0 DAT NDBC 45 -$AGPZ47 -$ -$AGPN48 -$ North Pacific and Behring Sea (PN) spectra (4) near S/SW Alaska Anchorage (8) - -177.58 57.05 '46035 ' 10.0 DAT NDBC 360 - 175.28 55.00 '46070 ' 5.0 DAT NDBC 360 - -172.03 54.94 '46073 ' 10.0 DAT NDBC 360 - 179.05 51.16 '46071 ' 5.0 DAT NDBC 360 - -171.73 52.25 '46072 ' 5.0 DAT NDBC 360 - -168.000 55.883 '46020 ' 999.0 DAT NDBC 360 -$AGPN48 -$ -$AGHW40 -$ Hawaiian waters (HW) spectra (4) in Pacific Ocean and Pacific Isles (0) - -162.21 23.43 '51001 ' 5.0 DAT NDBC 360 - -157.78 17.19 '51002 ' 5.0 DAT NDBC 360 - -160.82 19.22 '51003 ' 5.0 DAT NDBC 360 - -152.48 17.52 '51004 ' 5.0 DAT NDBC 360 - -158.12 21.67 '51201 ' 999. DAT SCRIPPS 11 - -157.68 21.42 '51202 ' 999. DAT SCRIPPS 11 - -154.06 23.55 '51000 ' 5.0 DAT NDBC 11 - -153.90 23.56 '51100 ' 5.0 DAT NDBC 11 - -162.06 24.32 '51101 ' 5.0 DAT NDBC 11 - -157.00 20.79 '51203 ' 999. DAT SCRIPPS 11 - -158.12 21.28 '51204 ' 999. DAT SCRIPPS 11 - -156.42 21.02 '51205 ' 999. DAT SCRIPPS 11 - -154.97 19.78 '51206 ' 999. DAT SCRIPPS 11 - -157.75 21.48 '51207 ' 999. DAT SCRIPPS 11 - -153.87 0.02 '51028 ' 5.0 DAT NDBC 11 - -158.303 21.096 '51200 ' 999.0 DAT NDBC 11 - -159.574 22.285 '51208 ' 999. 
DAT SCRIPPS 11 - -170.493 -14.264 '51209 ' 999.0 DAT NDBC 360 - -157.756 21.477 '51210 ' 999.0 DAT NDBC 11 - -134.667 7.630 '52212 ' 999.0 DAT NDBC 360 - -157.959 21.297 '51211 ' 999.0 DAT NDBC 360 - -158.150 21.323 '51212 ' 999.0 DAT NDBC 360 - -157.003 20.750 '51213 ' 999.0 DAT NDBC 360 -$AGHW40 -$ -$AGPW40 -$ Western Pacific (PW) spectra (4) in Pacific Ocean and Pacific Isles (0) - 144.79 13.35 '52200 ' 999. DAT SCRIPPS 360 - 126.02 37.23 '22101 ' 999. DAT SKOREA 100 - 125.77 34.80 '22102 ' 999. DAT SKOREA 100 - 127.50 34.00 '22103 ' 999. DAT SKOREA 100 - 128.90 34.77 '22104 ' 999. DAT SKOREA 100 - 130.00 37.53 '22105 ' 999. DAT SKOREA 100 - 171.40 7.09 '52201 ' 999. DAT SCRIPPS 360 - 144.80 13.68 '52202 ' 999. DAT SCRIPPS 360 - 145.66 15.27 '52211 ' 999. DAT SCRIPPS 360 - 133.62 33.19 '21178 ' 999. DAT WMO 360 - 131.11 37.46 '21229 ' 999. DAT WMO 360 - 125.75 36.25 '22108 ' 999. DAT WMO 360 - 126.14 33.79 '22184 ' 999. DAT WMO 360 - 125.43 37.09 '22185 ' 999. DAT WMO 360 - 125.81 35.66 '22186 ' 999. DAT WMO 360 - 127.02 33.13 '22187 ' 999. DAT WMO 360 - 128.23 34.39 '22188 ' 999. DAT WMO 360 - 129.84 35.35 '22189 ' 999. DAT WMO 360 - 129.87 36.91 '22190 ' 999. DAT WMO 360 -$AGPW40 -$ -$AGPS40 -$ South Pacific (PS) in Pacific Ocean and Pacific Isles (0) - 150.18 -37.29 '55020 ' 999. DAT UNKNOWN 50 - 151.07 -23.31 '55033 ' 999. DAT UNKNOWN 50 - 153.63 -27.49 '55035 ' 999. DAT UNKNOWN 50 - 148.19 -38.60 '55039 ' 999. DAT UNKNOWN 50 -$AGPS40 -$ -$AGGX42 -$ Gulf of Mexico (GX) spectra (4) south from NC and Puerto Rico (2) - -89.67 25.90 '42001 ' 10.0 DAT NDBC 360 - -94.42 25.17 '42002 ' 10.0 DAT NDBC 360 - -85.94 26.07 '42003 ' 10.0 DAT NDBC 360 - -88.77 30.09 '42007 ' 5.0 DAT NDBC 90 - -95.36 27.91 '42019 ' 5.0 DAT NDBC 90 - -96.70 26.94 '42020 ' 5.0 DAT NDBC 90 - -94.40 29.22 '42035 ' 5.0 DAT NDBC 90 - -84.52 28.50 '42036 ' 5.0 DAT NDBC 90 - -86.02 28.79 '42039 ' 5.0 DAT NDBC 90 - -88.21 29.18 '42040 ' 5.0 DAT NDBC 90 - -90.46 27.50 '42041 ' 5.0 DAT NDBC 90 - -92.55 27.42 '42038 ' 5.0 DAT NDBC 90 - -94.05 22.01 '42055 ' 10.0 DAT NDBC 360 - -84.274 27.345 '42099 ' 999. 
DAT SCRIPPS 100 - -87.55 30.06 '42012 ' 5.0 DAT NDBC 90 - -88.49 28.19 '42887 ' 48.2 DAT BP 90 - -82.924 27.173 '42013 ' 3.1 DAT NDBC 90 - -82.220 25.254 '42014 ' 2.8 DAT NDBC 90 - -83.306 28.311 '42021 ' 2.8 DAT NDBC 90 - -83.741 27.504 '42022 ' 3.1 DAT NDBC 90 - -83.086 26.010 '42023 ' 3.1 DAT NDBC 90 - -94.899 28.982 '42043 ' 3.4 DAT NDBC 90 - -97.051 26.191 '42044 ' 3.4 DAT NDBC 90 - -96.500 26.217 '42045 ' 3.4 DAT NDBC 90 - -94.037 27.890 '42046 ' 3.4 DAT NDBC 90 - -93.597 27.896 '42047 ' 3.4 DAT NDBC 90 - -88.647 30.042 '42067 ' 5.0 DAT NDBC 90 - -83.650 25.700 '42097 ' 999.0 DAT NDBC 90 - -82.931 27.589 '42098 ' 999.0 DAT NDBC 90 - -90.471 26.672 '42360 ' 3.0 DAT NDBC 90 - -92.490 27.550 '42361 ' 122.0 DAT NDBC 90 - -90.648 27.795 '42362 ' 122.0 DAT NDBC 90 - -89.220 28.160 '42363 ' 122.0 DAT NDBC 90 - -88.090 29.060 '42364 ' 122.0 DAT NDBC 90 - -89.120 28.200 '42365 ' 122.0 DAT NDBC 90 - -90.283 27.207 '42369 ' 60.4 DAT NDBC 90 - -90.536 27.322 '42370 ' 78.7 DAT NDBC 90 - -88.056 28.866 '42374 ' 61.0 DAT NDBC 90 - -88.289 28.521 '42375 ' 61.0 DAT NDBC 90 - -87.944 29.108 '42376 ' 61.0 DAT NDBC 90 - -94.898 26.129 '42390 ' 61.0 DAT NDBC 90 - -90.027 27.196 '42392 ' 100.0 DAT NDBC 90 - -89.240 28.157 '42394 ' 100.0 DAT NDBC 90 - -90.792 26.404 '42395 ' 3.0 DAT NDBC 90 -$AGGX42 -$ -$AGCA42 -$ Caribbean Sea (CA) spectra (4) south from NC and Puerto Rico (2) - -85.06 19.87 '42056 ' 10.0 DAT NDBC 360 - -81.50 16.83 '42057 ' 10.0 DAT NDBC 360 - -75.06 15.09 '42058 ' 10.0 DAT NDBC 360 - -81.95 24.39 '42080 ' 999. DAT NDBC 45 - -67.50 15.01 '42059 ' 5.0 DAT NDBC 360 - -85.38 -19.62 '32012' 999. DAT WHOI 360 - -63.50 16.50 '42060 ' 5.0 DAT NDBC 360 - -74.681 11.161 '41194 ' 999.0 DAT NDBC 90 - -66.524 17.860 '42085 ' 4.0 DAT NDBC 90 - -80.061 19.699 '42089 ' 3.4 DAT NDBC 90 - -64.763 18.251 '41052 ' 4.0 DAT NDBC 90 - -65.004 18.257 '41051 ' 4.0 DAT NDBC 90 - -65.457 18.260 '41056 ' 4.0 DAT NDBC 90 - -67.280 18.379 '41115 ' 999.0 DAT NDBC 90 - -81.080 30.000 '41117 ' 999.0 DAT NDBC 90 - -81.244 24.535 '42079 ' 999.0 DAT NDBC 90 - -75.042 36.000 '42086 ' 999.0 DAT NDBC 90 - -81.967 24.407 '42095 ' 999.0 DAT NDBC 90 -$AGCA42 -$ -$AGNT42 -$ Western Atlantic (NT) spectra (4) south from NC and Puerto Rico (2) - -72.66 34.68 '41001 ' 5.0 DAT NDBC 360 - -75.36 32.32 '41002 ' 5.0 DAT NDBC 360 - -79.09 32.50 '41004 ' 5.0 DAT NDBC 360 - -80.87 31.40 '41008 ' 5.0 DAT NDBC 360 - -80.17 28.50 '41009 ' 5.0 DAT NDBC 80 - -78.47 28.95 '41010 ' 5.0 DAT NDBC 80 - -80.60 30.00 '41012 ' 5.0 DAT NDBC 80 - -77.74 33.44 '41013 ' 5.0 DAT NDBC 80 - -75.40 35.01 '41025 ' 5.0 DAT NDBC 80 - -77.28 34.48 '41035 ' 5.0 DAT NDBC 80 - -76.95 34.21 '41036 ' 5.0 DAT NDBC 80 - -65.01 20.99 '41043 ' 5.0 DAT NDBC 90 - -70.99 24.00 '41046 ' 5.0 DAT NDBC 90 - -71.49 27.47 '41047 ' 10.0 DAT NDBC 90 - -69.65 31.98 '41048 ' 10.0 DAT NDBC 90 - -81.29 30.72 '41112 ' 999. DAT SCRIPPS 30 - -80.53 28.40 '41113 ' 999. DAT SCRIPPS 30 - -80.22 27.55 '41114 ' 999. 
DAT SCRIPPS 30 - -74.84 36.61 '44014 ' 5.0 DAT NDBC 90 - -77.36 33.99 '41037 ' 3.0 DAT CORMP 80 - -77.72 34.14 '41038 ' 3.0 DAT CORMP 80 - -63.00 27.50 '41049 ' 5.0 DAT NDBC 90 - -58.69 21.65 '41044 ' 5.0 DAT NDBC 90 - -77.30 34.48 '41109 ' 3.0 DAT CORMP 80 - -77.71 34.14 '41110 ' 3.0 DAT CORMP 80 - -67.28 18.38 '41111 ' 3.0 DAT CORMP 80 - -66.099 18.474 '41053 ' 5.0 DAT NDBC 80 - -65.157 18.476 '41058 ' 5.0 DAT NDBC 80 - -78.484 33.837 '41024 ' 3.0 DAT NDBC 80 - -78.137 33.302 '41027 ' 3.0 DAT NDBC 80 - -79.624 32.803 '41029 ' 3.0 DAT NDBC 80 - -79.340 32.520 '41030 ' 3.0 DAT NDBC 80 - -80.410 32.279 '41033 ' 3.0 DAT NDBC 80 - -38.000 24.581 '41061 ' 2.7 DAT NDBC 80 - -75.095 35.778 '41062 ' 3.5 DAT NDBC 80 - -75.941 34.782 '41063 ' 3.5 DAT NDBC 80 - -76.949 34.207 '41064 ' 3.0 DAT NDBC 80 - -78.015 33.721 '41108 ' 999.0 DAT NDBC 80 - -76.948 34.210 '41159 ' 999.0 DAT NDBC 80 - -75.714 36.200 '44056 ' 999.0 DAT NDBC 80 -$AGNT42 -$ -$AGNT41 -$ Western Atlantic (NT) spectra (4) NE states north of VA (1) - -53.62 44.26 '44138 ' 5.0 DAT ENCAN 360 - -66.58 41.11 '44011 ' 5.0 DAT NDBC 360 - -58.00 43.00 '44141 ' 5.0 DAT ENCAN 360 - -64.02 42.50 '44142 ' 5.0 DAT ENCAN 360 - -48.01 46.77 'WRB07 ' 10.0 DAT PRIV 360 - -62.00 42.26 '44137 ' 5.0 DAT ENCAN 360 - -57.08 44.26 '44139 ' 5.0 DAT ENCAN 360 - -51.74 43.75 '44140 ' 5.0 DAT ENCAN 360 - -64.01 42.50 '44150 ' 5.0 DAT ENCAN 360 - -70.43 38.48 '44004 ' 5.0 DAT NDBC 90 - -69.16 43.19 '44005 ' 5.0 DAT NDBC 90 - -69.43 40.50 '44008 ' 5.0 DAT NDBC 90 - -74.70 38.46 '44009 ' 5.0 DAT NDBC 90 - -72.10 40.70 '44017 ' 5.0 DAT NDBC 80 - -69.29 41.26 '44018 ' 5.0 DAT NDBC 80 - -73.17 40.25 '44025 ' 5.0 DAT NDBC 80 - -71.01 41.38 '44070 ' 999. DAT NDBC 60 - -65.93 42.31 '44024 ' 4.0 DAT GOMOOS 80 - -67.31 44.27 '44027 ' 5.0 DAT NDBC 80 - -67.88 43.49 '44037 ' 4.0 DAT GOMOOS 80 - -66.55 43.62 '44038 ' 4.0 DAT GOMOOS 80 - -53.39 46.44 '44251 ' 5.0 DAT ENCAN 80 - -57.35 47.28 '44255 ' 5.0 DAT ENCAN 80 - -75.720 36.915 '44099 ' 999. DAT SCRIPPS 90 - -75.59 36.26 '44100 ' 999. DAT SCRIPPS 90 - -72.60 39.58 '44066 ' 5.0 DAT NDBC 80 - -75.492 36.872 '44093 ' 999. DAT SCRIPPS 80 - -75.33 35.75 '44095 ' 999. DAT SCRIPPS 80 - -75.809 37.023 '44096 ' 999. DAT SCRIPPS 80 - -71.12 40.98 '44097 ' 999. DAT SCRIPPS 80 - -70.17 42.80 '44098 ' 999. 
DAT SCRIPPS 80 - -70.141 43.525 '44007 ' 5.0 DAT NDBC 80 - -70.651 42.346 '44013 ' 5.0 DAT NDBC 80 - -70.186 41.439 '44020 ' 5.0 DAT NDBC 80 - -70.566 42.523 '44029 ' 4.0 DAT NDBC 80 - -70.428 43.181 '44030 ' 4.0 DAT NDBC 80 - -70.060 43.570 '44031 ' 4.0 DAT NDBC 80 - -69.355 43.716 '44032 ' 4.0 DAT NDBC 80 - -68.998 44.055 '44033 ' 4.0 DAT NDBC 80 - -68.109 44.106 '44034 ' 4.0 DAT NDBC 80 - -72.655 41.138 '44039 ' 3.5 DAT NDBC 80 - -73.580 40.956 '44040 ' 3.5 DAT NDBC 80 - -76.391 39.152 '44043 ' 3.0 DAT NDBC 80 - -75.183 38.883 '44054 ' 999.0 DAT NDBC 80 - -75.256 39.122 '44055 ' 999.0 DAT NDBC 80 - -76.257 37.567 '44058 ' 3.0 DAT NDBC 80 - -72.067 41.263 '44060 ' 3.5 DAT NDBC 80 - -77.036 38.788 '44061 ' 2.0 DAT NDBC 80 - -76.415 38.556 '44062 ' 3.0 DAT NDBC 80 - -76.448 38.963 '44063 ' 3.0 DAT NDBC 80 - -76.087 36.998 '44064 ' 3.0 DAT NDBC 80 - -73.703 40.369 '44065 ' 5.0 DAT NDBC 80 - -76.266 37.201 '44072 ' 3.0 DAT NDBC 80 - -75.334 37.757 '44089 ' 999.0 DAT NDBC 80 - -70.329 41.840 '44090 ' 999.0 DAT NDBC 80 - -73.769 39.778 '44091 ' 999.0 DAT NDBC 80 - -70.632 42.942 '44092 ' 999.0 DAT NDBC 80 - -73.106 40.585 '44094 ' 999.0 DAT NDBC 80 - -63.408 44.500 '44172 ' 999.0 DAT NDBC 360 - -57.341 47.263 '44235 ' 999.0 DAT NDBC 360 - -76.149 37.024 '44087 ' 999.0 DAT NDBC 360 -$AGNT41 -$ -$AGNT43 -$ Western Atlantic (NT) spectra (4) near South America (3) - -48.13 -27.70 '31201 ' 999. DAT SCRIPPS 180 - -34.567 -8.15 '31052 ' 999. DAT PNBOIA 180 - -43.088 -23.031 '31260 ' 999. DAT PNBOIA 180 - -47.367 -28.5 '31374 ' 999. DAT PNBOIA 180 - -44.933 -25.283 '31051 ' 999. DAT PNBOIA 180 - -51.353 -32.595 '31053 ' 999. DAT PNBOIA 180 - -49.81 -31.52 '31375 ' 999. DAT WMO 360 -$AGNT43 -$ -$AGXT43 -$ Tropical Belt (XT) spectra (4) near South America (3) - -53.08 14.55 '41040 ' 5.0 DAT NDBC 360 - -46.00 14.53 '41041 ' 5.0 DAT NDBC 360 - -57.90 15.90 '41100 ' 5.0 DAT METFR 360 - -56.20 14.60 '41101 ' 5.0 DAT METFR 360 - -50.949 14.754 '41060 ' 2.7 DAT NDBC 360 - -60.848 11.185 '42087 ' 3.4 DAT NDBC 360 - -60.521 11.301 '42088 ' 3.4 DAT NDBC 360 -$AGXT43 -$ -$AGXT40 -$ Tropical Belt (XT) spectra (4) in Pacific Ocean and Pacific Isles (0) - -125.032 10.051 '43010 ' 3.5 DAT NDBC 360 - -144.668 13.729 '52009 ' 5.0 DAT NDBC 360 -$AGXT40 -$ -$AGET43 -$ Eastern Atlantic (ET) spectra (3) near Europe (3) - -5.00 45.20 '62001 ' 3.0 DAT UKMO 360 - -20.00 41.60 '62002 ' 999. DAT UNKNOWN 360 - -12.40 48.70 '62029 ' 3.0 DAT UKMO 360 - -7.90 51.40 '62023 ' 999. DAT UNKNOWN 360 - -5.60 48.50 '62052 ' 999. DAT METFR 100 - -13.30 51.00 '62081 ' 3.0 DAT UKMO 360 - -11.20 53.13 '62090 ' 4.5 DAT IDT 100 - -5.42 53.47 '62091 ' 4.5 DAT IDT 60 - -10.55 51.22 '62092 ' 4.5 DAT IDT 100 - -9.07 54.67 '62093 ' 4.5 DAT IDT 60 - -6.70 51.69 '62094 ' 4.5 DAT IDT 60 - -15.92 53.06 '62095 ' 4.5 DAT IDT 100 - -2.90 49.90 '62103 ' 14.0 DAT UKMO 360 - -12.36 54.54 '62105 ' 3.0 DAT UKMO 360 - -9.90 57.00 '62106 ' 4.5 DAT UKMO 360 - -6.10 50.10 '62107 ' 14.0 DAT UKMO 360 - -19.50 53.50 '62108 ' 3.0 DAT UKMO 360 - -8.50 47.50 '62163 ' 3.0 DAT UKMO 360 - -4.70 52.30 '62301 ' 3.0 DAT UKMO 25 - -5.10 51.60 '62303 ' 3.0 DAT UKMO 25 - 0.00 50.40 '62305 ' 14.0 DAT UKMO 25 - 2.00 51.40 '62170 ' 999.0 DAT UKMO 25 - -11.40 59.10 '64045 ' 3.0 DAT UKMO 360 - -4.50 60.70 '64046 ' 3.0 DAT UKMO 360 - -23.10 64.05 'TFGSK ' 999. DAT UNKNOWN 60 - -15.20 64.00 'TFHFN ' 999. DAT UNKNOWN 60 - -20.35 63.00 'TFSRT ' 999. DAT UNKNOWN 60 - 7.80 64.30 'LF3F ' 999. DAT UNKNOWN 360 - 1.10 55.30 '62026 ' 999. DAT UNKNOWN 360 - 0.00 57.00 '62109 ' 999. 
DAT UNKNOWN 25 - 0.40 58.10 '62111 ' 999. DAT UNKNOWN 25 - 1.30 58.70 '62112 ' 999. DAT UNKNOWN 25 - 1.40 57.70 '62116 ' 999. DAT UNKNOWN 360 - 0.00 57.90 '62117 ' 999. DAT UNKNOWN 15 - 2.00 57.00 '62119 ' 999. DAT UNKNOWN 25 - 1.40 58.70 '62128 ' 999. DAT UNKNOWN 25 - 2.00 56.40 '62132 ' 999. DAT UNKNOWN 25 - 1.00 57.10 '62133 ' 999. DAT UNKNOWN 15 - 2.10 53.00 '62142 ' 999. DAT PRIV 30 - 1.80 57.70 '62143 ' 999. DAT UNKNOWN 25 - 1.70 53.40 '62144 ' 999. DAT PRIV 45 - 2.80 53.10 '62145 ' 999. DAT PRIV 360 - 1.80 57.00 '62152 ' 999. DAT UNKNOWN 25 - 0.50 57.40 '62162 ' 999. DAT UNKNOWN 25 - 0.50 57.20 '62164 ' 999. DAT PRIV 15 - 1.90 51.10 '62304 ' 14.0 DAT UKMO 25 - 1.70 60.60 '63055 ' 999. DAT UNKNOWN 25 - 1.60 59.50 '63056 ' 999. DAT UNKNOWN 25 - 1.50 59.20 '63057 ' 999. DAT UNKNOWN 360 - 1.10 61.20 '63103 ' 999. DAT UNKNOWN 15 - 1.70 60.80 '63108 ' 999. DAT UNKNOWN 15 - 1.50 59.50 '63110 ' 999. DAT PRIV 15 - 1.00 61.10 '63112 ' 999. DAT PRIV 360 - 1.70 61.00 '63113 ' 999. DAT PRIV 100 - 1.30 61.60 '63115 ' 999. DAT PRIV 25 - 2.30 61.20 'LF3J ' 999. DAT UNKNOWN 25 - 3.70 60.60 'LF4B ' 999. DAT UNKNOWN 360 - 2.20 59.60 'LF4H ' 999. DAT UNKNOWN 25 - 1.90 58.40 'LF4C ' 999. DAT UNKNOWN 25 - 3.20 56.50 'LF5U ' 999. DAT UNKNOWN 60 - 3.28 51.99 'EURO ' 999. DAT MVEW 60 - 3.22 53.22 'K13 ' 999. DAT MVEW 25 - -3.03 43.63 '62024 ' 999. DAT PDES 25 - -7.62 44.07 '62082 ' 999. DAT PDES 25 - -9.40 42.12 '62084 ' 999. DAT PDES 25 - -6.97 36.48 '62085 ' 999. DAT PDES 25 - -15.82 28.18 '13130 ' 999. DAT PDES 25 - -16.58 28.00 '13131 ' 999. DAT PDES 25 - 0.90 57.70 '62118 ' 999. DAT UNKNOWN 15 - 2.10 57.10 '62146 ' 999. DAT UNKNOWN 25 - 6.33 55.00 'BSH01 ' 999. DAT UNKNOWN 60 - 7.89 54.16 'BSH02 ' 999. DAT UNKNOWN 60 - 8.12 54.00 'BSH03 ' 999. DAT UNKNOWN 60 - 6.58 54.00 'BSH04 ' 999. DAT UNKNOWN 60 - 8.22 54.92 'BSH05 ' 999. DAT UNKNOWN 60 -$AGET43 -$ -$AGAC43 -$ Arctic Ocean (AC) spectra (4) non-descript (3) - -25.00 65.69 'TFBLK ' 999. DAT UNKNOWN 60 - -18.20 66.50 'TFGRS ' 999. DAT UNKNOWN 60 - -13.50 65.65 'TFKGR ' 999. DAT UNKNOWN 60 - 7.30 65.30 'LF3N ' 999. DAT UNKNOWN 60 - 8.10 66.00 'LF5T ' 999. DAT UNKNOWN 360 - 2.00 66.00 'LDWR ' 999. DAT UNKNOWN 360 - 21.10 71.60 '3FYT ' 999. DAT UNKNOWN 360 - 15.50 73.50 'LFB1 ' 999. DAT OCNOR 360 - 30.00 74.00 'LFB2 ' 999. DAT OCNOR 360 - -9.26 68.48 '64071 ' 999. DAT UNKNOWN 60 - -166.071 70.025 '48012 ' 3.0 DAT NDBC 360 - -169.454 65.011 '48114 ' 999.0 DAT NDBC 360 - -146.040 70.370 '48211 ' 999.0 DAT NDBC 360 - -150.279 70.874 '48212 ' 999.0 DAT NDBC 360 - -164.133 71.502 '48213 ' 999.0 DAT NDBC 360 - -165.248 70.872 '48214 ' 999.0 DAT NDBC 360 - -167.952 71.758 '48216 ' 999.0 DAT NDBC 360 -$AGAC43 -$ -$AGIO45 -$ Indian Ocean (I) spectra (4) non-descript (5) - 72.49 17.02 '23092 ' 999. DAT UNKNOWN 20 - 73.75 15.40 '23093 ' 999. DAT UNKNOWN 120 - 74.50 12.94 '23094 ' 999. DAT UNKNOWN 120 - 80.39 13.19 '23096 ' 999. DAT UNKNOWN 120 - 69.24 15.47 '23097 ' 999. DAT UNKNOWN 360 - 72.51 10.65 '23098 ' 999. DAT UNKNOWN 360 - 90.74 12.14 '23099 ' 999. DAT UNKNOWN 360 - 87.56 18.35 '23100 ' 999. DAT UNKNOWN 120 - 83.27 13.97 '23101 ' 999. DAT UNKNOWN 360 - 87.50 15.00 '23168 ' 999. DAT UNKNOWN 360 - 90.14 18.13 '23169 ' 999. DAT UNKNOWN 360 - 72.66 8.33 '23170 ' 999. DAT UNKNOWN 360 - 72.00 12.50 '23172 ' 999. DAT UNKNOWN 360 - 78.57 8.21 '23173 ' 999. DAT UNKNOWN 120 - 81.53 11.57 '23174 ' 999. DAT UNKNOWN 360 - 116.14 -19.59 '56002 ' 999. DAT UNKNOWN 120 - 115.40 -32.11 '56005 ' 999. DAT UNKNOWN 50 - 114.78 -33.36 '56006 ' 999. 
DAT UNKNOWN 120 - 114.94 -21.41 '56007 ' 999. DAT UNKNOWN 50 - 22.17 -34.97 'AGULHAS_FA' 10.0 DAT PRIV 360 - 121.90 -34.00 '56010 ' 999. DAT UNKNOWN 50 - 114.10 -21.70 '56012 ' 999. DAT UNKNOWN 50 - 85.00 12.60 '23167 ' 999. DAT UNKNOWN 360 - 70.00 11.02 '23171 ' 999. DAT UNKNOWN 360 - 91.66 10.52 '23451 ' 999. DAT UNKNOWN 120 - 89.04 10.97 '23455 ' 999. DAT UNKNOWN 120 - 86.98 9.99 '23456 ' 999. DAT UNKNOWN 120 - 70.10 5.16 '23491 ' 999. DAT UNKNOWN 120 - 68.08 13.89 '23492 ' 999. DAT UNKNOWN 120 - 66.98 11.12 '23493 ' 999. DAT UNKNOWN 120 - 75.00 6.46 '23494 ' 999. DAT UNKNOWN 120 - 68.97 7.13 '23495 ' 999. DAT UNKNOWN 120 -$AGIO45 -$ -$ END of AWIPS Section -$ -$ South America DAT - -77.50 6.26 '32488 ' 999. DAT DIMAR 45 - -77.74 3.52 '32487 ' 999. DAT DIMAR 45 - -72.22 12.35 '41193 ' 999. DAT DIMAR 120 -$ Japanese buoys DAT -$ South Korean buoys DAT - 129.78 36.35 '22106 ' 999. DAT SKOREA 100 - 126.33 33.00 '22107 ' 999. DAT SKOREA 100 -$ Africa DAT - 57.70 -20.45 'MAUR01 ' 999. DAT WMO 360 - 57.75 -20.10 'MAUR02 ' 999. DAT WMO 360 -$ End of multi_1 buoy file -$ - 0.00 0.00 'STOPSTRING' 999. XXX NCEP 0 diff --git a/parm/wave/wave_gfs.buoys b/parm/wave/wave_gfs.buoys new file mode 120000 index 0000000000..6f47adefac --- /dev/null +++ b/parm/wave/wave_gfs.buoys @@ -0,0 +1 @@ +wave_gfs.buoys.full \ No newline at end of file diff --git a/parm/wave/wave_gfs.buoys.dat b/parm/wave/wave_gfs.buoys.dat index c7c1aec357..0d2f4ab11e 100755 --- a/parm/wave/wave_gfs.buoys.dat +++ b/parm/wave/wave_gfs.buoys.dat @@ -63,7 +63,7 @@ $ Gulf of Alaska (AG) Spectral data (4) near S/SW Alaska Anchorage (8) -152.230 59.050 '46079 ' 4.9 DAT NDBC 45 -152.233 59.049 '46105 ' 2.0 DAT NDBC 45 -147.992 59.925 '46107 ' 2.0 DAT NDBC 45 - -165.446 64.489 '46265 ' 2.0 DAT NDBC 45 + -165.475 64.473 '46265 ' 2.0 DAT NDBC 45 $AGGA48 $ $AGGA47 @@ -113,12 +113,12 @@ $ Eastern Pacific (PZ) spectral data (4) near Pacific states and SBC (6) -118.00 32.50 '46086 ' 5.0 DAT NDBC 45 -125.77 45.88 '46089 ' 5.0 DAT NDBC 45 -124.74 40.29 '46213 ' 999. DAT SCRIPPS 25 - -123.47 37.95 '46214 ' 999. DAT SCRIPPS 45 + -123.465 37.9403 '46214 ' 999. DAT SCRIPPS 45 -119.80 34.33 '46216 ' 999. DAT SCRIPPS 15 -119.43 34.17 '46217 ' 999. DAT SCRIPPS 15 -120.78 34.45 '46218 ' 999. DAT SCRIPPS 25 -119.88 33.22 '46219 ' 999. DAT SCRIPPS 45 - -118.63 33.85 '46221 ' 999. DAT SCRIPPS 15 + -118.641 33.8599 '46221 ' 999. DAT SCRIPPS 15 -118.32 33.62 '46222 ' 999. DAT SCRIPPS 15 -117.77 33.46 '46223 ' 999. DAT SCRIPPS 15 -117.47 33.18 '46224 ' 999. DAT SCRIPPS 15 @@ -126,7 +126,7 @@ $ Eastern Pacific (PZ) spectral data (4) near Pacific states and SBC (6) -117.44 32.63 '46227 ' 999. DAT SCRIPPS 15 -124.55 43.77 '46229 ' 999. DAT SCRIPPS 25 -117.37 32.75 '46231 ' 999. DAT SCRIPPS 15 - -117.421 32.530 '46232 ' 999. DAT SCRIPPS 15 + -117.425 32.517 '46232 ' 999. DAT SCRIPPS 15 -120.86 35.20 '46215 ' 999. DAT SCRIPPS 45 -121.95 36.76 '46236 ' 999. DAT SCRIPPS 15 -122.634 37.787 '46237 ' 999. DAT SCRIPPS 15 @@ -136,7 +136,7 @@ $ Eastern Pacific (PZ) spectral data (4) near Pacific states and SBC (6) -124.13 46.22 '46243 ' 999. DAT SCRIPPS 45 -124.36 40.89 '46244 ' 999. DAT SCRIPPS 45 -145.20 50.033 '46246 ' 999. DAT SCRIPPS 45 - -124.67 46.13 '46248 ' 999. DAT SCRIPPS 45 + -124.644 46.133 '46248 ' 999. 
DAT SCRIPPS 45 -119.200 33.000 '46024 ' 10.0 DAT NDBC 45 -121.899 36.835 '46091 ' 4.0 DAT NDBC 45 -122.030 36.750 '46092 ' 4.0 DAT NDBC 45 @@ -144,7 +144,7 @@ $ Eastern Pacific (PZ) spectral data (4) near Pacific states and SBC (6) -124.300 44.642 '46094 ' 3.0 DAT NDBC 45 -124.304 44.639 '46097 ' 4.5 DAT NDBC 45 -124.956 44.381 '46098 ' 4.5 DAT NDBC 45 - -122.351 36.723 '46114 ' 999.0 DAT NDBC 45 + -122.33 36.685 '46114 ' 999.0 DAT NDBC 45 -124.313 40.753 '46212 ' 999.0 DAT NDBC 45 -117.353 32.848 '46226 ' 999.0 DAT NDBC 45 -117.320 32.936 '46233 ' 3.0 DAT NDBC 45 @@ -153,7 +153,7 @@ $ Eastern Pacific (PZ) spectral data (4) near Pacific states and SBC (6) -122.833 37.753 '46247 ' 999.0 DAT NDBC 45 -119.708 33.821 '46249 ' 999.0 DAT NDBC 45 -119.090 34.034 '46250 ' 999.0 DAT NDBC 45 - -119.550 33.760 '46251 ' 999.0 DAT NDBC 45 + -119.564 33.769 '46251 ' 999.0 DAT NDBC 45 -119.257 33.953 '46252 ' 999.0 DAT NDBC 45 -118.181 33.576 '46253 ' 999.0 DAT NDBC 45 -117.267 32.868 '46254 ' 999.0 DAT NDBC 45 @@ -214,9 +214,9 @@ $ Hawaiian waters (HW) spectra (4) in Pacific Ocean and Pacific Isles (0) -153.87 0.02 '51028 ' 5.0 DAT NDBC 11 -158.303 21.096 '51200 ' 999.0 DAT NDBC 11 -159.574 22.285 '51208 ' 999. DAT SCRIPPS 11 - -170.493 -14.264 '51209 ' 999.0 DAT NDBC 360 + -170.5 -14.273 '51209 ' 999.0 DAT NDBC 360 -157.756 21.477 '51210 ' 999.0 DAT NDBC 11 - -134.667 7.630 '52212 ' 999.0 DAT NDBC 360 + 134.670 7.692 '52212 ' 999.0 DAT NDBC 360 -157.959 21.297 '51211 ' 999.0 DAT NDBC 360 -158.150 21.323 '51212 ' 999.0 DAT NDBC 360 -157.003 20.750 '51213 ' 999.0 DAT NDBC 360 @@ -230,7 +230,7 @@ $ Western Pacific (PW) spectra (4) in Pacific Ocean and Pacific Isles (0) 127.50 34.00 '22103 ' 999. DAT SKOREA 100 128.90 34.77 '22104 ' 999. DAT SKOREA 100 130.00 37.53 '22105 ' 999. DAT SKOREA 100 - 171.40 7.09 '52201 ' 999. DAT SCRIPPS 360 + 171.391 7.038 '52201 ' 999. DAT SCRIPPS 360 144.80 13.68 '52202 ' 999. DAT SCRIPPS 360 145.66 15.27 '52211 ' 999. DAT SCRIPPS 360 133.62 33.19 '21178 ' 999. DAT WMO 360 @@ -268,7 +268,7 @@ $ Gulf of Mexico (GX) spectra (4) south from NC and Puerto Rico (2) -90.46 27.50 '42041 ' 5.0 DAT NDBC 90 -92.55 27.42 '42038 ' 5.0 DAT NDBC 90 -94.05 22.01 '42055 ' 10.0 DAT NDBC 360 - -84.274 27.345 '42099 ' 999. DAT SCRIPPS 100 + -84.275 27.348 '42099 ' 999. DAT SCRIPPS 100 -87.55 30.06 '42012 ' 5.0 DAT NDBC 90 -88.49 28.19 '42887 ' 48.2 DAT BP 90 -82.924 27.173 '42013 ' 3.1 DAT NDBC 90 @@ -340,7 +340,7 @@ $ Western Atlantic (NT) spectra (4) south from NC and Puerto Rico (2) -70.99 24.00 '41046 ' 5.0 DAT NDBC 90 -71.49 27.47 '41047 ' 10.0 DAT NDBC 90 -69.65 31.98 '41048 ' 10.0 DAT NDBC 90 - -81.29 30.72 '41112 ' 999. DAT SCRIPPS 30 + -81.292 30.709 '41112 ' 999. DAT SCRIPPS 30 -80.53 28.40 '41113 ' 999. DAT SCRIPPS 30 -80.22 27.55 '41114 ' 999. DAT SCRIPPS 30 -74.84 36.61 '44014 ' 5.0 DAT NDBC 90 @@ -398,7 +398,7 @@ $ Western Atlantic (NT) spectra (4) NE states north of VA (1) -75.492 36.872 '44093 ' 999. DAT SCRIPPS 80 -75.33 35.75 '44095 ' 999. DAT SCRIPPS 80 -75.809 37.023 '44096 ' 999. DAT SCRIPPS 80 - -71.12 40.98 '44097 ' 999. DAT SCRIPPS 80 + -71.126 40.967 '44097 ' 999. DAT SCRIPPS 80 -70.17 42.80 '44098 ' 999. 
DAT SCRIPPS 80 -70.141 43.525 '44007 ' 5.0 DAT NDBC 80 -70.651 42.346 '44013 ' 5.0 DAT NDBC 80 @@ -424,7 +424,7 @@ $ Western Atlantic (NT) spectra (4) NE states north of VA (1) -76.266 37.201 '44072 ' 3.0 DAT NDBC 80 -75.334 37.757 '44089 ' 999.0 DAT NDBC 80 -70.329 41.840 '44090 ' 999.0 DAT NDBC 80 - -73.769 39.778 '44091 ' 999.0 DAT NDBC 80 + -73.77 39.77 '44091 ' 999.0 DAT NDBC 80 -70.632 42.942 '44092 ' 999.0 DAT NDBC 80 -73.106 40.585 '44094 ' 999.0 DAT NDBC 80 -63.408 44.500 '44172 ' 999.0 DAT NDBC 360 diff --git a/parm/wave/wave_gfs.buoys.full b/parm/wave/wave_gfs.buoys.full index f3e9cc0444..086de45582 100755 --- a/parm/wave/wave_gfs.buoys.full +++ b/parm/wave/wave_gfs.buoys.full @@ -67,7 +67,7 @@ $ Gulf of Alaska (AG) Spectral data (4) near S/SW Alaska Anchorage (8) -152.230 59.050 '46079 ' 4.9 DAT NDBC 45 -152.233 59.049 '46105 ' 2.0 DAT NDBC 45 -147.992 59.925 '46107 ' 2.0 DAT NDBC 45 - -165.446 64.489 '46265 ' 2.0 DAT NDBC 45 + -165.475 64.473 '46265 ' 2.0 DAT NDBC 45 $AGGA48 $ $AGGA47 @@ -117,12 +117,12 @@ $ Eastern Pacific (PZ) spectral data (4) near Pacific states and SBC (6) -118.00 32.50 '46086 ' 5.0 DAT NDBC 45 -125.77 45.88 '46089 ' 5.0 DAT NDBC 45 -124.74 40.29 '46213 ' 999. DAT SCRIPPS 25 - -123.47 37.95 '46214 ' 999. DAT SCRIPPS 45 + -123.465 37.9403 '46214 ' 999. DAT SCRIPPS 45 -119.80 34.33 '46216 ' 999. DAT SCRIPPS 15 -119.43 34.17 '46217 ' 999. DAT SCRIPPS 15 -120.78 34.45 '46218 ' 999. DAT SCRIPPS 25 -119.88 33.22 '46219 ' 999. DAT SCRIPPS 45 - -118.63 33.85 '46221 ' 999. DAT SCRIPPS 15 + -118.641 33.8599 '46221 ' 999. DAT SCRIPPS 15 -118.32 33.62 '46222 ' 999. DAT SCRIPPS 15 -117.77 33.46 '46223 ' 999. DAT SCRIPPS 15 -117.47 33.18 '46224 ' 999. DAT SCRIPPS 15 @@ -130,7 +130,7 @@ $ Eastern Pacific (PZ) spectral data (4) near Pacific states and SBC (6) -117.44 32.63 '46227 ' 999. DAT SCRIPPS 15 -124.55 43.77 '46229 ' 999. DAT SCRIPPS 25 -117.37 32.75 '46231 ' 999. DAT SCRIPPS 15 - -117.421 32.530 '46232 ' 999. DAT SCRIPPS 15 + -117.425 32.517 '46232 ' 999. DAT SCRIPPS 15 -117.75 32.64 'SGX01 ' 999. VBY NCEP 25 -118.00 30.00 'TPC50 ' 999. VBY NCEP 360 -135.00 20.00 'TPC51 ' 999. VBY NCEP 360 @@ -161,7 +161,7 @@ $ Eastern Pacific (PZ) spectral data (4) near Pacific states and SBC (6) -124.13 46.22 '46243 ' 999. DAT SCRIPPS 45 -124.36 40.89 '46244 ' 999. DAT SCRIPPS 45 -145.20 50.033 '46246 ' 999. DAT SCRIPPS 45 - -124.67 46.13 '46248 ' 999. DAT SCRIPPS 45 + -124.644 46.133 '46248 ' 999. 
DAT SCRIPPS 45 -119.200 33.000 '46024 ' 10.0 DAT NDBC 45 -121.899 36.835 '46091 ' 4.0 DAT NDBC 45 -122.030 36.750 '46092 ' 4.0 DAT NDBC 45 @@ -169,7 +169,7 @@ $ Eastern Pacific (PZ) spectral data (4) near Pacific states and SBC (6) -124.300 44.642 '46094 ' 3.0 DAT NDBC 45 -124.304 44.639 '46097 ' 4.5 DAT NDBC 45 -124.956 44.381 '46098 ' 4.5 DAT NDBC 45 - -122.351 36.723 '46114 ' 999.0 DAT NDBC 45 + -122.33 36.685 '46114 ' 999.0 DAT NDBC 45 -124.313 40.753 '46212 ' 999.0 DAT NDBC 45 -117.353 32.848 '46226 ' 999.0 DAT NDBC 45 -117.320 32.936 '46233 ' 3.0 DAT NDBC 45 @@ -178,7 +178,7 @@ $ Eastern Pacific (PZ) spectral data (4) near Pacific states and SBC (6) -122.833 37.753 '46247 ' 999.0 DAT NDBC 45 -119.708 33.821 '46249 ' 999.0 DAT NDBC 45 -119.090 34.034 '46250 ' 999.0 DAT NDBC 45 - -119.550 33.760 '46251 ' 999.0 DAT NDBC 45 + -119.564 33.769 '46251 ' 999.0 DAT NDBC 45 -119.257 33.953 '46252 ' 999.0 DAT NDBC 45 -118.181 33.576 '46253 ' 999.0 DAT NDBC 45 -117.267 32.868 '46254 ' 999.0 DAT NDBC 45 @@ -247,9 +247,9 @@ $ Hawaiian waters (HW) spectra (4) in Pacific Ocean and Pacific Isles (0) -153.87 0.02 '51028 ' 5.0 DAT NDBC 11 -158.303 21.096 '51200 ' 999.0 DAT NDBC 11 -159.574 22.285 '51208 ' 999. DAT SCRIPPS 11 - -170.493 -14.264 '51209 ' 999.0 DAT NDBC 360 + -170.5 -14.273 '51209 ' 999.0 DAT NDBC 360 -157.756 21.477 '51210 ' 999.0 DAT NDBC 11 - -134.667 7.630 '52212 ' 999.0 DAT NDBC 360 + 134.670 7.692 '52212 ' 999.0 DAT NDBC 360 -157.959 21.297 '51211 ' 999.0 DAT NDBC 360 -158.150 21.323 '51212 ' 999.0 DAT NDBC 360 -157.003 20.750 '51213 ' 999.0 DAT NDBC 360 @@ -274,7 +274,7 @@ $ Western Pacific (PW) spectra (4) in Pacific Ocean and Pacific Isles (0) 127.50 34.00 '22103 ' 999. DAT SKOREA 100 128.90 34.77 '22104 ' 999. DAT SKOREA 100 130.00 37.53 '22105 ' 999. DAT SKOREA 100 - 171.40 7.09 '52201 ' 999. DAT SCRIPPS 360 + 171.391 7.038 '52201 ' 999. DAT SCRIPPS 360 144.80 13.68 '52202 ' 999. DAT SCRIPPS 360 145.66 15.27 '52211 ' 999. DAT SCRIPPS 360 145.00 14.70 'SAIPAN_W ' 999. VBY NCEP 360 @@ -353,7 +353,7 @@ $ Gulf of Mexico (GX) spectra (4) south from NC and Puerto Rico (2) -90.46 27.50 '42041 ' 5.0 DAT NDBC 90 -92.55 27.42 '42038 ' 5.0 DAT NDBC 90 -94.05 22.01 '42055 ' 10.0 DAT NDBC 360 - -84.274 27.345 '42099 ' 999. DAT SCRIPPS 100 + -84.275 27.348 '42099 ' 999. DAT SCRIPPS 100 -86.00 23.00 'TPC26 ' 999. VBY NCEP 360 -87.55 30.06 '42012 ' 5.0 DAT NDBC 90 -88.49 28.19 '42887 ' 48.2 DAT BP 90 @@ -441,7 +441,7 @@ $ Western Atlantic (NT) spectra (4) south from NC and Puerto Rico (2) -70.99 24.00 '41046 ' 5.0 DAT NDBC 90 -71.49 27.47 '41047 ' 10.0 DAT NDBC 90 -69.65 31.98 '41048 ' 10.0 DAT NDBC 90 - -81.29 30.72 '41112 ' 999. DAT SCRIPPS 30 + -81.292 30.709 '41112 ' 999. DAT SCRIPPS 30 -80.53 28.40 '41113 ' 999. DAT SCRIPPS 30 -80.22 27.55 '41114 ' 999. DAT SCRIPPS 30 -74.84 36.61 '44014 ' 5.0 DAT NDBC 90 @@ -518,7 +518,7 @@ $ Western Atlantic (NT) spectra (4) NE states north of VA (1) -75.492 36.872 '44093 ' 999. DAT SCRIPPS 80 -75.33 35.75 '44095 ' 999. DAT SCRIPPS 80 -75.809 37.023 '44096 ' 999. DAT SCRIPPS 80 - -71.12 40.98 '44097 ' 999. DAT SCRIPPS 80 + -71.126 40.967 '44097 ' 999. DAT SCRIPPS 80 -70.17 42.80 '44098 ' 999. 
DAT SCRIPPS 80 -70.141 43.525 '44007 ' 5.0 DAT NDBC 80 -70.651 42.346 '44013 ' 5.0 DAT NDBC 80 @@ -544,7 +544,7 @@ $ Western Atlantic (NT) spectra (4) NE states north of VA (1) -76.266 37.201 '44072 ' 3.0 DAT NDBC 80 -75.334 37.757 '44089 ' 999.0 DAT NDBC 80 -70.329 41.840 '44090 ' 999.0 DAT NDBC 80 - -73.769 39.778 '44091 ' 999.0 DAT NDBC 80 + -73.77 39.77 '44091 ' 999.0 DAT NDBC 80 -70.632 42.942 '44092 ' 999.0 DAT NDBC 80 -73.106 40.585 '44094 ' 999.0 DAT NDBC 80 -63.408 44.500 '44172 ' 999.0 DAT NDBC 360 diff --git a/parm/wave/ww3_grib2.glo_025.inp.tmpl b/parm/wave/ww3_grib2.glo_025.inp.tmpl new file mode 100755 index 0000000000..ddfabdb13d --- /dev/null +++ b/parm/wave/ww3_grib2.glo_025.inp.tmpl @@ -0,0 +1,9 @@ +$ WAVEWATCH-III gridded output input file +$ ---------------------------------------- +TIME DT NT +N +FLAGS +$ +TIME 7 MODNR GRIDNR 0 0 +$ +$ end of input file diff --git a/parm/wave/ww3_grib2.reg025.inp.tmpl b/parm/wave/ww3_grib2.reg025.inp.tmpl new file mode 100755 index 0000000000..ddfabdb13d --- /dev/null +++ b/parm/wave/ww3_grib2.reg025.inp.tmpl @@ -0,0 +1,9 @@ +$ WAVEWATCH-III gridded output input file +$ ---------------------------------------- +TIME DT NT +N +FLAGS +$ +TIME 7 MODNR GRIDNR 0 0 +$ +$ end of input file diff --git a/parm/wave/ww3_shel.gfs.inp.tmpl b/parm/wave/ww3_shel.gfs.inp.tmpl new file mode 100644 index 0000000000..0b9b335e1b --- /dev/null +++ b/parm/wave/ww3_shel.gfs.inp.tmpl @@ -0,0 +1,42 @@ +$ -------------------------------------------------------------------- $ +$ WAVEWATCH III shel input file $ +$ -------------------------------------------------------------------- $ +$ Include ice and mud parameters only if IC1/2/3/4 used : + F F Water levels + CURRLINE + WINDLINE + ICELINE + F F Atmospheric momentum + F F Air density + F Assimilation data : Mean parameters + F Assimilation data : 1-D spectra + F Assimilation data : 2-D spectra +$ + RUN_BEG + RUN_END +$ +$ IOSTYP + IOSRV +$ + OUT_BEG DTFLD OUT_END GOFILETYPE + N + OUTPARS +$ + OUT_BEG DTPNT OUT_END POFILETYPE +BUOY_FILE +$ + OUT_BEG 0 OUT_END +$ +$ Keep next two lines formatting as is to allow proper parsing + RST_BEG DTRST RST_END RSTTYPE +RST_2_BEG DT_2_RST RST_2_END +$ + OUT_BEG 0 OUT_END +$ + OUT_BEG 0 OUT_END +$ + 'the_end' 0 +$ + 'STP' +$ +$ End of input file diff --git a/scripts/exgdas_atmos_chgres_forenkf.sh b/scripts/exgdas_atmos_chgres_forenkf.sh new file mode 100755 index 0000000000..25d034ef47 --- /dev/null +++ b/scripts/exgdas_atmos_chgres_forenkf.sh @@ -0,0 +1,191 @@ +#! /usr/bin/env bash +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exgdas_atmos_chgres_forenkf.sh +# Script description: Runs chgres on full-resolution forecast for EnKF recentering +# +# Author: Cory Martin Org: NCEP/EMC Date: 2020-06-08 +# +# Abstract: This script runs chgres on full-resolution forecast for later +# use in the EnKF recentering step +# +# $Id$ +# +# Attributes: +# Language: POSIX shell +# +################################################################################ + +source "$HOMEgfs/ush/preamble.sh" + +# Directories. 
+pwd=$(pwd) +export FIXgsm=${FIXgsm:-$HOMEgfs/fix/am} + +# Base variables +CDATE=${CDATE:-"2001010100"} +CDUMP=${CDUMP:-"enkfgdas"} +GDUMP=${GDUMP:-"gdas"} + +# Derived base variables +GDATE=$($NDATE -$assim_freq $CDATE) +BDATE=$($NDATE -3 $CDATE) +PDY=$(echo $CDATE | cut -c1-8) +cyc=$(echo $CDATE | cut -c9-10) +bPDY=$(echo $BDATE | cut -c1-8) +bcyc=$(echo $BDATE | cut -c9-10) + +# Utilities +export NCP=${NCP:-"/bin/cp"} +export NMV=${NMV:-"/bin/mv"} +export NLN=${NLN:-"/bin/ln -sf"} +export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"} +export NCLEN=${NCLEN:-$HOMEgfs/ush/getncdimlen} + +# IAU +DOIAU=${DOIAU:-"NO"} +export IAUFHRS=${IAUFHRS:-"6"} + +# Dependent Scripts and Executables +export APRUN_CHGRES=${APRUN_CHGRES:-${APRUN:-""}} +export CHGRESNCEXEC=${CHGRESNCEXEC:-$HOMEgfs/exec/enkf_chgres_recenter_nc.x} +export NTHREADS_CHGRES=${NTHREADS_CHGRES:-1} +APRUNCFP=${APRUNCFP:-""} + +# OPS flags +RUN=${RUN:-""} +SENDECF=${SENDECF:-"NO"} +SENDDBN=${SENDDBN:-"NO"} + +# level info file +SIGLEVEL=${SIGLEVEL:-${FIXgsm}/global_hyblev.l${LEVS}.txt} + +# forecast files +APREFIX=${APREFIX:-""} +APREFIX_ENS=${APREFIX_ENS:-""} +# at full resolution +ATMF03=${ATMF03:-${COM_ATMOS_HISTORY}/${APREFIX}atmf003.nc} +ATMF04=${ATMF04:-${COM_ATMOS_HISTORY}/${APREFIX}atmf004.nc} +ATMF05=${ATMF05:-${COM_ATMOS_HISTORY}/${APREFIX}atmf005.nc} +ATMF06=${ATMF06:-${COM_ATMOS_HISTORY}/${APREFIX}atmf006.nc} +ATMF07=${ATMF07:-${COM_ATMOS_HISTORY}/${APREFIX}atmf007.nc} +ATMF08=${ATMF08:-${COM_ATMOS_HISTORY}/${APREFIX}atmf008.nc} +ATMF09=${ATMF09:-${COM_ATMOS_HISTORY}/${APREFIX}atmf009.nc} +# at ensemble resolution +ATMF03ENS=${ATMF03ENS:-${COM_ATMOS_HISTORY}/${APREFIX}atmf003.ensres.nc} +ATMF04ENS=${ATMF04ENS:-${COM_ATMOS_HISTORY}/${APREFIX}atmf004.ensres.nc} +ATMF05ENS=${ATMF05ENS:-${COM_ATMOS_HISTORY}/${APREFIX}atmf005.ensres.nc} +ATMF06ENS=${ATMF06ENS:-${COM_ATMOS_HISTORY}/${APREFIX}atmf006.ensres.nc} +ATMF07ENS=${ATMF07ENS:-${COM_ATMOS_HISTORY}/${APREFIX}atmf007.ensres.nc} +ATMF08ENS=${ATMF08ENS:-${COM_ATMOS_HISTORY}/${APREFIX}atmf008.ensres.nc} +ATMF09ENS=${ATMF09ENS:-${COM_ATMOS_HISTORY}/${APREFIX}atmf009.ensres.nc} +ATMFCST_ENSRES=${ATMFCST_ENSRES:-${COM_ATMOS_HISTORY_MEM}/${APREFIX_ENS}atmf006.nc} + +# Set script / GSI control parameters +DOHYBVAR=${DOHYBVAR:-"NO"} +lrun_subdirs=${lrun_subdirs:-".true."} +USE_CFP=${USE_CFP:-"NO"} +CFP_MP=${CFP_MP:-"NO"} +nm="" +if [ $CFP_MP = "YES" ]; then + nm=0 +fi +if [ $DOHYBVAR = "YES" ]; then + l_hyb_ens=.true. + export l4densvar=${l4densvar:-".false."} + export lwrite4danl=${lwrite4danl:-".false."} +else + echo "DOHYBVAR != YES, this script will exit without regridding deterministic forecast" + exit 0 +fi + +################################################################################ +################################################################################ + +# get resolution information +LONB_ENKF=${LONB_ENKF:-$($NCLEN $ATMFCST_ENSRES grid_xt)} # get LONB_ENKF +LATB_ENKF=${LATB_ENKF:-$($NCLEN $ATMFCST_ENSRES grid_yt)} # get LATB_ENFK +LEVS_ENKF=${LEVS_ENKF:-$($NCLEN $ATMFCST_ENSRES pfull)} # get LATB_ENFK + +############################################################## +# If analysis increment is written by GSI, regrid forecasts to increment resolution +if [ $DO_CALC_ANALYSIS == "YES" ]; then + $NLN $ATMF06 fcst.06 + $NLN $ATMF06ENS fcst.ensres.06 + $NLN $ATMFCST_ENSRES atmens_fcst + if [ $DOHYBVAR = "YES" -a $l4densvar = ".true." -a $lwrite4danl = ".true." 
]; then + $NLN $ATMF03 fcst.03 + $NLN $ATMF03ENS fcst.ensres.03 + $NLN $ATMF04 fcst.04 + $NLN $ATMF04ENS fcst.ensres.04 + $NLN $ATMF05 fcst.05 + $NLN $ATMF05ENS fcst.ensres.05 + $NLN $ATMF07 fcst.07 + $NLN $ATMF07ENS fcst.ensres.07 + $NLN $ATMF08 fcst.08 + $NLN $ATMF08ENS fcst.ensres.08 + $NLN $ATMF09 fcst.09 + $NLN $ATMF09ENS fcst.ensres.09 + fi + export OMP_NUM_THREADS=$NTHREADS_CHGRES + SIGLEVEL=${SIGLEVEL:-${FIXgsm}/global_hyblev.l${LEVS_ENKF}.txt} + + if [ $USE_CFP = "YES" ]; then + [[ -f $DATA/mp_chgres.sh ]] && rm $DATA/mp_chgres.sh + fi + + nfhrs=$(echo $IAUFHRS_ENKF | sed 's/,/ /g') + for FHR in $nfhrs; do + echo "Regridding deterministic forecast for forecast hour $FHR" + rm -f chgres_nc_gauss0$FHR.nml +cat > chgres_nc_gauss0$FHR.nml << EOF +&chgres_setup +i_output=$LONB_ENKF +j_output=$LATB_ENKF +input_file="fcst.0$FHR" +output_file="fcst.ensres.0$FHR" +terrain_file="atmens_fcst" +ref_file="atmens_fcst" +/ +EOF + if [ $USE_CFP = "YES" ]; then + echo "$nm $APRUN_CHGRES $CHGRESNCEXEC chgres_nc_gauss0$FHR.nml" | tee -a $DATA/mp_chgres.sh + if [ ${CFP_MP:-"NO"} = "YES" ]; then + nm=$((nm+1)) + fi + else + + export pgm=$CHGRESNCEXEC + . prep_step + + $APRUN_CHGRES $CHGRESNCEXEC chgres_nc_gauss0$FHR.nml + export err=$?; err_chk + fi + done + + if [ $USE_CFP = "YES" ]; then + chmod 755 $DATA/mp_chgres.sh + ncmd=$(cat $DATA/mp_chgres.sh | wc -l) + if [ $ncmd -gt 0 ]; then + ncmd_max=$((ncmd < npe_node_max ? ncmd : npe_node_max)) + APRUNCFP_CHGRES=$(eval echo $APRUNCFP) + + export pgm=$CHGRESNCEXEC + . prep_step + + $APRUNCFP_CHGRES $DATA/mp_chgres.sh + export err=$?; err_chk + fi + fi + +else + echo "DO_CALC_ANALYSIS != YES, doing nothing" +fi + + +################################################################################ +# Postprocessing +cd $pwd + +exit $err diff --git a/scripts/exgdas_atmos_gempak_gif_ncdc.sh b/scripts/exgdas_atmos_gempak_gif_ncdc.sh deleted file mode 100755 index 884ae1cf25..0000000000 --- a/scripts/exgdas_atmos_gempak_gif_ncdc.sh +++ /dev/null @@ -1,63 +0,0 @@ -#!/bin/sh -############################################################## -# Add the NCDC GIF processing to the end of the gempak_gif job -# There is no timing issue with the NCDC GIF, so it is -# okay to just add it here. If timing becomes a problem -# in the future, we should move it above somewhere else. -############################################################## -export PS4='exgempakgif_ncdc:$SECONDS + ' -set -xa - -cd $DATA -msg="The NCDC GIF processing has begun" -postmsg "$jlogfile" "$msg" - -export NTS=$USHgempak/restore - -if [ $MODEL = GDAS ] -then - case $MODEL in - GDAS) fcsthrs="000";; - esac - - export fhr - for fhr in $fcsthrs - do - icnt=1 - maxtries=180 - while [ $icnt -lt 1000 ] - do - if [ -r ${COMIN}/${RUN}_${PDY}${cyc}f${fhr} ] ; then - break - else - sleep 20 - let "icnt=icnt+1" - fi - if [ $icnt -ge $maxtries ] - then - msg="ABORTING after 1 hour of waiting for F$fhr to end." - err_exit $msg - fi - done - - cp ${COMIN}/${RUN}_${PDY}${cyc}f${fhr} gem_grids${fhr}.gem - export err=$? - if [[ $err -ne 0 ]] ; then - echo " File: ${COMIN}/${RUN}_${PDY}${cyc}f${fhr} does not exist." - exit $err - fi - - if [ $cyc -eq 00 -o $cyc -eq 12 ] - then - $USHgempak/gempak_${RUN}_f${fhr}_gif.sh - if [ ! 
-f $USHgempak/gempak_${RUN}_f${fhr}_gif.sh ] ; then - echo "WARNING: $USHgempak/gempak_${RUN}_f${fhr}_gif.sh FILE is missing" - msg=" $USHgempak/gempak_${RUN}_f${fhr}_gif.sh file is missing " - postmsg "jlogfile" "$msg" - fi - fi - - done -fi - -exit diff --git a/scripts/exgdas_atmos_nawips.sh b/scripts/exgdas_atmos_nawips.sh index 48146edf60..725cb0223f 100755 --- a/scripts/exgdas_atmos_nawips.sh +++ b/scripts/exgdas_atmos_nawips.sh @@ -1,30 +1,27 @@ -#!/bin/ksh +#! /usr/bin/env bash + ################################################################### -echo "----------------------------------------------------" -echo "exnawips - convert NCEP GRIB files into GEMPAK Grids" -echo "----------------------------------------------------" -echo "History: Mar 2000 - First implementation of this new script." -echo "S Lilly: May 2008 - add logic to make sure that all of the " -echo " data produced from the restricted ECMWF" -echo " data on the CCS is properly protected." +# echo "----------------------------------------------------" +# echo "exnawips - convert NCEP GRIB files into GEMPAK Grids" +# echo "----------------------------------------------------" +# echo "History: Mar 2000 - First implementation of this new script." +# echo "S Lilly: May 2008 - add logic to make sure that all of the " +# echo " data produced from the restricted ECMWF" +# echo " data on the CCS is properly protected." ##################################################################### -set -xa +source "$HOMEgfs/ush/preamble.sh" "${2}" cd $DATA -RUN=$1 +RUN2=$1 fend=$2 DBN_ALERT_TYPE=$3 +destination=$4 -export 'PS4=$RUN:$SECONDS + ' - -DATA_RUN=$DATA/$RUN +DATA_RUN=$DATA/$RUN2 mkdir -p $DATA_RUN cd $DATA_RUN -msg="Begin job for $job" -postmsg "$jlogfile" "$msg" - cp $FIXgempak/g2varswmo2.tbl g2varswmo2.tbl export err=$? if [[ $err -ne 0 ]] ; then @@ -74,32 +71,25 @@ pdsext=no maxtries=180 fhcnt=$fstart while [ $fhcnt -le $fend ] ; do - typeset -Z3 fhr - - fhr=$fhcnt + fhr=$(printf "%03d" $fhcnt) fhcnt3=$(expr $fhr % 3) - fhr3=$fhcnt - typeset -Z3 fhr3 + fhr3=$(printf "%03d" $fhcnt) - GEMGRD=${RUN}_${PDY}${cyc}f${fhr3} + GEMGRD=${RUN2}_${PDY}${cyc}f${fhr3} - if [ $RUN = "gdas_0p25" ]; then - export GRIBIN=$COMIN/${model}.${cycle}.pgrb2.0p25.f${fhr} - if [ ! -f $GRIBIN ] ; then - echo "WARNING: $GRIBIN FILE is missing" - msg=" $GRIBIN file is missing " - postmsg "$jlogfile" "$msg" + if [[ ${RUN2} = "gdas_0p25" ]]; then + export GRIBIN=${COM_ATMOS_GRIB_0p25}/${model}.${cycle}.pgrb2.0p25.f${fhr} + if [[ ! -f ${GRIBIN} ]] ; then + echo "WARNING: ${GRIBIN} FILE is missing" fi - GRIBIN_chk=$COMIN/${model}.${cycle}.pgrb2.0p25.f${fhr}.idx + GRIBIN_chk=${COM_ATMOS_GRIB_0p25}${model}.${cycle}.pgrb2.0p25.f${fhr}.idx else - export GRIBIN=$COMIN/${model}.${cycle}.pgrb2.1p00.f${fhr} - if [ ! -f $GRIBIN ] ; then - echo "WARNING: $GRIBIN FILE is missing" - msg=" $GRIBIN file is missing " - postmsg "$jlogfile" "$msg" + export GRIBIN=${COM_ATMOS_GRIB_1p00}/${model}.${cycle}.pgrb2.1p00.f${fhr} + if [[ ! -f ${GRIBIN} ]] ; then + echo "WARNING: ${GRIBIN} FILE is missing" fi - GRIBIN_chk=$COMIN/${model}.${cycle}.pgrb2.1p00.f${fhr}.idx + GRIBIN_chk=${COM_ATMOS_GRIB_1p00}/${model}.${cycle}.pgrb2.1p00.f${fhr}.idx fi icnt=1 @@ -109,15 +99,13 @@ while [ $fhcnt -le $fend ] ; do sleep 5 break else - msg="The process is waiting ... ${GRIBIN_chk} file to proceed." - postmsg "${jlogfile}" "$msg" + echo "The process is waiting ... ${GRIBIN_chk} file to proceed." 
sleep 20 let "icnt=icnt+1" fi if [ $icnt -ge $maxtries ] then - msg="ABORTING: after 1 hour of waiting for ${GRIBIN_chk} file at F$fhr to end." - postmsg "${jlogfile}" "$msg" + echo "ABORTING: after 1 hour of waiting for ${GRIBIN_chk} file at F$fhr to end." export err=7 ; err_chk exit $err fi @@ -148,17 +136,17 @@ EOF export err=$?;err_chk if [ $SENDCOM = "YES" ] ; then - cp $GEMGRD $COMOUT/.$GEMGRD + cp "${GEMGRD}" "${destination}/.${GEMGRD}" export err=$? - if [[ $err -ne 0 ]] ; then - echo " File $GEMGRD does not exist." - exit $err + if [[ ${err} -ne 0 ]] ; then + echo " File ${GEMGRD} does not exist." + exit "${err}" fi - mv $COMOUT/.$GEMGRD $COMOUT/$GEMGRD - if [ $SENDDBN = "YES" ] ; then - $DBNROOT/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \ - $COMOUT/$GEMGRD + mv "${destination}/.${GEMGRD}" "${destination}/${GEMGRD}" + if [[ ${SENDDBN} = "YES" ]] ; then + "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \ + "${destination}/${GEMGRD}" else echo "##### DBN_ALERT_TYPE is: ${DBN_ALERT_TYPE} #####" fi @@ -173,16 +161,6 @@ done $GEMEXE/gpend ##################################################################### -# GOOD RUN -set +x -echo "**************JOB $RUN NAWIPS COMPLETED NORMALLY ON THE IBM" -echo "**************JOB $RUN NAWIPS COMPLETED NORMALLY ON THE IBM" -echo "**************JOB $RUN NAWIPS COMPLETED NORMALLY ON THE IBM" -set -x -##################################################################### -msg='Job completed normally.' -echo $msg -postmsg "$jlogfile" "$msg" ############################### END OF SCRIPT ####################### diff --git a/scripts/exgdas_atmos_post.sh b/scripts/exgdas_atmos_post.sh new file mode 100755 index 0000000000..c49be8b0b8 --- /dev/null +++ b/scripts/exgdas_atmos_post.sh @@ -0,0 +1,335 @@ +#! /usr/bin/env bash + +##################################################################### +# echo "-----------------------------------------------------" +# echo " exgdas_nceppost.sh" +# echo " Sep 07 - Chuang - Modified script to run unified post" +# echo " July 14 - Carlis - Changed to 0.25 deg grib2 master file" +# echo " Feb 16 - Lin - Modify to use Vertical Structure" +# echo " Aug 17 - Meng - Modify to use 3-digit forecast hour naming" +# echo " master and flux files" +# echo " Dec 17 - Meng - Link sfc data file to flxfile " +# echo " since fv3gfs does not output sfc files any more." +# echo " Dec 17 - Meng - Add fv3gfs_downstream_nems.sh for pgb processing " +# echo " and remove writing data file to /nwges" +# echo " Jan 18 - Meng - For EE2 standard, move IDRT POSTGPVARS setting" +# echo " from j-job script." +# echo " Feb 18 - Meng - Removed legacy setting for generating grib1 data" +# echo " and reading sigio model outputs." +# echo " Aug 20 - Meng - Remove .ecf extentsion per EE2 review." +# echo " Sep 20 - Meng - Update clean up files per EE2 review." +# echo " Mar 21 - Meng - Update POSTGRB2TBL default setting." +# echo " Oct 21 - Meng - Remove jlogfile for wcoss2 transition." +# echo " Feb 22 - Lin - Exception handling if anl input not found." 
+# echo "-----------------------------------------------------" +##################################################################### + +source "${HOMEgfs}/ush/preamble.sh" + +cd "${DATA}" || exit 1 + +export POSTGPSH=${POSTGPSH:-${USHgfs}/gfs_post.sh} +export GFSDOWNSH=${GFSDOWNSH:-${USHgfs}/fv3gfs_downstream_nems.sh} +export GFSDWNSH=${GFSDWNSH:-${USHgfs}/fv3gfs_dwn_nems.sh} +export TRIMRH=${TRIMRH:-${USHgfs}/trim_rh.sh} +export MODICEC=${MODICEC:-${USHgfs}/mod_icec.sh} +export INLINE_POST=${INLINE_POST:-".false."} + +############################################################ +# Define Variables: +# ----------------- +# fhr is the current forecast hour. +# SLEEP_TIME is the number of seconds to sleep before exiting with error. +# SLEEP_INT is the number of seconds to sleep between restrt file checks. +# restart_file is the name of the file to key off of to kick off post. +############################################################ + +export IO=${LONB:-1440} +export JO=${LATB:-721} +# specify default model output format: 3 for sigio and 4 +# for nemsio +export OUTTYP=${OUTTYP:-4} +export PREFIX=${PREFIX:-${RUN}.t${cyc}z.} +export machine=${machine:-WCOSS2} + +########################### +# Specify Output layers +########################### +export POSTGPVARS="KPO=57,PO=1000.,975.,950.,925.,900.,875.,850.,825.,800.,775.,750.,725.,700.,675.,650.,625.,600.,575.,550.,525.,500.,475.,450.,425.,400.,375.,350.,325.,300.,275.,250.,225.,200.,175.,150.,125.,100.,70.,50.,40.,30.,20.,15.,10.,7.,5.,3.,2.,1.,0.7,0.4,0.2,0.1,0.07,0.04,0.02,0.01,rdaod=.true.," + +########################################################## +# Specify variable to directly output pgrb2 files for GDAS/GFS +########################################################## +export IDRT=${IDRT:-0} # IDRT=0 is setting for outputting grib files on lat/lon grid + +############################################################ +# Post Analysis Files before starting the Forecast Post +############################################################ +# Chuang: modify to process analysis when post_times is 00 +stime="$(echo "${post_times}" | cut -c1-3)" +export stime +export loganl="${COM_ATMOS_ANALYSIS}/${PREFIX}atmanl.nc" + +if [[ "${stime}" = "anl" ]]; then + if [[ -f "${loganl}" ]]; then + # add new environmental variables for running new ncep post + # Validation date + + export VDATE=${PDY}${cyc} + + # set outtyp to 1 because we need to run chgres in the post before model start running chgres + # otherwise set to 0, then chgres will not be executed in global_nceppost.sh + + export OUTTYP=${OUTTYP:-4} + + # specify output file name from chgres which is input file name to nceppost + # if model already runs gfs io, make sure GFSOUT is linked to the gfsio file + # new imported variable for global_nceppost.sh + + export GFSOUT=${RUN}.${cycle}.gfsioanl + + # specify smaller control file for GDAS because GDAS does not + # produce flux file, the default will be /nwprod/parm/gfs_cntrl.parm + + if [[ "${GRIBVERSION}" = 'grib2' ]]; then + export POSTGRB2TBL=${POSTGRB2TBL:-${g2tmpl_ROOT}/share/params_grib2_tbl_new} + export PostFlatFile=${PostFlatFile:-${PARMpost}/postxconfig-NT-GFS-ANL.txt} + export CTLFILE=${PARMpost}/postcntrl_gfs_anl.xml + fi + + [[ -f flxfile ]] && rm flxfile ; [[ -f nemsfile ]] && rm nemsfile + + ln -fs "${COM_ATMOS_ANALYSIS}/${PREFIX}atmanl.nc" nemsfile + export NEMSINP=nemsfile + ln -fs "${COM_ATMOS_ANALYSIS}/${PREFIX}sfcanl.nc" flxfile + export FLXINP=flxfile + export PGBOUT=pgbfile + export PGIOUT=pgifile + export 
PGBOUT2=pgbfile.grib2 + export PGIOUT2=pgifile.grib2.idx + export IGEN="${IGEN_ANL}" + export FILTER=0 + + # specify fhr even for analysis because postgp uses it + # export fhr=00 + + ${POSTGPSH} + export err=$?; err_chk + + if [[ "${GRIBVERSION}" = 'grib2' ]]; then + mv "${PGBOUT}" "${PGBOUT2}" + + #Proces pgb files + export FH=-1 + export downset=${downset:-1} + ${GFSDOWNSH} + export err=$?; err_chk + fi + + if [[ "${SENDCOM}" = 'YES' ]]; then + if [[ "${GRIBVERSION}" = 'grib2' ]]; then + MASTERANL=${PREFIX}master.grb2anl + ##########XXW Accord to Boi, fortran index should use *if${fhr}, wgrib index use .idx + #MASTERANLIDX=${RUN}.${cycle}.master.grb2${fhr3}.idx + MASTERANLIDX=${PREFIX}master.grb2ianl + cp "${PGBOUT2}" "${COM_ATMOS_MASTER}/${MASTERANL}" + ${GRB2INDEX} "${PGBOUT2}" "${COM_ATMOS_MASTER}/${MASTERANLIDX}" + fi + + if [[ "${SENDDBN}" = 'YES' ]]; then + run="$(echo "${RUN}" | tr '[:lower:]' '[:upper:]')" + if [[ "${GRIBVERSION}" = 'grib2' ]]; then + "${DBNROOT}/bin/dbn_alert" MODEL "${run}_MSC_sfcanl" "${job}" "${COM_ATMOS_ANALYSIS}/${PREFIX}sfcanl.nc" + "${DBNROOT}/bin/dbn_alert" MODEL "${run}_SA" "${job}" "${COM_ATMOS_ANALYSIS}/${PREFIX}atmanl.nc" + "${DBNROOT}/bin/dbn_alert" MODEL "GDAS_PGA_GB2" "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.anl" + "${DBNROOT}/bin/dbn_alert" MODEL "GDAS_PGA_GB2_WIDX" "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.anl.idx" + fi + fi + fi + rm pgbfile.grib2 + else + #### atmanl file not found need failing job + echo " *** FATAL ERROR: No model anl file output " + export err=9 + err_chk + fi +else ## not_anl if_stimes + SLEEP_LOOP_MAX=$(( SLEEP_TIME / SLEEP_INT )) + + ############################################################ + # Loop Through the Post Forecast Files + ############################################################ + + for fhr in ${post_times}; do + # Enforce decimal math expressions + d_fhr=$((10#${fhr})) + ############################### + # Start Looping for the + # existence of the restart files + ############################### + export pgm="postcheck" + ic=1 + while (( ic <= SLEEP_LOOP_MAX )); do + if [[ -f "${restart_file}${fhr}.txt" ]]; then + break + else + ic=$(( ic + 1 )) + sleep "${SLEEP_INT}" + fi + ############################### + # If we reach this point assume + # fcst job never reached restart + # period and error exit + ############################### + if (( ic == SLEEP_LOOP_MAX )); then + echo " *** FATAL ERROR: No model output for f${fhr} " + export err=9 + err_chk + fi + done + + ############################### + # Put restart files into /nwges + # for backup to start Model Fcst + ############################### + [[ -f flxfile ]] && rm flxfile + [[ -f nemsfile ]] && rm nemsfile + ln -sf "${COM_ATMOS_HISTORY}/${PREFIX}atmf${fhr}.nc" nemsfile + export NEMSINP=nemsfile + ln -sf "${COM_ATMOS_HISTORY}/${PREFIX}sfcf${fhr}.nc" flxfile + export FLXINP=flxfile + + if (( d_fhr > 0 )); then + export IGEN=${IGEN_FCST} + else + export IGEN=${IGEN_ANL} + fi + + # add new environmental variables for running new ncep post + # Validation date + + # No shellcheck, NDATE is not a typo + # shellcheck disable=SC2153 + VDATE="$(${NDATE} "+${fhr}" "${PDY}${cyc}")" + # shellcheck disable= + export VDATE + + # set to 3 to output lat/lon grid + + export OUTTYP=${OUTTYP:-4} + + if [[ "${GRIBVERSION}" = 'grib2' ]]; then + export POSTGRB2TBL="${POSTGRB2TBL:-${g2tmpl_ROOT}/share/params_grib2_tbl_new}" + export PostFlatFile="${PARMpost}/postxconfig-NT-GFS.txt" + if [[ "${RUN}" = gfs ]]; then + export IGEN="${IGEN_GFS}" + if 
(( d_fhr > 0 )); then export IGEN="${IGEN_FCST}" ; fi + else + export IGEN="${IGEN_GDAS_ANL}" + if (( d_fhr > 0 )); then export IGEN="${IGEN_FCST}" ; fi + fi + if [[ "${RUN}" = gfs ]]; then + if (( d_fhr == 0 )); then + export PostFlatFile="${PARMpost}/postxconfig-NT-GFS-F00.txt" + export CTLFILE="${PARMpost}/postcntrl_gfs_f00.xml" + else + export CTLFILE="${CTLFILEGFS:-${PARMpost}/postcntrl_gfs.xml}" + fi + else + if (( d_fhr == 0 )); then + export PostFlatFile="${PARMpost}/postxconfig-NT-GFS-F00.txt" + export CTLFILE="${CTLFILEGFS:-${PARMpost}/postcntrl_gfs_f00.xml}" + else + export CTLFILE="${CTLFILEGFS:-${PARMpost}/postcntrl_gfs.xml}" + fi + fi + fi + + export FLXIOUT=flxifile + export PGBOUT=pgbfile + export PGIOUT=pgifile + export PGBOUT2=pgbfile.grib2 + export PGIOUT2=pgifile.grib2.idx + export FILTER=0 + export fhr3=${fhr} + if [[ "${GRIBVERSION}" = 'grib2' ]]; then + MASTERFHR=${PREFIX}master.grb2f${fhr} + MASTERFHRIDX=${PREFIX}master.grb2if${fhr} + fi + + if [[ "${INLINE_POST}" = ".false." ]]; then + ${POSTGPSH} + else + cp "${COM_ATMOS_MASTER}/${MASTERFHR}" "${PGBOUT}" + fi + export err=$?; err_chk + + if [[ "${GRIBVERSION}" = 'grib2' ]]; then + mv "${PGBOUT}" "${PGBOUT2}" + fi + + #wm Process pgb files + export FH=$(( 10#${fhr} + 0 )) + export downset=${downset:-1} + ${GFSDOWNSH} + export err=$?; err_chk + + if [[ "${SENDDBN}" = "YES" ]]; then + run="$(echo "${RUN}" | tr '[:lower:]' '[:upper:]')" + "${DBNROOT}/bin/dbn_alert" MODEL "${run}_PGB2_0P25" "${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2.0p25.f${fhr}" + "${DBNROOT}/bin/dbn_alert" MODEL "${run}_PGB2_0P25_WIDX ""${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2.0p25.f${fhr}.idx" + "${DBNROOT}/bin/dbn_alert" MODEL "${run}_PGB_GB2" "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.f${fhr}" + "${DBNROOT}/bin/dbn_alert" MODEL "${run}_PGB_GB2_WIDX" "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.f${fhr}.idx" + fi + + + if [[ "${SENDCOM}" = 'YES' ]]; then + if [[ "${GRIBVERSION}" = 'grib2' ]]; then + if [[ "${INLINE_POST}" = ".false." ]]; then + cp "${PGBOUT2}" "${COM_ATMOS_MASTER}/${MASTERFHR}" + fi + ${GRB2INDEX} "${PGBOUT2}" "${COM_ATMOS_MASTER}/${MASTERFHRIDX}" + fi + + # Model generated flux files will be in nemsio after FY17 upgrade + # use post to generate Grib2 flux files + + if (( OUTTYP == 4 )) ; then + export NEMSINP=${COM_ATMOS_HISTORY}/${PREFIX}atmf${fhr}.nc + export FLXINP=${COM_ATMOS_HISTORY}/${PREFIX}sfcf${fhr}.nc + if (( d_fhr == 0 )); then + export PostFlatFile=${PARMpost}/postxconfig-NT-GFS-FLUX-F00.txt + export CTLFILE=${PARMpost}/postcntrl_gfs_flux_f00.xml + else + export PostFlatFile=${PARMpost}/postxconfig-NT-GFS-FLUX.txt + export CTLFILE=${PARMpost}/postcntrl_gfs_flux.xml + fi + export PGBOUT=fluxfile + export FILTER=0 + FLUXFL=${PREFIX}sfluxgrbf${fhr}.grib2 + FLUXFLIDX=${PREFIX}sfluxgrbf${fhr}.grib2.idx + + if [[ "${INLINE_POST}" = ".false." 
]]; then + ${POSTGPSH} + export err=$?; err_chk + mv fluxfile "${COM_ATMOS_MASTER}/${FLUXFL}" + fi + ${WGRIB2} -s "${COM_ATMOS_MASTER}/${FLUXFL}" > "${COM_ATMOS_MASTER}/${FLUXFLIDX}" + fi + + if [[ "${SENDDBN}" = 'YES' ]] && [[ "${RUN}" = 'gdas' ]] && (( d_fhr % 3 == 0 )); then + "${DBNROOT}/bin/dbn_alert" MODEL "${run}_SF" "${job}" "${COM_ATMOS_HISTORY}/${PREFIX}atmf${fhr}.nc" + "${DBNROOT}/bin/dbn_alert" MODEL "${run}_BF" "${job}" "${COM_ATMOS_HISTORY}/${PREFIX}sfcf${fhr}.nc" + "${DBNROOT}/bin/dbn_alert" MODEL "${run}_SGB_GB2" "${job}" "${COM_ATMOS_MASTER}/${PREFIX}sfluxgrbf${fhr}.grib2" + "${DBNROOT}/bin/dbn_alert" MODEL "${run}_SGB_GB2_WIDX ""${job}" "${COM_ATMOS_MASTER}/${PREFIX}sfluxgrbf${fhr}.grib2.idx" + fi + fi + + [[ -f pgbfile.grib2 ]] && rm pgbfile.grib2 + [[ -f flxfile ]] && rm flxfile + done +fi ## end_if_times + +exit 0 + +################## END OF SCRIPT ####################### diff --git a/scripts/exgdas_atmos_verfozn.sh b/scripts/exgdas_atmos_verfozn.sh new file mode 100755 index 0000000000..aa686284be --- /dev/null +++ b/scripts/exgdas_atmos_verfozn.sh @@ -0,0 +1,85 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +################################################################################ +# exgdas_vrfyozn.sh +# +# This script runs the data extract/validation portion of the Ozone Monitor +# (OznMon) DA package. +# +################################################################################ +err=0 + +#------------------------------------------------------------------------------- +# Set environment +# +export RUN_ENVIR=${RUN_ENVIR:-nco} +export NET=${NET:-gfs} +export RUN=${RUN:-gdas} +export envir=${envir:-prod} + +# Other variables +export SATYPE_FILE=${SATYPE_FILE:-$FIXgdas_ozn/gdas_oznmon_satype.txt} +export PDATE=${PDY}${cyc} +export DO_DATA_RPT=${DO_DATA_RPT:-1} +export NCP=${NCP:-/bin/cp} + + +#----------------------------------------------------------------- +# ensure work and TANK dirs exist, verify oznstat is available +# +export OZN_WORK_DIR=${OZN_WORK_DIR:-$(pwd)} + +if [[ ! -d ${OZN_WORK_DIR} ]]; then + mkdir $OZN_WORK_DIR +fi +cd $OZN_WORK_DIR + +if [[ ! -d ${TANKverf_ozn} ]]; then + mkdir -p $TANKverf_ozn +fi + +if [[ -s ${oznstat} ]]; then + echo ${oznstat} is available +fi + + + +data_available=0 + +if [[ -s ${oznstat} ]]; then + data_available=1 + + #------------------------------------------------------------------ + # Copy data files file to local data directory. + # Untar oznstat file. + #------------------------------------------------------------------ + + $NCP $oznstat ./oznstat.$PDATE + + tar -xvf oznstat.$PDATE + rm oznstat.$PDATE + + netcdf=0 + count=$(ls diag* | grep ".nc4" | wc -l) + if [ $count -gt 0 ] ; then + netcdf=1 + for filenc4 in $(ls diag*nc4.gz); do + file=$(echo $filenc4 | cut -d'.' -f1-2).gz + mv $filenc4 $file + done + fi + + export OZNMON_NETCDF=${netcdf} + + ${HOMEoznmon}/ush/ozn_xtrct.sh + err=$? + +else + # oznstat file not found + err=1 +fi + +exit ${err} + diff --git a/scripts/exgdas_atmos_verfrad.sh b/scripts/exgdas_atmos_verfrad.sh new file mode 100755 index 0000000000..5306fbbdba --- /dev/null +++ b/scripts/exgdas_atmos_verfrad.sh @@ -0,0 +1,212 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +################################################################################ +#### UNIX Script Documentation Block +# . . 
+# Script name: exgdas_vrfyrad.sh +# Script description: Runs data extract/validation for global radiance diag data +# +# Author: Ed Safford Org: NP23 Date: 2012-01-18 +# +# Abstract: This script runs the data extract/validation portion of the +# RadMon package. +# +# Condition codes +# 0 - no problem encountered +# >0 - some problem encountered +# +################################################################################ + +export VERBOSE=${VERBOSE:-YES} + +export RUN_ENVIR=${RUN_ENVIR:-nco} +export NET=${NET:-gfs} +export RUN=${RUN:-gdas} +export envir=${envir:-prod} + +# Filenames +biascr=${biascr:-${COM_ATMOS_ANALYSIS}/gdas.t${cyc}z.abias} +radstat=${radstat:-${COM_ATMOS_ANALYSIS}/gdas.t${cyc}z.radstat} +satype_file=${satype_file:-${FIXgdas}/gdas_radmon_satype.txt} + +# Other variables +export RAD_AREA=${RAD_AREA:-glb} +export MAKE_CTL=${MAKE_CTL:-1} +export MAKE_DATA=${MAKE_DATA:-1} +export USE_ANL=${USE_ANL:-1} +export PDATE=${PDY}${cyc} +export DO_DIAG_RPT=${DO_DIAG_RPT:-1} +export DO_DATA_RPT=${DO_DATA_RPT:-1} +export NCP=${NCP:-/bin/cp} + +########################################################################### +# ensure TANK dir exists, verify radstat and biascr are available +# +if [[ ! -d ${TANKverf_rad} ]]; then + mkdir -p $TANKverf_rad +fi + +if [[ "$VERBOSE" = "YES" ]]; then + if [[ -s ${radstat} ]]; then + echo ${radstat} is available + fi + if [[ -s ${biascr} ]]; then + echo ${biascr} is available + fi +fi +##################################################################### + +data_available=0 +if [[ -s ${radstat} && -s ${biascr} ]]; then + data_available=1 + + #------------------------------------------------------------------ + # Copy data files file to local data directory. + # Untar radstat file. + #------------------------------------------------------------------ + + $NCP $biascr ./biascr.$PDATE + $NCP $radstat ./radstat.$PDATE + + tar -xvf radstat.$PDATE + rm radstat.$PDATE + + #------------------------------------------------------------------ + # SATYPE is the list of expected satellite/instrument sources + # in the radstat file. It should be stored in the $TANKverf + # directory. If it isn't there then use the $FIXgdas copy. In all + # cases write it back out to the radmon.$PDY directory. Add any + # new sources to the list before writing back out. + #------------------------------------------------------------------ + + radstat_satype=$(ls d*ges* | awk -F_ '{ print $2 "_" $3 }') + if [[ "$VERBOSE" = "YES" ]]; then + echo $radstat_satype + fi + + echo satype_file = $satype_file + + #------------------------------------------------------------------ + # Get previous cycle's date, and look for the satype_file. Using + # the previous cycle will get us the previous day's directory if + # the cycle being processed is 00z. + #------------------------------------------------------------------ + if [[ $cyc = "00" ]]; then + use_tankdir=${TANKverf_radM1} + else + use_tankdir=${TANKverf_rad} + fi + + echo satype_file = $satype_file + export SATYPE=$(cat ${satype_file}) + + + #------------------------------------------------------------- + # Update the SATYPE if any new sat/instrument was + # found in $radstat_satype. Write the SATYPE contents back + # to $TANKverf/radmon.$PDY. 
+ #------------------------------------------------------------- + satype_changes=0 + new_satype=$SATYPE + for type in ${radstat_satype}; do + test=$(echo $SATYPE | grep $type | wc -l) + + if [[ $test -eq 0 ]]; then + if [[ "$VERBOSE" = "YES" ]]; then + echo "Found $type in radstat file but not in SATYPE list. Adding it now." + fi + satype_changes=1 + new_satype="$new_satype $type" + fi + done + + + #------------------------------------------------------------------ + # Rename the diag files and uncompress + #------------------------------------------------------------------ + netcdf=0 + + for type in ${SATYPE}; do + + if [[ netcdf -eq 0 && -e diag_${type}_ges.${PDATE}.nc4.${Z} ]]; then + netcdf=1 + fi + + if [[ $(find . -maxdepth 1 -type f -name "diag_${type}_ges.${PDATE}*.${Z}" | wc -l) -gt 0 ]]; then + mv diag_${type}_ges.${PDATE}*.${Z} ${type}.${Z} + ${UNCOMPRESS} ./${type}.${Z} + else + echo "WARNING: diag_${type}_ges.${PDATE}*.${Z} not available, skipping" + fi + + if [[ $USE_ANL -eq 1 ]]; then + if [[ $(find . -maxdepth 1 -type f -name "diag_${type}_anl.${PDATE}*.${Z}" | wc -l) -gt 0 ]]; then + mv diag_${type}_anl.${PDATE}*.${Z} ${type}_anl.${Z} + ${UNCOMPRESS} ./${type}_anl.${Z} + else + echo "WARNING: diag_${type}_anl.${PDATE}*.${Z} not available, skipping" + fi + fi + done + + export RADMON_NETCDF=$netcdf + + + #------------------------------------------------------------------ + # Run the child sccripts. + #------------------------------------------------------------------ + ${USHradmon}/radmon_verf_angle.sh ${PDATE} + rc_angle=$? + + ${USHradmon}/radmon_verf_bcoef.sh ${PDATE} + rc_bcoef=$? + + ${USHradmon}/radmon_verf_bcor.sh "${PDATE}" + rc_bcor=$? + + ${USHradmon}/radmon_verf_time.sh "${PDATE}" + rc_time=$? + + #-------------------------------------- + # optionally run clean_tankdir script + # + if [[ ${CLEAN_TANKVERF:-0} -eq 1 ]]; then + "${USHradmon}/clean_tankdir.sh" glb 60 + rc_clean_tankdir=$? + echo "rc_clean_tankdir = $rc_clean_tankdir" + fi + +fi + + + +##################################################################### +# Postprocessing + +err=0 +if [[ ${data_available} -ne 1 ]]; then + err=1 +elif [[ $rc_angle -ne 0 ]]; then + err=$rc_angle +elif [[ $rc_bcoef -ne 0 ]]; then + err=$rc_bcoef +elif [[ $rc_bcor -ne 0 ]]; then + err=$rc_bcor +elif [[ $rc_time -ne 0 ]]; then + err=$rc_time +fi + +##################################################################### +# Restrict select sensors and satellites +export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"} +rlist="saphir" +for rtype in $rlist; do + if compgen -G "$TANKverf_rad/*${rtype}*" > /dev/null; then + ${CHGRP_CMD} "${TANKverf_rad}"/*${rtype}* + fi +done + +exit ${err} + diff --git a/scripts/exgdas_atmos_vminmon.sh b/scripts/exgdas_atmos_vminmon.sh new file mode 100755 index 0000000000..2a22fcb0b6 --- /dev/null +++ b/scripts/exgdas_atmos_vminmon.sh @@ -0,0 +1,113 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exgdas_vrfminmon.sh +# Script description: Runs data extract/validation for GSI normalization diag data +# +# Author: Ed Safford Org: NP23 Date: 2015-04-10 +# +# Abstract: This script runs the data extract/validation portion of the +# MinMon package. 
+# +# Condition codes +# 0 - no problem encountered +# >0 - some problem encountered +# +################################################################################ + + +######################################## +# Set environment +######################################## +export RUN_ENVIR=${RUN_ENVIR:-nco} +export NET=${NET:-gfs} +export RUN=${RUN:-gdas} +export envir=${envir:-prod} + +######################################## +# Directories +######################################## +export DATA=${DATA:-$(pwd)} + + +######################################## +# Filenames +######################################## +gsistat=${gsistat:-${COM_ATMOS_ANALYSIS}/gdas.t${cyc}z.gsistat} +export mm_gnormfile=${gnormfile:-${M_FIXgdas}/gdas_minmon_gnorm.txt} +export mm_costfile=${costfile:-${M_FIXgdas}/gdas_minmon_cost.txt} + +######################################## +# Other variables +######################################## +export MINMON_SUFFIX=${MINMON_SUFFIX:-GDAS} +export PDATE=${PDY}${cyc} +export NCP=${NCP:-/bin/cp} +export pgm=exgdas_vrfminmon.sh + +if [[ ! -d ${DATA} ]]; then + mkdir $DATA +fi +cd $DATA + +###################################################################### + +data_available=0 + +if [[ -s ${gsistat} ]]; then + + data_available=1 + + #----------------------------------------------------------------------- + # Copy the $MINMON_SUFFIX.gnorm_data.txt file to the working directory + # It's ok if it doesn't exist; we'll create a new one if needed. + # + # Note: The logic below is to accomodate two different data storage + # methods. Some parallels (and formerly ops) dump all MinMon data for + # a given day in the same directory (if condition). Ops now separates + # data into ${cyc} subdirectories (elif condition). + #----------------------------------------------------------------------- + if [[ -s ${M_TANKverf}/gnorm_data.txt ]]; then + $NCP ${M_TANKverf}/gnorm_data.txt gnorm_data.txt + elif [[ -s ${M_TANKverfM1}/gnorm_data.txt ]]; then + $NCP ${M_TANKverfM1}/gnorm_data.txt gnorm_data.txt + fi + + + #------------------------------------------------------------------ + # Run the child sccripts. + #------------------------------------------------------------------ + ${USHminmon}/minmon_xtrct_costs.pl ${MINMON_SUFFIX} ${PDY} ${cyc} ${gsistat} dummy + rc_costs=$? + echo "rc_costs = $rc_costs" + + ${USHminmon}/minmon_xtrct_gnorms.pl ${MINMON_SUFFIX} ${PDY} ${cyc} ${gsistat} dummy + rc_gnorms=$? + echo "rc_gnorms = $rc_gnorms" + + ${USHminmon}/minmon_xtrct_reduct.pl ${MINMON_SUFFIX} ${PDY} ${cyc} ${gsistat} dummy + rc_reduct=$? + echo "rc_reduct = $rc_reduct" + +fi + +##################################################################### +# Postprocessing + +err=0 +if [[ ${data_available} -ne 1 ]]; then + err=1 +elif [[ $rc_costs -ne 0 ]]; then + err=$rc_costs +elif [[ $rc_gnorms -ne 0 ]]; then + err=$rc_gnorms +elif [[ $rc_reduct -ne 0 ]]; then + err=$rc_reduct +fi + +exit ${err} + diff --git a/scripts/exgdas_enkf_earc.sh b/scripts/exgdas_enkf_earc.sh new file mode 100755 index 0000000000..8f1928042f --- /dev/null +++ b/scripts/exgdas_enkf_earc.sh @@ -0,0 +1,304 @@ +#! /usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################## +# Begin JOB SPECIFIC work +############################################## +export n=$((10#${ENSGRP})) +export CDUMP_ENKF="${EUPD_CYC:-"gdas"}" + +export ARCH_LIST="${COM_TOP}/earc${ENSGRP}" + +# ICS are restarts and always lag INC by $assim_freq hours. 
+EARCINC_CYC=${ARCH_CYC} +EARCICS_CYC=$((ARCH_CYC-assim_freq)) +if [ "${EARCICS_CYC}" -lt 0 ]; then + EARCICS_CYC=$((EARCICS_CYC+24)) +fi + +[[ -d ${ARCH_LIST} ]] && rm -rf "${ARCH_LIST}" +mkdir -p "${ARCH_LIST}" +cd "${ARCH_LIST}" || exit 2 + +"${HOMEgfs}/ush/hpssarch_gen.sh" "${RUN}" +status=$? +if [ "${status}" -ne 0 ]; then + echo "${HOMEgfs}/ush/hpssarch_gen.sh ${RUN} failed, ABORT!" + exit "${status}" +fi + +cd "${ROTDIR}" || exit 2 + +source "${HOMEgfs}/ush/file_utils.sh" + +################################################################### +# ENSGRP > 0 archives a group of ensemble members +firstday=$(${NDATE} +24 "${SDATE}") +if (( 10#${ENSGRP} > 0 )) && [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then + +#--set the archiving command and create local directories, if necessary + TARCMD="htar" + if [[ ${LOCALARCH} = "YES" ]]; then + TARCMD="tar" + if [[ ! -d "${ATARDIR}/${PDY}${cyc}" ]]; then mkdir -p "${ATARDIR}/${PDY}${cyc}"; fi + fi + +#--determine when to save ICs for warm start + SAVEWARMICA="NO" + SAVEWARMICB="NO" + mm="${PDY:4:2}" + dd="${PDY:6:2}" + nday=$(( (10#${mm}-1)*30+10#${dd} )) + mod=$((nday % ARCH_WARMICFREQ)) + if [ "${PDY}${cyc}" -eq "${firstday}" ] && [ "${cyc}" -eq "${EARCINC_CYC}" ]; then SAVEWARMICA="YES" ; fi + if [ "${PDY}${cyc}" -eq "${firstday}" ] && [ "${cyc}" -eq "${EARCICS_CYC}" ]; then SAVEWARMICB="YES" ; fi + if [ "${mod}" -eq 0 ] && [ "${cyc}" ] && [ "${EARCINC_CYC}" ]; then SAVEWARMICA="YES" ; fi + if [ "${mod}" -eq 0 ] && [ "${cyc}" ] && [ "${EARCICS_CYC}" ]; then SAVEWARMICB="YES" ; fi + + if [ "${EARCICS_CYC}" -eq 18 ]; then + nday1=$((nday+1)) + mod1=$((nday1 % ARCH_WARMICFREQ)) + if [ "${mod1}" -eq 0 ] && [ "${cyc}" -eq "${EARCICS_CYC}" ] ; then SAVEWARMICB="YES" ; fi + if [ "${mod1}" -ne 0 ] && [ "${cyc}" -eq "${EARCICS_CYC}" ] ; then SAVEWARMICB="NO" ; fi + if [ "${PDY}${cyc}" -eq "${SDATE}" ] && [ "${cyc}" -eq "${EARCICS_CYC}" ] ; then SAVEWARMICB="YES" ; fi + fi + + if [ "${PDY}${cyc}" -gt "${SDATE}" ]; then # Don't run for first half cycle + + ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${RUN}_grp${ENSGRP}.tar" $(cat "${ARCH_LIST}/${RUN}_grp${n}.txt") + status=$? + if [ "${status}" -ne 0 ] && [ "${PDY}${cyc}" -ge "${firstday}" ]; then + echo "FATAL ERROR: ${TARCMD} ${PDY}${cyc} ${RUN}_grp${ENSGRP}.tar failed" + exit "${status}" + fi + + if [ "${SAVEWARMICA}" = "YES" ] && [ "${cyc}" -eq "${EARCINC_CYC}" ]; then + ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${RUN}_restarta_grp${ENSGRP}.tar" $(cat "${ARCH_LIST}/${RUN}_restarta_grp${n}.txt") + status=$? + if [ "${status}" -ne 0 ]; then + echo "FATAL ERROR: ${TARCMD} ${PDY}${cyc} ${RUN}_restarta_grp${ENSGRP}.tar failed" + exit "${status}" + fi + fi + + if [ "${SAVEWARMICB}" = "YES" ] && [ "${cyc}" -eq "${EARCICS_CYC}" ]; then + ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${RUN}_restartb_grp${ENSGRP}.tar" $(cat "${ARCH_LIST}/${RUN}_restartb_grp${n}.txt") + status=$? + if [ "${status}" -ne 0 ]; then + echo "FATAL ERROR: ${TARCMD} ${PDY}${cyc} ${RUN}_restartb_grp${ENSGRP}.tar failed" + exit "${status}" + fi + fi + + fi # CDATE>SDATE + +fi + + +################################################################### +# ENSGRP 0 archives ensemble means and copy data to online archive +if [ "${ENSGRP}" -eq 0 ]; then + + if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then + + #--set the archiving command and create local directories, if necessary + TARCMD="htar" + HSICMD="hsi" + if [[ ${LOCALARCH} = "YES" ]]; then + TARCMD="tar" + HSICMD="" + if [[ ! 
-d "${ATARDIR}/${PDY}${cyc}" ]]; then mkdir -p "${ATARDIR}/${PDY}${cyc}"; fi + fi + + set +e + ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${RUN}.tar" $(cat "${ARCH_LIST}/${RUN}.txt") + status=$? + ${HSICMD} chgrp rstprod "${ATARDIR}/${PDY}${cyc}/${RUN}.tar" + ${HSICMD} chmod 640 "${ATARDIR}/${PDY}${cyc}/${RUN}.tar" + if (( status != 0 && ${PDY}${cyc} >= firstday )); then + echo "FATAL ERROR: ${TARCMD} ${PDY}${cyc} ${RUN}.tar failed" + exit "${status}" + fi + set_strict + fi + + #-- Archive online for verification and diagnostics + [[ ! -d ${ARCDIR} ]] && mkdir -p "${ARCDIR}" + cd "${ARCDIR}" || exit 2 + + nb_copy "${COM_ATMOS_ANALYSIS_ENSSTAT}/${RUN}.t${cyc}z.enkfstat" \ + "enkfstat.${RUN}.${PDY}${cyc}" + nb_copy "${COM_ATMOS_ANALYSIS_ENSSTAT}/${RUN}.t${cyc}z.gsistat.ensmean" \ + "gsistat.${RUN}.${PDY}${cyc}.ensmean" +fi + + +if [[ "${DELETE_COM_IN_ARCHIVE_JOB:-YES}" == NO ]] ; then + exit 0 +fi + +############################################################### +# ENSGRP 0 also does clean-up +############################################################### +if [[ "${ENSGRP}" -eq 0 ]]; then + function remove_files() { + # TODO: move this to a new location + local directory=$1 + shift + if [[ ! -d ${directory} ]]; then + echo "No directory ${directory} to remove files from, skiping" + return + fi + local exclude_list="" + if (($# > 0)); then + exclude_list=$* + fi + local file_list + declare -a file_list + # Suppress warnings about chained commands suppressing exit codes + # shellcheck disable=SC2312 + readarray -t file_list < <(find -L "${directory}" -type f) + if (( ${#file_list[@]} == 0 )); then return; fi + for exclude in ${exclude_list}; do + echo "Excluding ${exclude}" + declare -a file_list_old=("${file_list[@]}") + # Suppress warnings about chained commands suppressing exit codes + # shellcheck disable=SC2312 + readarray file_list < <(printf -- '%s\n' "${file_list_old[@]}" | grep -v "${exclude}") + if (( ${#file_list[@]} == 0 )); then return; fi + done + + for file in "${file_list[@]}"; do + rm -f "${file}" + done + # Remove directory if empty + rmdir "${directory}" || true + } + + # Start start and end dates to remove + GDATEEND=$(${NDATE} -"${RMOLDEND_ENKF:-24}" "${PDY}${cyc}") + GDATE=$(${NDATE} -"${RMOLDSTD_ENKF:-120}" "${PDY}${cyc}") + + while [ "${GDATE}" -le "${GDATEEND}" ]; do + + gPDY="${GDATE:0:8}" + gcyc="${GDATE:8:2}" + + if [[ -d ${COM_TOP} ]]; then + rocotolog="${EXPDIR}/logs/${GDATE}.log" + if [[ -f "${rocotolog}" ]]; then + set +e + # Suppress warnings about chained commands suppressing exit codes + # shellcheck disable=SC2312 + testend=$(tail -n 1 "${rocotolog}" | grep "This cycle is complete: Success") + rc=$? 
+ set_strict + if [ "${rc}" -eq 0 ]; then + case ${CDUMP} in + gdas) nmem="${NMEM_ENS}";; + gfs) nmem="${NMEM_ENS_GFS}";; + *) + echo "FATAL ERROR: Unknown CDUMP ${CDUMP} during cleanup" + exit 10 + ;; + esac + + readarray memlist< <(seq --format="mem%03g" 1 "${nmem}") + memlist+=("ensstat") + + for mem in "${memlist[@]}"; do + # Atmos + exclude_list="f006.ens" + # Suppress warnings about chained commands suppressing exit codes + # shellcheck disable=SC2312 + templates=$(compgen -A variable | grep 'COM_ATMOS_.*_TMPL') + for template in ${templates}; do + MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" "${exclude_list[@]}" + done + + # Wave + exclude_list="" + # Suppress warnings about chained commands suppressing exit codes + # shellcheck disable=SC2312 + templates=$(compgen -A variable | grep 'COM_WAVE_.*_TMPL') + for template in ${templates}; do + MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" "${exclude_list[@]}" + done + + # Ocean + exclude_list="" + # Suppress warnings about chained commands suppressing exit codes + # shellcheck disable=SC2312 + templates=$(compgen -A variable | grep 'COM_OCEAN_.*_TMPL') + for template in ${templates}; do + YMEMDIR="${mem}" MD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" "${exclude_list[@]}" + done + + # Ice + exclude_list="" + # Suppress warnings about chained commands suppressing exit codes + # shellcheck disable=SC2312 + templates=$(compgen -A variable | grep 'COM_ICE_.*_TMPL') + for template in ${templates}; do + MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" "${exclude_list[@]}" + done + + # Aerosols (GOCART) + exclude_list="" + # Suppress warnings about chained commands suppressing exit codes + # shellcheck disable=SC2312 + templates=$(compgen -A variable | grep 'COM_CHEM_.*_TMPL') + for template in ${templates}; do + MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" "${exclude_list[@]}" + done + + # Mediator + exclude_list="" + # Suppress warnings about chained commands suppressing exit codes + # shellcheck disable=SC2312 + templates=$(compgen -A variable | grep 'COM_MED_.*_TMPL') + for template in ${templates}; do + MEMDIR="${mem}" YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" "${exclude_list[@]}" + done + done + fi + fi + fi + + # Remove any empty directories + YMD=${gPDY} HH=${gcyc} generate_com target_dir:COM_TOP_TMPL + target_dir="${ROTDIR:?}/${RUN}.${gPDY}/${gcyc}/" + if [[ -d ${target_dir} ]]; then + find "${target_dir}" -empty -type d -delete + fi + + # Advance to next cycle + GDATE=$(${NDATE} +"${assim_freq}" "${GDATE}") + done +fi + +# Remove enkf*.$rPDY for the older of GDATE or RDATE +GDATE=$(${NDATE} -"${RMOLDSTD_ENKF:-120}" "${PDY}${cyc}") +fhmax=${FHMAX_GFS} +RDATE=$(${NDATE} -"${fhmax}" "${PDY}${cyc}") +if [ "${GDATE}" -lt "${RDATE}" ]; then + RDATE=${GDATE} +fi +rPDY=$(echo "${RDATE}" | cut -c1-8) +clist="enkfgdas enkfgfs" +for ctype in ${clist}; do + COMIN="${ROTDIR}/${ctype}.${rPDY}" + [[ -d ${COMIN} ]] && rm -rf "${COMIN}" +done + +############################################################### + + +exit 0 diff --git a/scripts/exgdas_enkf_ecen.sh b/scripts/exgdas_enkf_ecen.sh new file mode 100755 index 0000000000..de603cba3f --- /dev/null +++ b/scripts/exgdas_enkf_ecen.sh @@ -0,0 +1,361 @@ 
+#! /usr/bin/env bash + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exgdas_enkf_ecen.sh +# Script description: recenter ensemble around hi-res deterministic analysis +# +# Author: Rahul Mahajan Org: NCEP/EMC Date: 2017-03-02 +# +# Abstract: This script recenters ensemble around hi-res deterministic analysis +# +# $Id$ +# +# Attributes: +# Language: POSIX shell +# +################################################################################ + +source "$HOMEgfs/ush/preamble.sh" + +# Directories. +pwd=$(pwd) + +# Base variables +CDATE=${CDATE:-"2010010100"} +CDUMP=${CDUMP:-"gdas"} +DONST=${DONST:-"NO"} +export CASE=${CASE:-384} +ntiles=${ntiles:-6} + +# Utilities +NCP=${NCP:-"/bin/cp -p"} +NLN=${NLN:-"/bin/ln -sf"} +NCLEN=${NCLEN:-$HOMEgfs/ush/getncdimlen} + +# Scripts + +# Executables. +GETATMENSMEANEXEC=${GETATMENSMEANEXEC:-$HOMEgfs/exec/getsigensmeanp_smooth.x} +GETSFCENSMEANEXEC=${GETSFCENSMEANEXEC:-$HOMEgfs/exec/getsfcensmeanp.x} +RECENATMEXEC=${RECENATMEXEC:-$HOMEgfs/exec/recentersigp.x} +CALCINCNEMSEXEC=${CALCINCNEMSEXEC:-$HOMEgfs/exec/calc_increment_ens.x} +CALCINCNCEXEC=${CALCINCEXEC:-$HOMEgfs/exec/calc_increment_ens_ncio.x} + +# Files. +OPREFIX=${OPREFIX:-""} +OSUFFIX=${OSUFFIX:-""} +APREFIX=${APREFIX:-""} +APREFIX_ENS=${APREFIX_ENS:-$APREFIX} +GPREFIX=${GPREFIX:-""} +GPREFIX_ENS=${GPREFIX_ENS:-$GPREFIX} + +# Variables +NMEM_ENS=${NMEM_ENS:-80} +imp_physics=${imp_physics:-99} +INCREMENTS_TO_ZERO=${INCREMENTS_TO_ZERO:-"'NONE'"} +DOIAU=${DOIAU_ENKF:-"NO"} +FHMIN=${FHMIN_ECEN:-3} +FHMAX=${FHMAX_ECEN:-9} +FHOUT=${FHOUT_ECEN:-3} +FHSFC=${FHSFC_ECEN:-$FHMIN} +if [ $CDUMP = "enkfgfs" ]; then + DO_CALC_INCREMENT=${DO_CALC_INCREMENT_ENKF_GFS:-"NO"} +else + DO_CALC_INCREMENT=${DO_CALC_INCREMENT:-"NO"} +fi + +# global_chgres stuff +CHGRESNEMS=${CHGRESNEMS:-$HOMEgfs/exec/enkf_chgres_recenter.x} +CHGRESNC=${CHGRESNC:-$HOMEgfs/exec/enkf_chgres_recenter_nc.x} +NTHREADS_CHGRES=${NTHREADS_CHGRES:-24} +APRUN_CHGRES=${APRUN_CHGRES:-""} + +# global_cycle stuff +CYCLESH=${CYCLESH:-$HOMEgfs/ush/global_cycle.sh} +export CYCLEXEC=${CYCLEXEC:-$HOMEgfs/exec/global_cycle} +APRUN_CYCLE=${APRUN_CYCLE:-${APRUN:-""}} +NTHREADS_CYCLE=${NTHREADS_CYCLE:-${NTHREADS:-1}} +export FIXfv3=${FIXfv3:-$HOMEgfs/fix/orog} +export FIXgsm=${FIXgsm:-$HOMEgfs/fix/am} +export CYCLVARS=${CYCLVARS:-"FSNOL=-2.,FSNOS=99999.,"} +export FHOUR=${FHOUR:-0} +export DELTSFC=${DELTSFC:-6} + + +RECENTER_ENKF=${RECENTER_ENKF:-"YES"} +SMOOTH_ENKF=${SMOOTH_ENKF:-"YES"} + +APRUN_ECEN=${APRUN_ECEN:-${APRUN:-""}} +NTHREADS_ECEN=${NTHREADS_ECEN:-${NTHREADS:-1}} +APRUN_CALCINC=${APRUN_CALCINC:-${APRUN:-""}} +NTHREADS_CALCINC=${NTHREADS_CALCINC:-${NTHREADS:-1}} + +################################################################################ +# Preprocessing +mkdata=NO +if [ ! 
-d $DATA ]; then + mkdata=YES + mkdir -p $DATA +fi +cd $DATA || exit 99 + +ENKF_SUFFIX="s" +[[ $SMOOTH_ENKF = "NO" ]] && ENKF_SUFFIX="" + +################################################################################ +# Link ensemble member guess, analysis and increment files +for FHR in $(seq $FHMIN $FHOUT $FHMAX); do + +for imem in $(seq 1 $NMEM_ENS); do + memchar="mem"$(printf %03i $imem) + + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} generate_com -x \ + COM_ATMOS_ANALYSIS_MEM:COM_ATMOS_ANALYSIS_TMPL + + MEMDIR=${memchar} RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com -x \ + COM_ATMOS_HISTORY_MEM_PREV:COM_ATMOS_HISTORY_TMPL + + ${NLN} "${COM_ATMOS_HISTORY_MEM_PREV}/${GPREFIX_ENS}atmf00${FHR}${ENKF_SUFFIX}.nc" "./atmges_${memchar}" + if [ $DO_CALC_INCREMENT = "YES" ]; then + if [ $FHR -eq 6 ]; then + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}atmanl.nc" "./atmanl_${memchar}" + else + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}atma00${FHR}.nc" "./atmanl_${memchar}" + fi + fi + mkdir -p "${COM_ATMOS_ANALYSIS_MEM}" + if [ $FHR -eq 6 ]; then + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}atminc.nc" "./atminc_${memchar}" + else + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}atmi00${FHR}.nc" "./atminc_${memchar}" + fi + if [[ $RECENTER_ENKF = "YES" ]]; then + if [ $DO_CALC_INCREMENT = "YES" ]; then + if [ $FHR -eq 6 ]; then + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}ratmanl.nc" "./ratmanl_${memchar}" + else + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}ratma00${FHR}.nc" "./ratmanl_${memchar}" + fi + else + if [ $FHR -eq 6 ]; then + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}ratminc.nc" "./ratminc_${memchar}" + else + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX_ENS}ratmi00${FHR}.nc" "./ratminc_${memchar}" + fi + fi + fi +done + +if [ $DO_CALC_INCREMENT = "YES" ]; then + # Link ensemble mean analysis + if [ $FHR -eq 6 ]; then + ${NLN} "${COM_ATMOS_ANALYSIS_STAT}/${APREFIX_ENS}atmanl.ensmean.nc" "./atmanl_ensmean" + else + ${NLN} "${COM_ATMOS_ANALYSIS_STAT}/${APREFIX_ENS}atma00${FHR}.ensmean.nc" "./atmanl_ensmean" + fi + + # Compute ensemble mean analysis + DATAPATH="./" + ATMANLNAME="atmanl" + ATMANLMEANNAME="atmanl_ensmean" + + export OMP_NUM_THREADS=$NTHREADS_ECEN + export pgm=$GETATMENSMEANEXEC + . prep_step + + $NCP $GETATMENSMEANEXEC $DATA + $APRUN_ECEN ${DATA}/$(basename $GETATMENSMEANEXEC) $DATAPATH $ATMANLMEANNAME $ATMANLNAME $NMEM_ENS + export err=$?; err_chk +else + # Link ensemble mean increment + if [ $FHR -eq 6 ]; then + ${NLN} "${COM_ATMOS_ANALYSIS_STAT}/${APREFIX_ENS}atminc.ensmean.nc" "./atminc_ensmean" + else + ${NLN} "${COM_ATMOS_ANALYSIS_STAT}/${APREFIX_ENS}atmi00${FHR}.ensmean.nc" "./atminc_ensmean" + fi + + # Compute ensemble mean increment + DATAPATH="./" + ATMINCNAME="atminc" + ATMINCMEANNAME="atminc_ensmean" + + export OMP_NUM_THREADS=$NTHREADS_ECEN + export pgm=$GETATMENSMEANEXEC + . prep_step + + $NCP $GETATMENSMEANEXEC $DATA + $APRUN_ECEN ${DATA}/$(basename $GETATMENSMEANEXEC) $DATAPATH $ATMINCMEANNAME $ATMINCNAME $NMEM_ENS + export err=$?; err_chk + + # If available, link to ensemble mean guess. Otherwise, compute ensemble mean guess + if [[ -s "${COM_ATMOS_HISTORY_STAT_PREV}/${GPREFIX_ENS}atmf00${FHR}.ensmean.nc" ]]; then + ${NLN} "${COM_ATMOS_HISTORY_STAT_PREV}/${GPREFIX_ENS}atmf00${FHR}.ensmean.nc" "./atmges_ensmean" + else + DATAPATH="./" + ATMGESNAME="atmges" + ATMGESMEANNAME="atmges_ensmean" + + export OMP_NUM_THREADS=$NTHREADS_ECEN + export pgm=$GETATMENSMEANEXEC + . 
prep_step + + $NCP $GETATMENSMEANEXEC $DATA + $APRUN_ECEN ${DATA}/$(basename $GETATMENSMEANEXEC) $DATAPATH $ATMGESMEANNAME $ATMGESNAME $NMEM_ENS + export err=$?; err_chk + fi +fi + +if [ $DO_CALC_INCREMENT = "YES" ]; then + LONB_ENKF=${LONB_ENKF:-$($NCLEN atmanl_ensmean grid_xt)} # get LONB + LATB_ENKF=${LATB_ENKF:-$($NCLEN atmanl_ensmean grid_yt)} # get LATB + LEVS_ENKF=${LEVS_ENKF:-$($NCLEN atmanl_ensmean pfull)} # get LEVS +else + LONB_ENKF=${LONB_ENKF:-$($NCLEN atminc_ensmean lon)} # get LONB + LATB_ENKF=${LATB_ENKF:-$($NCLEN atminc_ensmean lat)} # get LATB + LEVS_ENKF=${LEVS_ENKF:-$($NCLEN atminc_ensmean lev)} # get LEVS +fi +JCAP_ENKF=${JCAP_ENKF:--9999} # there is no jcap in these files +[ $JCAP_ENKF -eq -9999 -a $LATB_ENKF -ne -9999 ] && JCAP_ENKF=$((LATB_ENKF-2)) +[ $LONB_ENKF -eq -9999 -o $LATB_ENKF -eq -9999 -o $LEVS_ENKF -eq -9999 -o $JCAP_ENKF -eq -9999 ] && exit -9999 + +################################################################################ +# This is to give the user the option to recenter, default is YES +if [ $RECENTER_ENKF = "YES" ]; then + + # GSI EnVar analysis + if [ $FHR -eq 6 ]; then + ATMANL_GSI="${COM_ATMOS_ANALYSIS_DET}/${APREFIX}atmanl.nc" + ATMANL_GSI_ENSRES="${COM_ATMOS_ANALYSIS_DET}/${APREFIX}atmanl.ensres.nc" + else + ATMANL_GSI="${COM_ATMOS_ANALYSIS_DET}/${APREFIX}atma00${FHR}.nc" + ATMANL_GSI_ENSRES="${COM_ATMOS_ANALYSIS_DET}/${APREFIX}atma00${FHR}.ensres.nc" + fi + + # if we already have a ensemble resolution GSI analysis then just link to it + if [ -f $ATMANL_GSI_ENSRES ]; then + + $NLN $ATMANL_GSI_ENSRES atmanl_gsi_ensres + + else + + $NLN $ATMANL_GSI atmanl_gsi + $NLN $ATMANL_GSI_ENSRES atmanl_gsi_ensres + SIGLEVEL=${SIGLEVEL:-${FIXgsm}/global_hyblev.l${LEVS}.txt} + $NLN $CHGRESNC chgres.x + chgresnml=chgres_nc_gauss.nml + nmltitle=chgres + + export OMP_NUM_THREADS=$NTHREADS_CHGRES + + [[ -f $chgresnml ]] && rm -f $chgresnml + cat > $chgresnml << EOF +&${nmltitle}_setup + i_output=$LONB_ENKF + j_output=$LATB_ENKF + input_file="atmanl_gsi" + output_file="atmanl_gsi_ensres" + terrain_file="atmanl_ensmean" + vcoord_file="$SIGLEVEL" +/ +EOF + cat $chgresnml + $APRUN_CHGRES ./chgres.x + export err=$?; err_chk + fi + + if [ $DO_CALC_INCREMENT = "YES" ]; then + ################################################################################ + # Recenter ensemble member atmospheric analyses about hires analysis + + FILENAMEIN="atmanl" + FILENAME_MEANIN="atmanl_ensmean" # EnKF ensemble mean analysis + FILENAME_MEANOUT="atmanl_gsi_ensres" # recenter around GSI analysis at ensemble resolution + FILENAMEOUT="ratmanl" + + export OMP_NUM_THREADS=$NTHREADS_ECEN + export pgm=$RECENATMEXEC + . 
prep_step + + $NCP $RECENATMEXEC $DATA + $APRUN_ECEN ${DATA}/$(basename $RECENATMEXEC) $FILENAMEIN $FILENAME_MEANIN $FILENAME_MEANOUT $FILENAMEOUT $NMEM_ENS + export err=$?; err_chk + else + ################################################################################ + # Recenter ensemble member atmospheric increments about hires analysis + + FILENAMEIN="atminc" + FILENAME_INCMEANIN="atminc_ensmean" # EnKF ensemble mean increment + FILENAME_GESMEANIN="atmges_ensmean" # EnKF ensemble mean guess + FILENAME_GSIDET="atmanl_gsi_ensres" # recenter around GSI analysis at ensemble resolution + FILENAMEOUT="ratminc" + + export OMP_NUM_THREADS=$NTHREADS_ECEN + + # make the small namelist file for incvars_to_zero + + [[ -f recenter.nml ]] && rm recenter.nml + cat > recenter.nml << EOF +&recenter + incvars_to_zero = $INCREMENTS_TO_ZERO +/ +EOF +cat recenter.nml + + export pgm=$RECENATMEXEC + . prep_step + + $NCP $RECENATMEXEC $DATA + $APRUN_ECEN ${DATA}/$(basename $RECENATMEXEC) $FILENAMEIN $FILENAME_INCMEANIN $FILENAME_GSIDET $FILENAMEOUT $NMEM_ENS $FILENAME_GESMEANIN + export err=$?; err_chk + fi +fi + +################################################################################ +# Calculate ensemble analysis increment +if [ $DO_CALC_INCREMENT = "YES" ]; then + if [ $RECENTER_ENKF = "YES" ]; then + ATMANLNAME='ratmanl' + else + ATMANLNAME='atmanl' + fi + + export OMP_NUM_THREADS=$NTHREADS_CALCINC + CALCINCEXEC=$CALCINCNCEXEC + + export pgm=$CALCINCEXEC + . prep_step + + $NCP $CALCINCEXEC $DATA + [[ -f calc_increment.nml ]] && rm calc_increment.nml + cat > calc_increment.nml << EOF +&setup + datapath = './' + analysis_filename = '$ATMANLNAME' + firstguess_filename = 'atmges' + increment_filename = 'atminc' + debug = .false. + nens = $NMEM_ENS + imp_physics = $imp_physics +/ +&zeroinc + incvars_to_zero = $INCREMENTS_TO_ZERO +/ +EOF +cat calc_increment.nml + + $APRUN_CALCINC ${DATA}/$(basename $CALCINCEXEC) + export err=$?; err_chk +fi +done # loop over analysis times in window + +################################################################################ + +################################################################################ +# Postprocessing +cd $pwd +[[ $mkdata = "YES" ]] && rm -rf $DATA + + +exit ${err} diff --git a/scripts/exgdas_enkf_fcst.sh b/scripts/exgdas_enkf_fcst.sh new file mode 100755 index 0000000000..bc126d5906 --- /dev/null +++ b/scripts/exgdas_enkf_fcst.sh @@ -0,0 +1,248 @@ +#! /usr/bin/env bash + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exgdas_enkf_fcst.sh +# Script description: Run ensemble forecasts +# +# Author: Rahul Mahajan Org: NCEP/EMC Date: 2017-03-02 +# +# Abstract: This script runs ensemble forecasts serially one-after-another +# +# $Id$ +# +# Attributes: +# Language: POSIX shell +# +#### +################################################################################ + +source "$HOMEgfs/ush/preamble.sh" + +# Directories. +export FIX_DIR=${FIX_DIR:-$HOMEgfs/fix} +export FIX_AM=${FIX_AM:-$FIX_DIR/am} + +# Utilities +export NCP=${NCP:-"/bin/cp -p"} +export NMV=${NMV:-"/bin/mv"} +export NLN=${NLN:-"/bin/ln -sf"} + +# Scripts. +FORECASTSH=${FORECASTSH:-$HOMEgfs/scripts/exglobal_forecast.sh} + +# Enemble group, begin and end +ENSGRP=${ENSGRP:-1} +ENSBEG=${ENSBEG:-1} +ENSEND=${ENSEND:-1} + +# Model builds +export FCSTEXECDIR=${FCSTEXECDIR:-$HOMEgfs/sorc/fv3gfs.fd/BUILD/bin} +export FCSTEXEC=${FCSTEXEC:-fv3gfs.x} + +# Get DA specific diag table. 
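+# Editorial note: the EnKF forecast settings below follow a nested-default
+# convention,
+#   export VAR=${VAR_ENKF:-${VAR:-literal}}
+# i.e. an EnKF-specific override wins if set, otherwise the deterministic
+# value is reused, otherwise the literal default applies.  For example, if
+# DIAG_TABLE_ENKF is unset but DIAG_TABLE is exported by the job, DIAG_TABLE
+# below resolves to that exported value rather than diag_table_da.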
+export PARM_FV3DIAG=${PARM_FV3DIAG:-$HOMEgfs/parm/parm_fv3diag}
+export DIAG_TABLE=${DIAG_TABLE_ENKF:-${DIAG_TABLE:-$PARM_FV3DIAG/diag_table_da}}
+
+# Re-run failed members, or entire group
+RERUN_EFCSGRP=${RERUN_EFCSGRP:-"YES"}
+
+# Recenter flag and increment file prefix
+RECENTER_ENKF=${RECENTER_ENKF:-"YES"}
+export PREFIX_ATMINC=${PREFIX_ATMINC:-""}
+
+# Ops related stuff
+SENDECF=${SENDECF:-"NO"}
+SENDDBN=${SENDDBN:-"NO"}
+
+################################################################################
+# Preprocessing
+cd $DATA || exit 99
+DATATOP=$DATA
+
+################################################################################
+# Set output data
+EFCSGRP="${COM_TOP}/efcs.grp${ENSGRP}"
+if [ -f $EFCSGRP ]; then
+    if [ $RERUN_EFCSGRP = "YES" ]; then
+        rm -f $EFCSGRP
+    else
+        echo "RERUN_EFCSGRP = $RERUN_EFCSGRP, will re-run FAILED members only!"
+        $NMV $EFCSGRP ${EFCSGRP}.fail
+    fi
+fi
+
+################################################################################
+# Set namelist/model config options common to all members once
+
+# There are many model namelist options.
+# Some are resolution (CASE) dependent, some depend on the model configuration
+# and will need to be added here before $FORECASTSH is called.
+# For now assume that
+# 1. the ensemble and the deterministic are the same resolution
+# 2. the ensemble runs with the same configuration as the deterministic
+
+# Model config option for Ensemble
+export TYPE=${TYPE_ENKF:-${TYPE:-nh}}          # choices: nh, hydro
+export MONO=${MONO_ENKF:-${MONO:-non-mono}}    # choices: mono, non-mono
+
+# fv_core_nml
+export CASE=${CASE_ENS:-${CASE:-C768}}
+export layout_x=${layout_x_ENKF:-${layout_x:-8}}
+export layout_y=${layout_y_ENKF:-${layout_y:-16}}
+export LEVS=${LEVS_ENKF:-${LEVS:-64}}
+
+# nggps_diag_nml
+export FHOUT=${FHOUT_ENKF:-3}
+if [[ ${RUN} == "enkfgfs" ]]; then
+    export FHOUT=${FHOUT_ENKF_GFS:-${FHOUT_ENKF:-${FHOUT:-3}}}
+fi
+# model_configure
+export DELTIM=${DELTIM_ENKF:-${DELTIM:-225}}
+export FHMAX=${FHMAX_ENKF:-9}
+if [[ ${RUN} == "enkfgfs" ]]; then
+    export FHMAX=${FHMAX_ENKF_GFS:-${FHMAX_ENKF:-${FHMAX}}}
+fi
+
+export restart_interval=${restart_interval_ENKF:-${restart_interval:-6}}
+
+# gfs_physics_nml
+export FHSWR=${FHSWR_ENKF:-${FHSWR:-3600.}}
+export FHLWR=${FHLWR_ENKF:-${FHLWR:-3600.}}
+export IEMS=${IEMS_ENKF:-${IEMS:-1}}
+export ISOL=${ISOL_ENKF:-${ISOL:-2}}
+export IAER=${IAER_ENKF:-${IAER:-111}}
+export ICO2=${ICO2_ENKF:-${ICO2:-2}}
+export cdmbgwd=${cdmbgwd_ENKF:-${cdmbgwd:-"3.5,0.25"}}
+export dspheat=${dspheat_ENKF:-${dspheat:-".true."}}
+export shal_cnv=${shal_cnv_ENKF:-${shal_cnv:-".true."}}
+export FHZER=${FHZER_ENKF:-${FHZER:-6}}
+export FHCYC=${FHCYC_ENKF:-${FHCYC:-6}}
+
+# Set PREFIX_ATMINC to "r" when recentering is on
+if [ $RECENTER_ENKF = "YES" ]; then
+    export PREFIX_ATMINC="r"
+fi
+
+# Ignore possible spelling error (nothing is misspelled)
+# shellcheck disable=SC2153
+GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}")
+declare -x gPDY="${GDATE:0:8}"
+declare -x gcyc="${GDATE:8:2}"
+
+################################################################################
+# Run forecast for ensemble member
+rc=0
+for imem in $(seq $ENSBEG $ENSEND); do
+
+    cd $DATATOP
+
+    cmem=$(printf %03i $imem)
+    memchar="mem${cmem}"
+
+    echo "Processing MEMBER: $cmem"
+
+    ra=0
+
+    skip_mem="NO"
+    if [ -f ${EFCSGRP}.fail ]; then
+        memstat=$(cat ${EFCSGRP}.fail | grep "MEMBER $cmem" | grep "PASS" | wc -l)
+        [[ $memstat -eq 1 ]] && skip_mem="YES"
+    fi
+
+    # Construct COM variables from templates (see config.com)
+    #
Can't make these read-only because we are looping over members + MEMDIR="${memchar}" YMD=${PDY} HH=${cyc} generate_com -x COM_ATMOS_RESTART COM_ATMOS_INPUT COM_ATMOS_ANALYSIS \ + COM_ATMOS_HISTORY COM_ATMOS_MASTER + + RUN=${rCDUMP} MEMDIR="${memchar}" YMD="${gPDY}" HH="${gcyc}" generate_com -x COM_ATMOS_RESTART_PREV:COM_ATMOS_RESTART_TMPL + + if [[ ${DO_WAVE} == "YES" ]]; then + MEMDIR="${memchar}" YMD=${PDY} HH=${cyc} generate_com -x COM_WAVE_RESTART COM_WAVE_PREP COM_WAVE_HISTORY + RUN=${rCDUMP} MEMDIR="${memchar}" YMD="${gPDY}" HH="${gcyc}" generate_com -x COM_WAVE_RESTART_PREV:COM_WAVE_RESTART_TMPL + fi + + if [[ ${DO_OCN} == "YES" ]]; then + MEMDIR="${memchar}" YMD=${PDY} HH=${cyc} generate_com -x COM_MED_RESTART COM_OCEAN_RESTART \ + COM_OCEAN_INPUT COM_OCEAN_HISTORY COM_OCEAN_ANALYSIS + RUN=${rCDUMP} MEMDIR="${memchar}" YMD="${gPDY}" HH="${gcyc}" generate_com -x COM_OCEAN_RESTART_PREV:COM_OCEAN_RESTART_TMPL + fi + + if [[ ${DO_ICE} == "YES" ]]; then + MEMDIR="${memchar}" YMD=${PDY} HH=${cyc} generate_com -x COM_ICE_HISTORY COM_ICE_INPUT COM_ICE_RESTART + RUN=${rCDUMP} MEMDIR="${memchar}" YMD="${gPDY}" HH="${gcyc}" generate_com -x COM_ICE_RESTART_PREV:COM_ICE_RESTART_TMPL + fi + + if [[ ${DO_AERO} == "YES" ]]; then + MEMDIR="${memchar}" YMD=${PDY} HH=${cyc} generate_com -x COM_CHEM_HISTORY + fi + + + if [ $skip_mem = "NO" ]; then + + ra=0 + + export MEMBER=$imem + export DATA="${DATATOP}/${memchar}" + if [ -d $DATA ]; then rm -rf $DATA; fi + mkdir -p $DATA + $FORECASTSH + ra=$? + + # Notify a member forecast failed and abort + if [ $ra -ne 0 ]; then + err_exit "FATAL ERROR: forecast of member $cmem FAILED. Aborting job" + fi + + rc=$((rc+ra)) + + fi + + if [ $SENDDBN = YES ]; then + fhr=$FHOUT + while [ $fhr -le $FHMAX ]; do + FH3=$(printf %03i $fhr) + if [ $(expr $fhr % 3) -eq 0 ]; then + "${DBNROOT}/bin/dbn_alert" MODEL GFS_ENKF "${job}" "${COM_ATMOS_HISTORY}/${RUN}.t${cyc}z.sfcf${FH3}.nc" + fi + fhr=$((fhr+FHOUT)) + done + fi + + cd $DATATOP + + if [ -s $EFCSGRP ]; then + $NCP $EFCSGRP log_old + fi + [[ -f log ]] && rm log + [[ -f log_new ]] && rm log_new + if [ $ra -ne 0 ]; then + echo "MEMBER $cmem : FAIL" > log + else + echo "MEMBER $cmem : PASS" > log + fi + if [ -s log_old ] ; then + cat log_old log > log_new + else + cat log > log_new + fi + $NCP log_new $EFCSGRP + +done + +################################################################################ +# Echo status of ensemble group +cd $DATATOP +echo "Status of ensemble members in group $ENSGRP:" +cat $EFCSGRP +[[ -f ${EFCSGRP}.fail ]] && rm ${EFCSGRP}.fail + +################################################################################ +# If any members failed, error out +export err=$rc; err_chk + +################################################################################ +# Postprocessing + +exit $err diff --git a/scripts/exgdas_enkf_post.sh b/scripts/exgdas_enkf_post.sh new file mode 100755 index 0000000000..86ab9071a4 --- /dev/null +++ b/scripts/exgdas_enkf_post.sh @@ -0,0 +1,162 @@ +#! /usr/bin/env bash + +################################################################################ +#### UNIX Script Documentation Block +# . . 
+# Script name: exgdas_enkf_post.sh +# Script description: Global ensemble forecast post processing +# +# Author: Rahul Mahajan Org: NCEP/EMC Date: 2017-03-02 +# +# Abstract: This script post-processes global ensemble forecast output +# +# $Id$ +# +# Attributes: +# Language: POSIX shell +# +################################################################################ + +source "$HOMEgfs/ush/preamble.sh" + +# Directories. +pwd=$(pwd) + +# Utilities +NCP=${NCP:-"/bin/cp"} +NLN=${NLN:-"/bin/ln -sf"} + +APRUN_EPOS=${APRUN_EPOS:-${APRUN:-""}} +NTHREADS_EPOS=${NTHREADS_EPOS:-1} + +# Ops stuff +SENDDBN=${SENDDBN:-"NO"} + +# Fix files +LEVS=${LEVS:-64} +HYBENSMOOTH=${HYBENSMOOTH:-$FIXgsi/global_hybens_smoothinfo.l${LEVS}.txt} + +# Executables. +GETATMENSMEANEXEC=${GETATMENSMEANEXEC:-$HOMEgfs/exec/getsigensmeanp_smooth.x} +GETSFCENSMEANEXEC=${GETSFCENSMEANEXEC:-$HOMEgfs/exec/getsfcensmeanp.x} + +# Other variables. +PREFIX=${PREFIX:-""} +FHMIN=${FHMIN_EPOS:-3} +FHMAX=${FHMAX_EPOS:-9} +FHOUT=${FHOUT_EPOS:-3} + +if [[ $CDUMP == "gfs" ]]; then + NMEM_ENS=${NMEM_ENS_GFS:-${NMEM_ENS:-30}} +fi +NMEM_ENS=${NMEM_ENS:-80} +SMOOTH_ENKF=${SMOOTH_ENKF:-"NO"} +ENKF_SPREAD=${ENKF_SPREAD:-"NO"} + +################################################################################ +# Preprocessing +ENKF_SUFFIX="s" +[[ $SMOOTH_ENKF = "NO" ]] && ENKF_SUFFIX="" + +################################################################################ +# Copy executables to working directory +$NCP $GETSFCENSMEANEXEC $DATA +$NCP $GETATMENSMEANEXEC $DATA + +export OMP_NUM_THREADS=$NTHREADS_EPOS + +################################################################################ +# Forecast ensemble member files +for imem in $(seq 1 $NMEM_ENS); do + memchar="mem"$(printf %03i "${imem}") + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} generate_com -x COM_ATMOS_HISTORY:COM_ATMOS_HISTORY_TMPL + + for fhr in $(seq $FHMIN $FHOUT $FHMAX); do + fhrchar=$(printf %03i $fhr) + ${NLN} "${COM_ATMOS_HISTORY}/${PREFIX}sfcf${fhrchar}.nc" "sfcf${fhrchar}_${memchar}" + ${NLN} "${COM_ATMOS_HISTORY}/${PREFIX}atmf${fhrchar}.nc" "atmf${fhrchar}_${memchar}" + done +done + +# Forecast ensemble mean and smoothed files +MEMDIR="ensstat" YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY_STAT:COM_ATMOS_HISTORY_TMPL +if [[ ! -d "${COM_ATMOS_HISTORY_STAT}" ]]; then mkdir -p "${COM_ATMOS_HISTORY_STAT}"; fi + +for fhr in $(seq $FHMIN $FHOUT $FHMAX); do + fhrchar=$(printf %03i $fhr) + ${NLN} "${COM_ATMOS_HISTORY_STAT}/${PREFIX}sfcf${fhrchar}.ensmean.nc" "sfcf${fhrchar}.ensmean" + ${NLN} "${COM_ATMOS_HISTORY_STAT}/${PREFIX}atmf${fhrchar}.ensmean.nc" "atmf${fhrchar}.ensmean" + if [ $SMOOTH_ENKF = "YES" ]; then + for imem in $(seq 1 $NMEM_ENS); do + memchar="mem"$(printf %03i "${imem}") + MEMDIR="${memchar}" YMD=${PDY} HH=${cyc} generate_com -x COM_ATMOS_HISTORY + ${NLN} "${COM_ATMOS_HISTORY}/${PREFIX}atmf${fhrchar}${ENKF_SUFFIX}.nc" "atmf${fhrchar}${ENKF_SUFFIX}_${memchar}" + done + fi + [[ $ENKF_SPREAD = "YES" ]] && ${NLN} "${COM_ATMOS_HISTORY_STAT}/${PREFIX}atmf${fhrchar}.ensspread.nc" "atmf${fhrchar}.ensspread" +done + +################################################################################ +# Generate ensemble mean surface and atmospheric files + +[[ $SMOOTH_ENKF = "YES" ]] && $NCP $HYBENSMOOTH ./hybens_smoothinfo + +rc=0 +for fhr in $(seq $FHMIN $FHOUT $FHMAX); do + fhrchar=$(printf %03i $fhr) + + export pgm=$GETSFCENSMEANEXEC + . 
prep_step + + $APRUN_EPOS ${DATA}/$(basename $GETSFCENSMEANEXEC) ./ sfcf${fhrchar}.ensmean sfcf${fhrchar} $NMEM_ENS + ra=$? + rc=$((rc+ra)) + + export_pgm=$GETATMENSMEANEXEC + . prep_step + + if [ $ENKF_SPREAD = "YES" ]; then + $APRUN_EPOS ${DATA}/$(basename $GETATMENSMEANEXEC) ./ atmf${fhrchar}.ensmean atmf${fhrchar} $NMEM_ENS atmf${fhrchar}.ensspread + else + $APRUN_EPOS ${DATA}/$(basename $GETATMENSMEANEXEC) ./ atmf${fhrchar}.ensmean atmf${fhrchar} $NMEM_ENS + fi + ra=$? + rc=$((rc+ra)) +done +export err=$rc; err_chk + +################################################################################ +# If smoothing on but no smoothing output, copy smoothed ensemble atmospheric files +if [ $SMOOTH_ENKF = "YES" ]; then + for fhr in $(seq $FHMIN $FHOUT $FHMAX); do + fhrchar=$(printf %03i $fhr) + if [ ! -s atmf${fhrchar}${ENKF_SUFFIX}_mem001 ]; then + echo WARNING! no smoothed ensemble member for fhour = $fhrchar >&2 + for imem in $(seq 1 $NMEM_ENS); do + memchar="mem"$(printf %03i $imem) + ${NCP} "atmf${fhrchar}_${memchar}" "atmf${fhrchar}${ENKF_SUFFIX}_${memchar}" + done + fi + done +fi + +################################################################################ +# Send DBN alerts +if [ $SENDDBN = "YES" ]; then + + for fhr in $(seq $FHMIN $FHOUT $FHMAX); do + fhrchar=$(printf %03i $fhr) + if [ $(expr $fhr % 3) -eq 0 ]; then + if [ -s ./sfcf${fhrchar}.ensmean ]; then + ${DBNROOT}/bin/dbn_alert "MODEL" "GFS_ENKF" "${job}" "${COM_ATMOS_HISTORY_STAT}/${PREFIX}sfcf${fhrchar}.ensmean.nc" + fi + fi + done + +fi + +################################################################################ +# Postprocessing +cd $pwd + +exit $err diff --git a/scripts/exgdas_enkf_select_obs.sh b/scripts/exgdas_enkf_select_obs.sh new file mode 100755 index 0000000000..2ad624bcdb --- /dev/null +++ b/scripts/exgdas_enkf_select_obs.sh @@ -0,0 +1,113 @@ +#! /usr/bin/env bash + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exgdas_enkf_select_obs.sh +# Script description: Compute global_gsi innovations +# +# Author: Rahul Mahajan Org: NCEP/EMC Date: 2017-03-02 +# +# Abstract: This script computes global_gsi innovations +# +# $Id$ +# +# Attributes: +# Language: POSIX shell +# +################################################################################ + +source "$HOMEgfs/ush/preamble.sh" + +# Directories. +pwd=$(pwd) + +# Utilities +export NLN=${NLN:-"/bin/ln -sf"} + +# Scripts. +ANALYSISSH=${ANALYSISSH:-$HOMEgfs/scripts/exglobal_atmos_analysis.sh} + +# Select obs +export RUN_SELECT=${RUN_SELECT:-"YES"} +export USE_SELECT=${USE_SELECT:-"NO"} + +# Observation Operator GSI namelist initialization +SETUP_INVOBS=${SETUP_INVOBS:-""} +GRIDOPTS_INVOBS=${GRIDOPTS_INVOBS:-""} +BKGVERR_INVOBS=${BKGVERR_INVOBS:-""} +ANBKGERR_INVOBS=${ANBKGERR_INVOBS:-""} +JCOPTS_INVOBS=${JCOPTS_INVOBS:-""} +STRONGOPTS_INVOBS=${STRONGOPTS_INVOBS:-""} +OBSQC_INVOBS=${OBSQC_INVOBS:-""} +OBSINPUT_INVOBS=${OBSINPUT_INVOBS:-""} +SUPERRAD_INVOBS=${SUPERRAD_INVOBS:-""} +SINGLEOB_INVOBS=${SINGLEOB_INVOBS:-""} +LAGDATA_INVOBS=${LAGDATA_INVOBS:-""} +HYBRID_ENSEMBLE_INVOBS=${HYBRID_ENSEMBLE_INVOBS:-""} +RAPIDREFRESH_CLDSURF_INVOBS=${RAPIDREFRESH_CLDSURF_INVOBS:-""} +CHEM_INVOBS=${CHEM_INVOBS:-""} + +################################################################################ +# Preprocessing +mkdata=NO +if [ ! 
-d $DATA ]; then + mkdata=YES + mkdir -p $DATA +fi +cd $DATA || exit 8 + +################################################################################ +# ObsInput file from ensemble mean +rm -f obs*input* +$NLN $SELECT_OBS obsinput.tar + +# Whether to save or skip obs +if [ $RUN_SELECT = "YES" -a $USE_SELECT = "NO" ]; then + lread_obs_save=".true." + lread_obs_skip=".false." +elif [ $RUN_SELECT = "NO" -a $USE_SELECT = "YES" ]; then + lread_obs_save=".false." + lread_obs_skip=".true." +fi + +################################################################################ +# Innovation Specific setup for ANALYSISSH +export DIAG_SUFFIX=${DIAG_SUFFIX:-""} +export DIAG_COMPRESS=${DIAG_COMPRESS:-"NO"} +export DIAG_TARBALL=${DIAG_TARBALL:-"YES"} +export DOHYBVAR="NO" +export DO_CALC_INCREMENT="NO" +export DO_CALC_ANALYSIS="NO" +export USE_CORRELATED_OBERRS="NO" +export write_fv3_increment=".false." + +# GSI Namelist options for observation operator only +export SETUP="miter=0,niter=1,lread_obs_save=$lread_obs_save,lread_obs_skip=$lread_obs_skip,lwrite_predterms=.true.,lwrite_peakwt=.true.,reduce_diag=.true.,$SETUP_INVOBS" +export GRIDOPTS="$GRIDOPTS_INVOBS" +export BKGVERR="bkgv_flowdep=.false.,$BKGVERR_INVOBS" +export ANBKGERR="$ANBKGERR_INVOBS" +export JCOPTS="$JCOPTS_INVOBS" +export STRONGOPTS="tlnmc_option=0,nstrong=0,nvmodes_keep=0,baldiag_full=.false.,baldiag_inc=.false.,$STRONGOPTS_INVOBS" +export OBSQC="$OBSQC_INVOBS" +export OBSINPUT="$OBSINPUT_INVOBS" +export SUPERRAD="$SUPERRAD_INVOBS" +export SINGLEOB="$SINGLEOB_INVOBS" +export LAGDATA="$LAGDATA_INVOBS" +export HYBRID_ENSEMBLE="" +export RAPIDREFRESH_CLDSURF="$RAPIDREFRESH_CLDSURF_INVOBS" +export CHEM="$CHEM_INVOBS" + +################################################################################ +# Execute GSI as a forward operator + +$ANALYSISSH +export err=$?; err_chk + +################################################################################ +# Postprocessing +cd $pwd +[[ $mkdata = "YES" ]] && rm -rf $DATA + + +exit $err diff --git a/scripts/exgdas_enkf_sfc.sh b/scripts/exgdas_enkf_sfc.sh new file mode 100755 index 0000000000..5bbe7a460f --- /dev/null +++ b/scripts/exgdas_enkf_sfc.sh @@ -0,0 +1,210 @@ +#! /usr/bin/env bash + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exgdas_enkf_sfc.sh +# Script description: generate ensemble surface analyses on tiles +# +# Author: Rahul Mahajan Org: NCEP/EMC Date: 2017-03-02 +# +# Abstract: This script generates ensemble surface analyses on tiles +# +# $Id$ +# +# Attributes: +# Language: POSIX shell +# +################################################################################ + +source "$HOMEgfs/ush/preamble.sh" + +# Directories. +pwd=$(pwd) + +# Base variables +DONST=${DONST:-"NO"} +DOSFCANL_ENKF=${DOSFCANL_ENKF:-"YES"} +export CASE=${CASE:-384} +ntiles=${ntiles:-6} + +# Utilities +NCP=${NCP:-"/bin/cp -p"} +NLN=${NLN:-"/bin/ln -sf"} +NCLEN=${NCLEN:-$HOMEgfs/ush/getncdimlen} + +# Scripts + +# Executables. + +# Files. 
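+# Note (assumed convention, for illustration): the calling J-job normally sets
+# these prefixes to strings such as "gdas.t${cyc}z.", so a reference like
+# ${OPREFIX}seaice.5min.blend.grb would expand to
+# gdas.t06z.seaice.5min.blend.grb for the 06z cycle.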
+OPREFIX=${OPREFIX:-""} +OSUFFIX=${OSUFFIX:-""} +APREFIX=${APREFIX:-""} +APREFIX_ENS=${APREFIX_ENS:-$APREFIX} +GPREFIX=${GPREFIX:-""} +GPREFIX_ENS=${GPREFIX_ENS:-${GPREFIX}} + +# Variables +NMEM_ENS=${NMEM_ENS:-80} +DOIAU=${DOIAU_ENKF:-"NO"} + +# Global_cycle stuff +CYCLESH=${CYCLESH:-$HOMEgfs/ush/global_cycle.sh} +export CYCLEXEC=${CYCLEXEC:-$HOMEgfs/exec/global_cycle} +APRUN_CYCLE=${APRUN_CYCLE:-${APRUN:-""}} +NTHREADS_CYCLE=${NTHREADS_CYCLE:-${NTHREADS:-1}} +export FIXfv3=${FIXfv3:-$HOMEgfs/fix/orog} +export FIXgsm=${FIXgsm:-$HOMEgfs/fix/am} +export CYCLVARS=${CYCLVARS:-"FSNOL=-2.,FSNOS=99999.,"} +export FHOUR=${FHOUR:-0} +export DELTSFC=${DELTSFC:-6} + +APRUN_ESFC=${APRUN_ESFC:-${APRUN:-""}} +NTHREADS_ESFC=${NTHREADS_ESFC:-${NTHREADS:-1}} + + +################################################################################ +# Preprocessing +mkdata=NO +if [ ! -d $DATA ]; then + mkdata=YES + mkdir -p $DATA +fi +cd $DATA || exit 99 + + +################################################################################ +# Update surface fields in the FV3 restart's using global_cycle. + +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +BDATE=$(${NDATE} -3 "${PDY}${cyc}") +bPDY=${BDATE:0:8} +bcyc=${BDATE:8:2} + +# Get dimension information based on CASE +res=${CASE:2:} +JCAP_CASE=$((res*2-2)) +LATB_CASE=$((res*2)) +LONB_CASE=$((res*4)) + +# Global cycle requires these files +export FNTSFA=${FNTSFA:-' '} +export FNACNA=${FNACNA:-${COM_OBS}/${OPREFIX}seaice.5min.blend.grb} +export FNSNOA=${FNSNOA:-${COM_OBS}/${OPREFIX}snogrb_t${JCAP_CASE}.${LONB_CASE}.${LATB_CASE}} +[[ ! -f $FNSNOA ]] && export FNSNOA="${COM_OBS}/${OPREFIX}snogrb_t1534.3072.1536" +FNSNOG=${FNSNOG:-${COM_OBS_PREV}/${GPREFIX}snogrb_t${JCAP_CASE}.${LONB_CASE}.${LATB_CASE}} +[[ ! -f $FNSNOG ]] && FNSNOG="${COM_OBS_PREV}/${GPREFIX}snogrb_t1534.3072.1536" + +# Set CYCLVARS by checking grib date of current snogrb vs that of prev cycle +if [ ${RUN_GETGES:-"NO"} = "YES" ]; then + # Ignore possible spelling error (nothing is misspelled) + # shellcheck disable=SC2153 + snoprv=$($GETGESSH -q -t snogrb_$JCAP_CASE -e $gesenvir -n $GDUMP -v $GDATE) +else + snoprv=${snoprv:-$FNSNOG} +fi + +if [ $($WGRIB -4yr $FNSNOA 2>/dev/null | grep -i snowc | awk -F: '{print $3}' | awk -F= '{print $2}') -le \ + $($WGRIB -4yr $snoprv 2>/dev/null | grep -i snowc | awk -F: '{print $3}' | awk -F= '{print $2}') ] ; then + export FNSNOA=" " + export CYCLVARS="FSNOL=99999.,FSNOS=99999.," +else + export SNOW_NUDGE_COEFF=${SNOW_NUDGE_COEFF:-0.} + export CYCLVARS="FSNOL=${SNOW_NUDGE_COEFF},$CYCLVARS" +fi + +if [ $DONST = "YES" ]; then + export NST_FILE=${NST_FILE:-${COM_ATMOS_ANALYSIS_DET}/${APREFIX}dtfanl.nc} +else + export NST_FILE="NULL" +fi + +export APRUNCY=${APRUN_CYCLE:-$APRUN_ESFC} +export OMP_NUM_THREADS_CY=${NTHREADS_CYCLE:-$NTHREADS_ESFC} +export MAX_TASKS_CY=$NMEM_ENS + +if [ $DOIAU = "YES" ]; then + # Update surface restarts at beginning of window when IAU is ON + # For now assume/hold dtfanl.nc is valid at beginning of window. 
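+    # Worked example (illustrative): for a 00z cycle, BDATE above is PDY/cyc
+    # minus three hours, i.e. 21z of the previous day, so the restarts tagged
+    # ${bPDY}.${bcyc}0000 are those valid at the start of the IAU window
+    # (assuming the usual 6-hour window).  Similarly, for CASE=C768 the
+    # dimensions derived above are JCAP_CASE=768*2-2=1534, LATB_CASE=1536 and
+    # LONB_CASE=3072, matching the fallback snow file snogrb_t1534.3072.1536.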
+ + for n in $(seq 1 $ntiles); do + + export TILE_NUM=$n + + for imem in $(seq 1 $NMEM_ENS); do + + cmem=$(printf %03i $imem) + memchar="mem$cmem" + + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} generate_com \ + COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL + + MEMDIR=${memchar} RUN="enkfgdas" YMD=${gPDY} HH=${gcyc} generate_com \ + COM_ATMOS_RESTART_MEM_PREV:COM_ATMOS_RESTART_TMPL + + [[ ${TILE_NUM} -eq 1 ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}" + + ${NCP} "${COM_ATMOS_RESTART_MEM_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" \ + "${COM_ATMOS_RESTART_MEM}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" + ${NLN} "${COM_ATMOS_RESTART_MEM_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" \ + "${DATA}/fnbgsi.${cmem}" + ${NLN} "${COM_ATMOS_RESTART_MEM}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" \ + "${DATA}/fnbgso.${cmem}" + ${NLN} "${FIXfv3}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" + ${NLN} "${FIXfv3}/${CASE}/${CASE}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" + + done + + CDATE="${PDY}${cyc}" ${CYCLESH} + export err=$?; err_chk + + done + +fi + +if [ $DOSFCANL_ENKF = "YES" ]; then + for n in $(seq 1 $ntiles); do + + export TILE_NUM=$n + + for imem in $(seq 1 $NMEM_ENS); do + + cmem=$(printf %03i $imem) + memchar="mem$cmem" + + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} generate_com \ + COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL + + RUN="${GDUMP_ENS}" MEMDIR=${memchar} YMD=${gPDY} HH=${gcyc} generate_com \ + COM_ATMOS_RESTART_MEM_PREV:COM_ATMOS_RESTART_TMPL + + [[ ${TILE_NUM} -eq 1 ]] && mkdir -p "${COM_ATMOS_RESTART_MEM}" + + ${NCP} "${COM_ATMOS_RESTART_MEM_PREV}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \ + "${COM_ATMOS_RESTART_MEM}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" + ${NLN} "${COM_ATMOS_RESTART_MEM_PREV}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \ + "${DATA}/fnbgsi.${cmem}" + ${NLN} "${COM_ATMOS_RESTART_MEM}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" \ + "${DATA}/fnbgso.${cmem}" + ${NLN} "${FIXfv3}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.${cmem}" + ${NLN} "${FIXfv3}/${CASE}/${CASE}_oro_data.tile${n}.nc" "${DATA}/fnorog.${cmem}" + + done + + CDATE="${PDY}${cyc}" ${CYCLESH} + export err=$?; err_chk + + done +fi + +################################################################################ + +################################################################################ +# Postprocessing +cd $pwd +[[ $mkdata = "YES" ]] && rm -rf $DATA + + +exit $err diff --git a/scripts/exgdas_enkf_update.sh b/scripts/exgdas_enkf_update.sh new file mode 100755 index 0000000000..2bb749e226 --- /dev/null +++ b/scripts/exgdas_enkf_update.sh @@ -0,0 +1,406 @@ +#! /usr/bin/env bash + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exgdas_enkf_update.sh +# Script description: Make global_enkf update +# +# Author: Rahul Mahajan Org: NCEP/EMC Date: 2017-03-02 +# +# Abstract: This script runs the global_enkf update +# +# $Id$ +# +# Attributes: +# Language: POSIX shell +# +################################################################################ + +source "$HOMEgfs/ush/preamble.sh" + +# Directories. 
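+# Editorial overview: for each member this script links the background
+# forecasts as sfg_${PDY}${cyc}_fhr0NN_<member> and, depending on
+# DO_CALC_INCREMENT, points the output links at either analysis files
+# (sanl_..._<member>) or increment files (incr_..._<member>) for enkf.x to
+# fill; the actual links are created further below.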
+pwd=$(pwd) + +# Utilities +NCP=${NCP:-"/bin/cp -p"} +NLN=${NLN:-"/bin/ln -sf"} +NCLEN=${NCLEN:-$HOMEgfs/ush/getncdimlen} +USE_CFP=${USE_CFP:-"NO"} +CFP_MP=${CFP_MP:-"NO"} +nm="" +if [ $CFP_MP = "YES" ]; then + nm=0 +fi +APRUNCFP=${APRUNCFP:-""} +APRUN_ENKF=${APRUN_ENKF:-${APRUN:-""}} +NTHREADS_ENKF=${NTHREADS_ENKF:-${NTHREADS:-1}} + +# Executables +ENKFEXEC=${ENKFEXEC:-$HOMEgfs/exec/enkf.x} + +# Cycling and forecast hour specific parameters +CDATE=${CDATE:-"2001010100"} +CDUMP=${CDUMP:-"gdas"} + +# Filenames. +GPREFIX=${GPREFIX:-""} +APREFIX=${APREFIX:-""} + +SMOOTH_ENKF=${SMOOTH_ENKF:-"YES"} + +GBIASe=${GBIASe:-${APREFIX}abias_int.ensmean} +CNVSTAT=${CNVSTAT:-${APREFIX}cnvstat} +OZNSTAT=${OZNSTAT:-${APREFIX}oznstat} +RADSTAT=${RADSTAT:-${APREFIX}radstat} +ENKFSTAT=${ENKFSTAT:-${APREFIX}enkfstat} + +# Namelist parameters +USE_CORRELATED_OBERRS=${USE_CORRELATED_OBERRS:-"NO"} +NMEM_ENS=${NMEM_ENS:-80} +NAM_ENKF=${NAM_ENKF:-""} +SATOBS_ENKF=${SATOBS_ENKF:-""} +OZOBS_ENKF=${OZOBS_ENKF:-""} +use_correlated_oberrs=${use_correlated_oberrs:-".false."} +if [ $USE_CORRELATED_OBERRS == "YES" ]; then + use_correlated_oberrs=".true." +fi +imp_physics=${imp_physics:-"99"} +lupp=${lupp:-".true."} +corrlength=${corrlength:-1250} +lnsigcutoff=${lnsigcutoff:-2.5} +analpertwt=${analpertwt:-0.85} +readin_localization_enkf=${readin_localization_enkf:-".true."} +reducedgrid=${reducedgrid:-".true."} +letkf_flag=${letkf_flag:-".false."} +getkf=${getkf:-".false."} +denkf=${denkf:-".false."} +nobsl_max=${nobsl_max:-10000} +lobsdiag_forenkf=${lobsdiag_forenkf:-".false."} +write_spread_diag=${write_spread_diag:-".false."} +cnvw_option=${cnvw_option:-".false."} +netcdf_diag=${netcdf_diag:-".true."} +modelspace_vloc=${modelspace_vloc:-".false."} # if true, 'vlocal_eig.dat' is needed +IAUFHRS_ENKF=${IAUFHRS_ENKF:-6} +if [ $CDUMP = "enkfgfs" ]; then + DO_CALC_INCREMENT=${DO_CALC_INCREMENT_ENKF_GFS:-"NO"} +else + DO_CALC_INCREMENT=${DO_CALC_INCREMENT:-"NO"} +fi +INCREMENTS_TO_ZERO=${INCREMENTS_TO_ZERO:-"'NONE'"} + +################################################################################ + +ATMGES_ENSMEAN="${COM_ATMOS_HISTORY_STAT_PREV}/${GPREFIX}atmf006.ensmean.nc" +LONB_ENKF=${LONB_ENKF:-$($NCLEN $ATMGES_ENSMEAN grid_xt)} # get LONB_ENKF +LATB_ENKF=${LATB_ENKF:-$($NCLEN $ATMGES_ENSMEAN grid_yt)} # get LATB_ENFK +LEVS_ENKF=${LEVS_ENKF:-$($NCLEN $ATMGES_ENSMEAN pfull)} # get LEVS_ENFK +use_gfs_ncio=".true." +use_gfs_nemsio=".false." +paranc=${paranc:-".true."} +WRITE_INCR_ZERO="incvars_to_zero= $INCREMENTS_TO_ZERO," +if [ $DO_CALC_INCREMENT = "YES" ]; then + write_fv3_incr=".false." +else + write_fv3_incr=".true." +fi +LATA_ENKF=${LATA_ENKF:-$LATB_ENKF} +LONA_ENKF=${LONA_ENKF:-$LONB_ENKF} +SATANGL=${SATANGL:-${FIXgsi}/global_satangbias.txt} +SATINFO=${SATINFO:-${FIXgsi}/global_satinfo.txt} +CONVINFO=${CONVINFO:-${FIXgsi}/global_convinfo.txt} +OZINFO=${OZINFO:-${FIXgsi}/global_ozinfo.txt} +SCANINFO=${SCANINFO:-${FIXgsi}/global_scaninfo.txt} +HYBENSINFO=${HYBENSINFO:-${FIXgsi}/global_hybens_info.l${LEVS_ENKF}.txt} +ANAVINFO=${ANAVINFO:-${FIXgsi}/global_anavinfo.l${LEVS_ENKF}.txt} +VLOCALEIG=${VLOCALEIG:-${FIXgsi}/vlocal_eig_l${LEVS_ENKF}.dat} +ENKF_SUFFIX="s" +[[ $SMOOTH_ENKF = "NO" ]] && ENKF_SUFFIX="" + +################################################################################ +# Preprocessing +mkdata=NO +if [ ! 
-d $DATA ]; then + mkdata=YES + mkdir -p $DATA +fi +cd $DATA || exit 99 + +################################################################################ +# Fixed files +$NLN $SATANGL satbias_angle +$NLN $SATINFO satinfo +$NLN $SCANINFO scaninfo +$NLN $CONVINFO convinfo +$NLN $OZINFO ozinfo +$NLN $HYBENSINFO hybens_info +$NLN $ANAVINFO anavinfo +$NLN $VLOCALEIG vlocal_eig.dat + +# Bias correction coefficients based on the ensemble mean +${NLN} "${COM_ATMOS_ANALYSIS_STAT}/${GBIASe}" "satbias_in" + +################################################################################ + +if [ $USE_CFP = "YES" ]; then + [[ -f $DATA/untar.sh ]] && rm $DATA/untar.sh + [[ -f $DATA/mp_untar.sh ]] && rm $DATA/mp_untar.sh + cat > $DATA/untar.sh << EOFuntar +#!/bin/sh +memchar=\$1 +COM_ATMOS_ANALYSIS=\$2 +flist="$CNVSTAT $OZNSTAT $RADSTAT" +for ftype in \$flist; do + if [ \$memchar = "ensmean" ]; then + fname=\${COM_ATMOS_ANALYSIS}/\${ftype}.ensmean + else + fname=\${COM_ATMOS_ANALYSIS}/\${ftype} + fi + tar -xvf \$fname +done +EOFuntar + chmod 755 $DATA/untar.sh +fi + +################################################################################ +# Ensemble guess, observational data and analyses/increments + +flist="$CNVSTAT $OZNSTAT $RADSTAT" +if [ $USE_CFP = "YES" ]; then + echo "${nm} ${DATA}/untar.sh ensmean ${COM_ATMOS_ANALYSIS_STAT}" | tee -a "${DATA}/mp_untar.sh" + if [ ${CFP_MP:-"NO"} = "YES" ]; then + nm=$((nm+1)) + fi +else + for ftype in $flist; do + fname="${COM_ATMOS_ANALYSIS_STAT}/${ftype}.ensmean" + tar -xvf $fname + done +fi +nfhrs=$(echo $IAUFHRS_ENKF | sed 's/,/ /g') +for imem in $(seq 1 $NMEM_ENS); do + memchar="mem"$(printf %03i $imem) + + MEMDIR=${memchar} RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com -x \ + COM_ATMOS_HISTORY_MEM_PREV:COM_ATMOS_HISTORY_TMPL + + MEMDIR=${memchar} YMD=${PDY} HH=${cyc} generate_com -x \ + COM_ATMOS_ANALYSIS_MEM:COM_ATMOS_ANALYSIS_TMPL + + if [ $lobsdiag_forenkf = ".false." ]; then + if [ $USE_CFP = "YES" ]; then + echo "${nm} ${DATA}/untar.sh ${memchar} ${COM_ATMOS_ANALYSIS_MEM}" | tee -a "${DATA}/mp_untar.sh" + if [ ${CFP_MP:-"NO"} = "YES" ]; then + nm=$((nm+1)) + fi + else + for ftype in $flist; do + fname="${COM_ATMOS_ANALYSIS_MEM}/${ftype}" + tar -xvf $fname + done + fi + fi + mkdir -p "${COM_ATMOS_ANALYSIS_MEM}" + for FHR in $nfhrs; do + ${NLN} "${COM_ATMOS_HISTORY_MEM_PREV}/${GPREFIX}atmf00${FHR}${ENKF_SUFFIX}.nc" \ + "sfg_${PDY}${cyc}_fhr0${FHR}_${memchar}" + if [ $cnvw_option = ".true." ]; then + ${NLN} "${COM_ATMOS_HISTORY_MEM_PREV}/${GPREFIX}sfcf00${FHR}.nc" \ + "sfgsfc_${PDY}${cyc}_fhr0${FHR}_${memchar}" + fi + if [ $FHR -eq 6 ]; then + if [ $DO_CALC_INCREMENT = "YES" ]; then + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX}atmanl.nc" \ + "sanl_${PDY}${cyc}_fhr0${FHR}_${memchar}" + else + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX}atminc.nc" \ + "incr_${PDY}${cyc}_fhr0${FHR}_${memchar}" + fi + else + if [ $DO_CALC_INCREMENT = "YES" ]; then + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX}atma00${FHR}.nc" \ + "sanl_${PDY}${cyc}_fhr0${FHR}_${memchar}" + else + ${NLN} "${COM_ATMOS_ANALYSIS_MEM}/${APREFIX}atmi00${FHR}.nc" \ + "incr_${PDY}${cyc}_fhr0${FHR}_${memchar}" + fi + fi + done +done + +# Ensemble mean guess +for FHR in $nfhrs; do + + ${NLN} "${COM_ATMOS_HISTORY_STAT_PREV}/${GPREFIX}atmf00${FHR}.ensmean.nc" \ + "sfg_${PDY}${cyc}_fhr0${FHR}_ensmean" + if [ $cnvw_option = ".true." 
]; then + ${NLN} "${COM_ATMOS_HISTORY_STAT_PREV}/${GPREFIX}sfcf00${FHR}.ensmean.nc" \ + "sfgsfc_${PDY}${cyc}_fhr0${FHR}_ensmean" + fi +done + +if [ $USE_CFP = "YES" ]; then + chmod 755 $DATA/mp_untar.sh + ncmd=$(cat $DATA/mp_untar.sh | wc -l) + if [ $ncmd -gt 0 ]; then + ncmd_max=$((ncmd < npe_node_max ? ncmd : npe_node_max)) + APRUNCFP=$(eval echo $APRUNCFP) + $APRUNCFP $DATA/mp_untar.sh + export err=$?; err_chk + fi +fi + +################################################################################ +# Create global_enkf namelist +cat > enkf.nml << EOFnml +&nam_enkf + datestring="${PDY}${cyc}",datapath="$DATA/", + analpertwtnh=${analpertwt},analpertwtsh=${analpertwt},analpertwttr=${analpertwt}, + covinflatemax=1.e2,covinflatemin=1,pseudo_rh=.true.,iassim_order=0, + corrlengthnh=${corrlength},corrlengthsh=${corrlength},corrlengthtr=${corrlength}, + lnsigcutoffnh=${lnsigcutoff},lnsigcutoffsh=${lnsigcutoff},lnsigcutofftr=${lnsigcutoff}, + lnsigcutoffpsnh=${lnsigcutoff},lnsigcutoffpssh=${lnsigcutoff},lnsigcutoffpstr=${lnsigcutoff}, + lnsigcutoffsatnh=${lnsigcutoff},lnsigcutoffsatsh=${lnsigcutoff},lnsigcutoffsattr=${lnsigcutoff}, + obtimelnh=1.e30,obtimelsh=1.e30,obtimeltr=1.e30, + saterrfact=1.0,numiter=0, + sprd_tol=1.e30,paoverpb_thresh=0.98, + nlons=$LONA_ENKF,nlats=$LATA_ENKF,nlevs=$LEVS_ENKF,nanals=$NMEM_ENS, + deterministic=.true.,sortinc=.true.,lupd_satbiasc=.false., + reducedgrid=${reducedgrid},readin_localization=${readin_localization_enkf}., + use_gfs_nemsio=${use_gfs_nemsio},use_gfs_ncio=${use_gfs_ncio},imp_physics=$imp_physics,lupp=$lupp, + univaroz=.false.,adp_anglebc=.true.,angord=4,use_edges=.false.,emiss_bc=.true., + letkf_flag=${letkf_flag},nobsl_max=${nobsl_max},denkf=${denkf},getkf=${getkf}., + nhr_anal=${IAUFHRS_ENKF},nhr_state=${IAUFHRS_ENKF},use_qsatensmean=.true., + lobsdiag_forenkf=$lobsdiag_forenkf, + write_spread_diag=$write_spread_diag, + modelspace_vloc=$modelspace_vloc, + use_correlated_oberrs=${use_correlated_oberrs}, + netcdf_diag=$netcdf_diag,cnvw_option=$cnvw_option, + paranc=$paranc,write_fv3_incr=$write_fv3_incr, + $WRITE_INCR_ZERO + $NAM_ENKF +/ +&satobs_enkf + sattypes_rad(1) = 'amsua_n15', dsis(1) = 'amsua_n15', + sattypes_rad(2) = 'amsua_n18', dsis(2) = 'amsua_n18', + sattypes_rad(3) = 'amsua_n19', dsis(3) = 'amsua_n19', + sattypes_rad(4) = 'amsub_n16', dsis(4) = 'amsub_n16', + sattypes_rad(5) = 'amsub_n17', dsis(5) = 'amsub_n17', + sattypes_rad(6) = 'amsua_aqua', dsis(6) = 'amsua_aqua', + sattypes_rad(7) = 'amsua_metop-a', dsis(7) = 'amsua_metop-a', + sattypes_rad(8) = 'airs_aqua', dsis(8) = 'airs_aqua', + sattypes_rad(9) = 'hirs3_n17', dsis(9) = 'hirs3_n17', + sattypes_rad(10)= 'hirs4_n19', dsis(10)= 'hirs4_n19', + sattypes_rad(11)= 'hirs4_metop-a', dsis(11)= 'hirs4_metop-a', + sattypes_rad(12)= 'mhs_n18', dsis(12)= 'mhs_n18', + sattypes_rad(13)= 'mhs_n19', dsis(13)= 'mhs_n19', + sattypes_rad(14)= 'mhs_metop-a', dsis(14)= 'mhs_metop-a', + sattypes_rad(15)= 'goes_img_g11', dsis(15)= 'imgr_g11', + sattypes_rad(16)= 'goes_img_g12', dsis(16)= 'imgr_g12', + sattypes_rad(17)= 'goes_img_g13', dsis(17)= 'imgr_g13', + sattypes_rad(18)= 'goes_img_g14', dsis(18)= 'imgr_g14', + sattypes_rad(19)= 'goes_img_g15', dsis(19)= 'imgr_g15', + sattypes_rad(20)= 'avhrr_n18', dsis(20)= 'avhrr3_n18', + sattypes_rad(21)= 'avhrr_metop-a', dsis(21)= 'avhrr3_metop-a', + sattypes_rad(22)= 'avhrr_n19', dsis(22)= 'avhrr3_n19', + sattypes_rad(23)= 'amsre_aqua', dsis(23)= 'amsre_aqua', + sattypes_rad(24)= 'ssmis_f16', dsis(24)= 'ssmis_f16', + sattypes_rad(25)= 'ssmis_f17', 
dsis(25)= 'ssmis_f17', + sattypes_rad(26)= 'ssmis_f18', dsis(26)= 'ssmis_f18', + sattypes_rad(27)= 'ssmis_f19', dsis(27)= 'ssmis_f19', + sattypes_rad(28)= 'ssmis_f20', dsis(28)= 'ssmis_f20', + sattypes_rad(29)= 'sndrd1_g11', dsis(29)= 'sndrD1_g11', + sattypes_rad(30)= 'sndrd2_g11', dsis(30)= 'sndrD2_g11', + sattypes_rad(31)= 'sndrd3_g11', dsis(31)= 'sndrD3_g11', + sattypes_rad(32)= 'sndrd4_g11', dsis(32)= 'sndrD4_g11', + sattypes_rad(33)= 'sndrd1_g12', dsis(33)= 'sndrD1_g12', + sattypes_rad(34)= 'sndrd2_g12', dsis(34)= 'sndrD2_g12', + sattypes_rad(35)= 'sndrd3_g12', dsis(35)= 'sndrD3_g12', + sattypes_rad(36)= 'sndrd4_g12', dsis(36)= 'sndrD4_g12', + sattypes_rad(37)= 'sndrd1_g13', dsis(37)= 'sndrD1_g13', + sattypes_rad(38)= 'sndrd2_g13', dsis(38)= 'sndrD2_g13', + sattypes_rad(39)= 'sndrd3_g13', dsis(39)= 'sndrD3_g13', + sattypes_rad(40)= 'sndrd4_g13', dsis(40)= 'sndrD4_g13', + sattypes_rad(41)= 'sndrd1_g14', dsis(41)= 'sndrD1_g14', + sattypes_rad(42)= 'sndrd2_g14', dsis(42)= 'sndrD2_g14', + sattypes_rad(43)= 'sndrd3_g14', dsis(43)= 'sndrD3_g14', + sattypes_rad(44)= 'sndrd4_g14', dsis(44)= 'sndrD4_g14', + sattypes_rad(45)= 'sndrd1_g15', dsis(45)= 'sndrD1_g15', + sattypes_rad(46)= 'sndrd2_g15', dsis(46)= 'sndrD2_g15', + sattypes_rad(47)= 'sndrd3_g15', dsis(47)= 'sndrD3_g15', + sattypes_rad(48)= 'sndrd4_g15', dsis(48)= 'sndrD4_g15', + sattypes_rad(49)= 'iasi_metop-a', dsis(49)= 'iasi_metop-a', + sattypes_rad(50)= 'seviri_m08', dsis(50)= 'seviri_m08', + sattypes_rad(51)= 'seviri_m09', dsis(51)= 'seviri_m09', + sattypes_rad(52)= 'seviri_m10', dsis(52)= 'seviri_m10', + sattypes_rad(53)= 'seviri_m11', dsis(53)= 'seviri_m11', + sattypes_rad(54)= 'amsua_metop-b', dsis(54)= 'amsua_metop-b', + sattypes_rad(55)= 'hirs4_metop-b', dsis(55)= 'hirs4_metop-b', + sattypes_rad(56)= 'mhs_metop-b', dsis(56)= 'mhs_metop-b', + sattypes_rad(57)= 'iasi_metop-b', dsis(57)= 'iasi_metop-b', + sattypes_rad(58)= 'avhrr_metop-b', dsis(58)= 'avhrr3_metop-b', + sattypes_rad(59)= 'atms_npp', dsis(59)= 'atms_npp', + sattypes_rad(60)= 'atms_n20', dsis(60)= 'atms_n20', + sattypes_rad(61)= 'cris_npp', dsis(61)= 'cris_npp', + sattypes_rad(62)= 'cris-fsr_npp', dsis(62)= 'cris-fsr_npp', + sattypes_rad(63)= 'cris-fsr_n20', dsis(63)= 'cris-fsr_n20', + sattypes_rad(64)= 'gmi_gpm', dsis(64)= 'gmi_gpm', + sattypes_rad(65)= 'saphir_meghat', dsis(65)= 'saphir_meghat', + sattypes_rad(66)= 'amsua_metop-c', dsis(66)= 'amsua_metop-c', + sattypes_rad(67)= 'mhs_metop-c', dsis(67)= 'mhs_metop-c', + sattypes_rad(68)= 'ahi_himawari8', dsis(68)= 'ahi_himawari8', + sattypes_rad(69)= 'abi_g16', dsis(69)= 'abi_g16', + sattypes_rad(70)= 'abi_g17', dsis(70)= 'abi_g17', + sattypes_rad(71)= 'iasi_metop-c', dsis(71)= 'iasi_metop-c', + sattypes_rad(72)= 'viirs-m_npp', dsis(72)= 'viirs-m_npp', + sattypes_rad(73)= 'viirs-m_j1', dsis(73)= 'viirs-m_j1', + sattypes_rad(74)= 'avhrr_metop-c', dsis(74)= 'avhrr3_metop-c', + sattypes_rad(75)= 'abi_g18', dsis(75)= 'abi_g18', + sattypes_rad(76)= 'ahi_himawari9', dsis(76)= 'ahi_himawari9', + sattypes_rad(77)= 'viirs-m_j2', dsis(77)= 'viirs-m_j2', + sattypes_rad(78)= 'atms_n21', dsis(78)= 'atms_n21', + sattypes_rad(79)= 'cris-fsr_n21', dsis(79)= 'cris-fsr_n21', + $SATOBS_ENKF +/ +&ozobs_enkf + sattypes_oz(1) = 'sbuv2_n16', + sattypes_oz(2) = 'sbuv2_n17', + sattypes_oz(3) = 'sbuv2_n18', + sattypes_oz(4) = 'sbuv2_n19', + sattypes_oz(5) = 'omi_aura', + sattypes_oz(6) = 'gome_metop-a', + sattypes_oz(7) = 'gome_metop-b', + sattypes_oz(8) = 'mls30_aura', + sattypes_oz(9) = 'ompsnp_npp', + sattypes_oz(10) = 'ompstc8_npp', + 
sattypes_oz(11) = 'ompstc8_n20', + sattypes_oz(12) = 'ompsnp_n20', + sattypes_oz(13) = 'ompslp_npp', + sattypes_oz(14) = 'ompstc8_n21', + sattypes_oz(15) = 'ompsnp_n21', + sattypes_oz(16) = 'gome_metop-c', + $OZOBS_ENKF +/ +EOFnml + +################################################################################ +# Run enkf update + +export OMP_NUM_THREADS=$NTHREADS_ENKF +export pgm=$ENKFEXEC +. prep_step + +$NCP $ENKFEXEC $DATA +$APRUN_ENKF ${DATA}/$(basename $ENKFEXEC) 1>stdout 2>stderr +export err=$?; err_chk + +# Cat runtime output files. +cat stdout stderr > "${COM_ATMOS_ANALYSIS_STAT}/${ENKFSTAT}" + +################################################################################ +# Postprocessing +cd $pwd +[[ $mkdata = "YES" ]] && rm -rf $DATA + + +exit $err diff --git a/scripts/exgfs_aero_init_aerosol.py b/scripts/exgfs_aero_init_aerosol.py index ecd968ce2f..db5e462f64 100755 --- a/scripts/exgfs_aero_init_aerosol.py +++ b/scripts/exgfs_aero_init_aerosol.py @@ -4,7 +4,7 @@ 'script'-level control of the aerosol init job. Reads environment variables, determines the atmospheric IC files and most recent available -restart files, then calls the script that merges the tracers from the restart files into +restart files, then calls the script that merges the tracers from the restart files into the IC files. INPUTS @@ -22,7 +22,7 @@ Additionally, the following data files are used: - Tiled atmospheric initial conditions that follow the naming pattern determined by `atm_base_pattern` and `atm_file_pattern` -- Restart files from a previous cycle that fit the pattern determined by restart_base_pattern and restart_file_pattern, +- Restart files from a previous cycle that fit the pattern determined by restart_base_pattern and restart_file_pattern, tracer_file_pattern, and dycore_file_pattern - A static file containing a list of tracers from the restart files to be added to the IC files, determined by `tracer_list_file_pattern` @@ -64,223 +64,229 @@ def main() -> None: - # Read in environment variables and make sure they exist - cdate = get_env_var("CDATE") - incr = int(get_env_var('STEP_GFS')) - fcst_length = int(get_env_var('FHMAX_GFS')) - cdump = get_env_var("CDUMP") - rot_dir = get_env_var("ROTDIR") - ush_gfs = get_env_var("USHgfs") - parm_gfs = get_env_var("PARMgfs") + # Read in environment variables and make sure they exist + cdate = get_env_var("CDATE") + incr = int(get_env_var('STEP_GFS')) + fcst_length = int(get_env_var('FHMAX_GFS')) + cdump = get_env_var("CDUMP") + rot_dir = get_env_var("ROTDIR") + ush_gfs = get_env_var("USHgfs") + parm_gfs = get_env_var("PARMgfs") - # os.chdir(data) + # os.chdir(data) - merge_script = merge_script_pattern.format(ush_gfs=ush_gfs) - tracer_list_file = tracer_list_file_pattern.format(parm_gfs=parm_gfs) + merge_script = merge_script_pattern.format(ush_gfs=ush_gfs) + tracer_list_file = tracer_list_file_pattern.format(parm_gfs=parm_gfs) - time = datetime.strptime(cdate, "%Y%m%d%H") - atm_source_path = time.strftime(atm_base_pattern.format(**locals())) + time = datetime.strptime(cdate, "%Y%m%d%H") + atm_source_path = time.strftime(atm_base_pattern.format(**locals())) - if(debug): - for var in ['merge_script', 'tracer_list_file', 'atm_source_path']: - print(f'{var} = {f"{var}"}') + if (debug): + for var in ['merge_script', 'tracer_list_file', 'atm_source_path']: + print(f'{var} = {f"{var}"}') - atm_files, ctrl_files = get_atm_files(atm_source_path) - tracer_files, rest_files, core_files = get_restart_files(time, incr, max_lookback, fcst_length, rot_dir, cdump) 
+ atm_files, ctrl_files = get_atm_files(atm_source_path) + tracer_files, rest_files, core_files = get_restart_files(time, incr, max_lookback, fcst_length, rot_dir, cdump) - if (tracer_files is not None): - merge_tracers(merge_script, atm_files, tracer_files, rest_files, core_files[0], ctrl_files[0], tracer_list_file) + if (tracer_files is not None): + merge_tracers(merge_script, atm_files, tracer_files, rest_files, core_files[0], ctrl_files[0], tracer_list_file) - return + return def get_env_var(varname: str, fail_on_missing: bool = True) -> str: - ''' - Retrieve environment variable and exit or print warning if not defined - - Parameters - ---------- - varname : str - Environment variable to read - fail_on_missing : bool, optional - Whether to fail (if True) or print warning (False) if environment variable is not defined (default: True) - - Returns - ---------- - str - Value of the named variable - - Raises - ---------- - RuntimeError - If fail_on_missing is True and environment variable is not defined - - ''' - if(debug): - print(f'Trying to read envvar {varname}') - - var = os.environ.get(varname) - if(var is None): - if(fail_on_missing is True): - raise RuntimeError(f'Environment variable {varname} not set') - else: - print(f"WARNING: Environment variable {varname} not set, continuing using None") - if(debug): - print(f'\tValue: {var}') - return(var) + ''' + Retrieve environment variable and exit or print warning if not defined + + Parameters + ---------- + varname : str + Environment variable to read + fail_on_missing : bool, optional + Whether to fail (if True) or print warning (False) if environment variable is not defined (default: True) + + Returns + ---------- + str + Value of the named variable + + Raises + ---------- + RuntimeError + If fail_on_missing is True and environment variable is not defined + + ''' + if (debug): + print(f'Trying to read envvar {varname}') + + var = os.environ.get(varname) + if (var is None): + if (fail_on_missing is True): + raise RuntimeError(f'Environment variable {varname} not set') + else: + print(f"WARNING: Environment variable {varname} not set, continuing using None") + if (debug): + print(f'\tValue: {var}') + return (var) def get_atm_files(path: str) -> typing.List[typing.List[str]]: - ''' - Checks whether all atmospheric IC files exist in the given location and returns a list - of the filenames. - - Parameters - ---------- - path : str - Location where atmospheric IC files should exist - - Returns - ---------- - list of str - List of the full paths to each of the atmospheric files - - Raises - ---------- - IOError - If fail_on_missing is True and environment variable is not defined - - ''' - print(f'Checking for atm files in {path}') - - file_list = [] - for file_pattern in atm_file_pattern, atm_ctrl_pattern: - files = list(map(lambda tile: file_pattern.format(tile=tile, path=path), tiles)) - for file_name in files: - if(debug): - print(f"\tChecking for {file_name}") - if(not os.path.isfile(file_name)): - raise IOError(f"Atmosphere file {file_name} not found") - elif(debug): - print(f"\t\tFound {file_name}") - file_list = file_list + [files] - return file_list + ''' + Checks whether all atmospheric IC files exist in the given location and returns a list + of the filenames. 
+ + Parameters + ---------- + path : str + Location where atmospheric IC files should exist + + Returns + ---------- + list of str + List of the full paths to each of the atmospheric files + + Raises + ---------- + IOError + If fail_on_missing is True and environment variable is not defined + + ''' + print(f'Checking for atm files in {path}') + + file_list = [] + for file_pattern in atm_file_pattern, atm_ctrl_pattern: + files = list(map(lambda tile: file_pattern.format(tile=tile, path=path), tiles)) + for file_name in files: + if (debug): + print(f"\tChecking for {file_name}") + if (not os.path.isfile(file_name)): + raise IOError(f"Atmosphere file {file_name} not found") + elif (debug): + print(f"\t\tFound {file_name}") + file_list = file_list + [files] + return file_list def get_restart_files(time: datetime, incr: int, max_lookback: int, fcst_length: int, rot_dir: str, cdump: str) -> typing.List[typing.List[str]]: - ''' - Determines the last cycle where all the necessary restart files are available. Ideally the immediate previous cycle - - Parameters - ---------- - time : datetime - Initial time for the current forecast - incr : int - Forecast cadence in hours - max_lookback : int - Maximum number of cycles to look back before failing - fcst_length : int - Length of forecast in hours - rot_dir : str - Path to the ROTDIR (COM) directory - cdump : str - CDUMP of current forecast portion (currently should always be 'gfs') - - Returns - ---------- - list of str - Full pathnames of all restart files needed from previous cycle (fv_core and fv_tracer files) - If all needed files aren't found within lookback period, An array of three None is returned instead. - - ''' - print(f"Looking for restart tracer files in {rot_dir}") - for lookback in map(lambda i: incr * (i + 1), range(max_lookback)): - if(lookback > fcst_length): - # Trying to look back farther than the length of a forecast - break - elif(lookback == fcst_length): - # Restart files at the end of the cycle don't have a timestamp - timestamp = "" - else: - timestamp = time.strftime("%Y%m%d.%H0000.") - - last_time = time - timedelta(hours=lookback) - - if(debug): - print(f"\tChecking {last_time}") - file_list = [] - file_base = last_time.strftime(restart_base_pattern.format(**locals())) - - for file_pattern in tracer_file_pattern, restart_file_pattern, dycore_file_pattern: - files = list(map(lambda tile: file_pattern.format(timestamp=timestamp, file_base=file_base, tile=tile), tiles)) - if(debug): - print(f"\t\tLooking for files {files} in directory {file_base}") - file_list = file_list + [files] - - found = all([os.path.isfile(file) for file in files for files in file_list]) - - if(found): - break - else: - print(last_time.strftime("Restart files not found for %Y%m%d_%H")) - - if(found): - return file_list - else: - print("WARNING: Unable to find restart files, will use zero fields") - return [ None, None, None ] + ''' + Determines the last cycle where all the necessary restart files are available. 
Ideally the immediate previous cycle + + Parameters + ---------- + time : datetime + Initial time for the current forecast + incr : int + Forecast cadence in hours + max_lookback : int + Maximum number of cycles to look back before failing + fcst_length : int + Length of forecast in hours + rot_dir : str + Path to the ROTDIR (COM) directory + cdump : str + CDUMP of current forecast portion (currently should always be 'gfs') + + Returns + ---------- + list of str + Full pathnames of all restart files needed from previous cycle (fv_core and fv_tracer files) + If all needed files aren't found within lookback period, An array of three None is returned instead. + + ''' + print(f"Looking for restart tracer files in {rot_dir}") + for lookback in map(lambda i: incr * (i + 1), range(max_lookback)): + if (lookback > fcst_length): + # Trying to look back farther than the length of a forecast + break + elif (lookback == fcst_length): + # Restart files at the end of the cycle don't have a timestamp + timestamp = "" + else: + timestamp = time.strftime("%Y%m%d.%H0000.") + + last_time = time - timedelta(hours=lookback) + + if (debug): + print(f"\tChecking {last_time}") + file_list = [] + file_base = last_time.strftime(restart_base_pattern.format(**locals())) + + for file_pattern in tracer_file_pattern, restart_file_pattern, dycore_file_pattern: + files = list(map(lambda tile: file_pattern.format(timestamp=timestamp, file_base=file_base, tile=tile), tiles)) + if (debug): + print(f"\t\tLooking for files {files} in directory {file_base}") + file_list = file_list + [files] + + found = all([os.path.isfile(file) for file in files for files in file_list]) + + if (found): + break + else: + print(last_time.strftime("Restart files not found for %Y%m%d_%H")) + + if (found): + return file_list + else: + print("WARNING: Unable to find restart files, will use zero fields") + return [None, None, None] # Merge tracer data into atmospheric data -def merge_tracers(merge_script: str, atm_files: typing.List[str], tracer_files: typing.List[str], rest_files: typing.List[str], core_file: str, ctrl_file: str, tracer_list_file: str) -> None: - ''' - Call the merger script to merge the tracers into the atmospheric IC files. Merged file is written to a temp file - which then overwrites the original upon successful completion of the script. 
- - Parameters - ---------- - merge_script : str - Full path to the merge script - atm_files : list of str - List of paths to atmospheric IC files - tracer_files : list of str - List of paths to tracer restart files - rest_files : list of str - List of paths to dycore tile restart files - core_file : str - Path of dycore restart file - ctrl_file : str - Path of control file - tracer_list_file : str - Full path to the file listing the tracer variables to add - - Returns - ---------- - None - - Raises - ---------- - ValueError - If `atm_files`, `tracer_files`, and `rest_files` are not all the same length - CalledProcessError - If merge script exits with a non-zero error - - ''' - print("Merging tracers") - if(len(atm_files) != len(tracer_files)): - raise ValueError("Atmosphere file list and tracer file list are not the same length") - - if(len(atm_files) != len(rest_files)): - raise ValueError("Atmosphere file list and dycore file list are not the same length") - - for atm_file, tracer_file, rest_file in zip(atm_files, tracer_files, rest_files): - if debug: - print(f"\tMerging tracers from {tracer_file} into {atm_file}") - temp_file = f'{atm_file}.tmp' - subprocess.run([merge_script, atm_file, tracer_file, core_file, ctrl_file, rest_file, tracer_list_file, temp_file], check=True) - os.replace(temp_file, atm_file) +def merge_tracers(merge_script: str, + atm_files: typing.List[str], + tracer_files: typing.List[str], + rest_files: typing.List[str], + core_file: str, + ctrl_file: str, + tracer_list_file: str) -> None: + ''' + Call the merger script to merge the tracers into the atmospheric IC files. Merged file is written to a temp file + which then overwrites the original upon successful completion of the script. + + Parameters + ---------- + merge_script : str + Full path to the merge script + atm_files : list of str + List of paths to atmospheric IC files + tracer_files : list of str + List of paths to tracer restart files + rest_files : list of str + List of paths to dycore tile restart files + core_file : str + Path of dycore restart file + ctrl_file : str + Path of control file + tracer_list_file : str + Full path to the file listing the tracer variables to add + + Returns + ---------- + None + + Raises + ---------- + ValueError + If `atm_files`, `tracer_files`, and `rest_files` are not all the same length + CalledProcessError + If merge script exits with a non-zero error + + ''' + print("Merging tracers") + if (len(atm_files) != len(tracer_files)): + raise ValueError("Atmosphere file list and tracer file list are not the same length") + + if (len(atm_files) != len(rest_files)): + raise ValueError("Atmosphere file list and dycore file list are not the same length") + + for atm_file, tracer_file, rest_file in zip(atm_files, tracer_files, rest_files): + if debug: + print(f"\tMerging tracers from {tracer_file} into {atm_file}") + temp_file = f'{atm_file}.tmp' + subprocess.run([merge_script, atm_file, tracer_file, core_file, ctrl_file, rest_file, tracer_list_file, temp_file], check=True) + os.replace(temp_file, atm_file) if __name__ == "__main__": - main() - exit(0) + main() + exit(0) diff --git a/scripts/exgfs_atmos_awips_20km_1p0deg.sh b/scripts/exgfs_atmos_awips_20km_1p0deg.sh index 1f4414c1b4..0f9868a506 100755 --- a/scripts/exgfs_atmos_awips_20km_1p0deg.sh +++ b/scripts/exgfs_atmos_awips_20km_1p0deg.sh @@ -1,4 +1,5 @@ -#!/bin/ksh +#! 
/usr/bin/env bash + ############################################################################## # UTILITY SCRIPT NAME : exgfs_awips_20km_1p0deg.sh # DATE WRITTEN : 11/01/2017 @@ -10,59 +11,49 @@ # 1st argument - Forecast Hour - format of 3I (3 digits) # ############################################################################### -echo "------------------------------------------------" -echo "JGFS_AWIPS_00/06/12/18 GFS postprocessing" -echo "------------------------------------------------" -echo "History: NOV 2017 - First implementation of this new script to " -echo " process GFS AWIPS 20km and 1.0 deg grids products " -echo " " +# echo "------------------------------------------------" +# echo "JGFS_AWIPS_00/06/12/18 GFS postprocessing" +# echo "------------------------------------------------" +# echo "History: NOV 2017 - First implementation of this new script to " +# echo " process GFS AWIPS 20km and 1.0 deg grids products " +# echo " " ############################################################################### + +source "${HOMEgfs}/ush/preamble.sh" + fcsthrs="$1" num=$# -job_name=$(echo $job|sed 's/[jpt]gfs/gfs/') +job_name=${job/[jpt]gfs/gfs} -if test "$num" -ge 1 -then - echo "" - echo " Appropriate number of arguments were passed" +if (( num != 1 )); then echo "" -else + echo " FATAL ERROR: Incorrect number of arguments " echo "" - echo " Number of arguments were not passed " echo "" - echo "" - echo "Usage: $0 \$fcsthrs (3 digits) " + echo "Usage: $0 \${fcsthrs} (3 digits) " echo "" exit 16 fi -cd $DATA - -set -x +cd "${DATA}" || exit 2 ############################################### # Wait for the availability of the pgrb file ############################################### icnt=1 -while [ $icnt -lt 1000 ] -do - if [ -s $COMIN/${RUN}.${cycle}.pgrb2b.0p25.f$fcsthrs.idx ] - then - break - fi - - sleep 10 - icnt=$((icnt + 1)) - if [ $icnt -ge 180 ] - then - msg="ABORTING after 30 min of waiting for the GFS pgrb2 file!" - err_exit $msg - fi +while (( icnt < 1000 )); do + if [[ -s "${COM_ATMOS_GRIB_0p25}/${RUN}.${cycle}.pgrb2b.0p25.f${fcsthrs}.idx" ]]; then + break + fi + + sleep 10 + icnt=$((icnt + 1)) + if (( icnt >= 180 )); then + msg="FATAL ERROR: No GFS pgrb2 file after 30 min of waiting" + err_exit "${msg}" + fi done -######################################## -msg="HAS BEGUN!" 
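#------------------------------------------------------------------------------
# NOTE (editorial sketch, not part of this patch): the loop above polls for the
# 0.25 degree pgrb2b index file every 10 seconds and aborts after roughly 30
# minutes.  The same wait-with-timeout pattern can be factored into a small
# helper; the function name, interval, and retry limit here are illustrative
# defaults, not part of the operational script.
#------------------------------------------------------------------------------
wait_for_file() {
  local file=$1 interval=${2:-10} max_tries=${3:-180}
  local try=0
  while (( try < max_tries )); do
    [[ -s "${file}" ]] && return 0      # file exists and is non-empty
    sleep "${interval}"
    try=$((try + 1))
  done
  return 1                              # timed out
}
# Example use mirroring the check above:
# wait_for_file "${COM_ATMOS_GRIB_0p25}/${RUN}.${cycle}.pgrb2b.0p25.f${fcsthrs}.idx" \
#   || err_exit "FATAL ERROR: No GFS pgrb2 file after 30 min of waiting"
#------------------------------------------------------------------------------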
-postmsg "$jlogfile" "$msg" ######################################## echo " ------------------------------------------" @@ -75,7 +66,7 @@ echo "#######################################" echo " Process GRIB AWIP GRIB2 PRODUCTS " echo "#######################################" echo " " -set -x +set_trace # Set type of Interpolation for WGRIB2 export opt1=' -set_grib_type same -new_grid_winds earth ' @@ -95,172 +86,185 @@ export SCALEDEC=${SCALDEC:-$USHgfs/scale_dec.sh} # Process GFS GRIB AWIP PRODUCTS IN GRIB2 # ############################################################### -cp $COMIN/gfs.t${cyc}z.pgrb2.0p25.f${fcsthrs} tmpfile2${fcsthrs} -cp $COMIN/gfs.t${cyc}z.pgrb2b.0p25.f${fcsthrs} tmpfile2b${fcsthrs} -cat tmpfile2${fcsthrs} tmpfile2b${fcsthrs} > tmpfile${fcsthrs} -$WGRIB2 tmpfile${fcsthrs} | grep -F -f $PARMproduct/gfs_awips_parmlist_g2 | $WGRIB2 -i -grib masterfile tmpfile${fcsthrs} +cp "${COM_ATMOS_GRIB_0p25}/gfs.t${cyc}z.pgrb2.0p25.f${fcsthrs}" "tmpfile2${fcsthrs}" +cp "${COM_ATMOS_GRIB_0p25}/gfs.t${cyc}z.pgrb2b.0p25.f${fcsthrs}" "tmpfile2b${fcsthrs}" +cat "tmpfile2${fcsthrs}" "tmpfile2b${fcsthrs}" > "tmpfile${fcsthrs}" +${WGRIB2} "tmpfile${fcsthrs}" | grep -F -f "${PARMproduct}/gfs_awips_parmlist_g2" | \ + ${WGRIB2} -i -grib masterfile "tmpfile${fcsthrs}" export err=$? -if [[ $err -ne 0 ]] ; then +if [[ $err -ne 0 ]]; then echo " FATAL ERROR: masterfile does not exist." exit $err fi -$WGRIB2 masterfile -match ":PWAT:entire atmosphere" -grib gfs_pwat.grb -$WGRIB2 masterfile | grep -v ":PWAT:entire atmosphere" | $WGRIB2 -i -grib temp_gfs masterfile +${WGRIB2} masterfile -match ":PWAT:entire atmosphere" -grib gfs_pwat.grb +${WGRIB2} masterfile | grep -v ":PWAT:entire atmosphere" | ${WGRIB2} -i -grib temp_gfs masterfile ################################################################## # Process to change PWAT from level 200 to 10 (Entire Atmosphere) # in production defintion template (PDT) 4.0 ################################################################## -$WGRIB2 gfs_pwat.grb -set_byte 4 23 10 -grib gfs_pwat_levels_10.grb +${WGRIB2} gfs_pwat.grb -set_byte 4 23 10 -grib gfs_pwat_levels_10.grb export err=$?; err_chk cat temp_gfs gfs_pwat_levels_10.grb > tmp_masterfile -for GRID in conus ak prico pac 003 -do - case $GRID in - conus) - # Grid 20km_conus - CONUS - 20 km Quadruple Resolution (Lambert Conformal) - # export grid_20km_conus="30 6 0 0 0 0 0 0 369 257 12190000 226541000 8 25000000 265000000 20318000 20318000 0 64 25000000 25000000 0 0" - # $COPYGB2 -g "$grid_20km_conus" -i0 -x tmp_masterfile awps_file_f${fcsthrs}_${GRID} - - export gridconus="lambert:265.0:25.0:25.0 226.541:369:20318.0 12.19:257:20318.0" - $WGRIB2 tmp_masterfile $opt1uv $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid $gridconus awps_file_f${fcsthrs}_${GRID} - ;; - ak) - # Grid 20km_ak - Alaska - Double Resolution (Polar Stereographic) - # Redefined grid 217 for Alaska region - # export grid_20km_ak="20 6 0 0 0 0 0 0 277 213 30000000 187000000 8 60000000 225000000 22500000 22500000 0 64" - # $COPYGB2 -g "$grid_20km_ak" -i0 -x tmp_masterfile awps_file_f${fcsthrs}_${GRID} - - export gridak="nps:210.0:60.0 170.0:277:22500 35.0:225:22500" - $WGRIB2 tmp_masterfile $opt1uv $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid $gridak awps_file_f${fcsthrs}_${GRID} - ;; - prico) - # Grid 20km_prico - 0.25 degree Lat/Lon grid for Puerto Rico (20km) - # export grid_20km_prico="0 6 0 0 0 0 0 0 275 205 0 0 50750000 271750000 48 -250000 340250000 250000 250000 0" - # $COPYGB2 -g 
"$grid_20km_prico" -i0 -x tmp_masterfile awps_file_f${fcsthrs}_${GRID} - - export gridprico="latlon 271.75:275:0.25 50.75:205:-0.25" - $WGRIB2 tmp_masterfile $opt1 $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid $gridprico awps_file_f${fcsthrs}_${GRID} - ;; - pac) - # Grid 20km_pac - 20 km Mercator grid for Pacific Region - # export grid_20km_pac="10 6 0 0 0 0 0 0 837 692 -45000000 110000000 48 20000000 65720000 270000000 64 0 20000000 20000000" - # NEW export grid_20km_pac="10 6 0 0 0 0 0 0 837 725 -45000000 110000000 48 20000000 65734500 270000000 64 0 20000000 20000000" - # $COPYGB2 -g "$grid_20km_pac" -i0 -x tmp_masterfile awps_file_f${fcsthrs}_${GRID} - - export gridpac="mercator:20.0 110.0:837:20000:270.0 -45.0:725:20000:65.7345" - $WGRIB2 tmp_masterfile $opt1 $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid $gridpac awps_file_f${fcsthrs}_${GRID} - ;; - 003) - ###################################################################### - # Process GFS GRIB AWIP 1.0 DEGREE (GRID 003) PRODUCTS IN GRIB2 # - ###################################################################### - export grid003="latlon 0:360:1.0 90:181:-1.0" - $WGRIB2 tmp_masterfile $opt1 $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid $grid003 awps_file_f${fcsthrs}_${GRID} - ;; +for GRID in conus ak prico pac 003; do + # shellcheck disable=SC2086 + case ${GRID} in + conus) + # Grid 20km_conus - CONUS - 20 km Quadruple Resolution (Lambert Conformal) + # export grid_20km_conus="30 6 0 0 0 0 0 0 369 257 12190000 226541000 8 25000000 265000000 20318000 20318000 0 64 25000000 25000000 0 0" + # $COPYGB2 -g "$grid_20km_conus" -i0 -x tmp_masterfile awps_file_f${fcsthrs}_${GRID} + + export gridconus="lambert:265.0:25.0:25.0 226.541:369:20318.0 12.19:257:20318.0" + ${WGRIB2} tmp_masterfile ${opt1uv} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} \ + ${opt27} ${opt28} -new_grid ${gridconus} "awps_file_f${fcsthrs}_${GRID}" + ;; + ak) + # Grid 20km_ak - Alaska - Double Resolution (Polar Stereographic) + # Redefined grid 217 for Alaska region + # export grid_20km_ak="20 6 0 0 0 0 0 0 277 213 30000000 187000000 8 60000000 225000000 22500000 22500000 0 64" + # $COPYGB2 -g "$grid_20km_ak" -i0 -x tmp_masterfile awps_file_f${fcsthrs}_${GRID} + + export gridak="nps:210.0:60.0 170.0:277:22500 35.0:225:22500" + ${WGRIB2} tmp_masterfile ${opt1uv} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} \ + ${opt27} ${opt28} -new_grid ${gridak} "awps_file_f${fcsthrs}_${GRID}" + ;; + prico) + # Grid 20km_prico - 0.25 degree Lat/Lon grid for Puerto Rico (20km) + # export grid_20km_prico="0 6 0 0 0 0 0 0 275 205 0 0 50750000 271750000 48 -250000 340250000 250000 250000 0" + # $COPYGB2 -g "$grid_20km_prico" -i0 -x tmp_masterfile awps_file_f${fcsthrs}_${GRID} + + export gridprico="latlon 271.75:275:0.25 50.75:205:-0.25" + ${WGRIB2} tmp_masterfile ${opt1} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} \ + ${opt27} ${opt28} -new_grid ${gridprico} "awps_file_f${fcsthrs}_${GRID}" + ;; + pac) + # Grid 20km_pac - 20 km Mercator grid for Pacific Region + # export grid_20km_pac="10 6 0 0 0 0 0 0 837 692 -45000000 110000000 48 20000000 65720000 270000000 64 0 20000000 20000000" + # NEW export grid_20km_pac="10 6 0 0 0 0 0 0 837 725 -45000000 110000000 48 20000000 65734500 270000000 64 0 20000000 20000000" + # $COPYGB2 -g "$grid_20km_pac" -i0 -x tmp_masterfile awps_file_f${fcsthrs}_${GRID} + + export gridpac="mercator:20.0 110.0:837:20000:270.0 -45.0:725:20000:65.7345" + ${WGRIB2} tmp_masterfile 
${opt1} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} \ + ${opt27} ${opt28} -new_grid ${gridpac} "awps_file_f${fcsthrs}_${GRID}" + ;; + 003) + ###################################################################### + # Process GFS GRIB AWIP 1.0 DEGREE (GRID 003) PRODUCTS IN GRIB2 # + ###################################################################### + export grid003="latlon 0:360:1.0 90:181:-1.0" + ${WGRIB2} tmp_masterfile ${opt1} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} \ + ${opt27} ${opt28} -new_grid ${grid003} "awps_file_f${fcsthrs}_${GRID}" + ;; + *) + echo "FATAL ERROR: Unknown output grid ${GRID}" + exit 2 + ;; esac - $TRIMRH awps_file_f${fcsthrs}_${GRID} - $SCALEDEC awps_file_f${fcsthrs}_${GRID} - $GRB2INDEX awps_file_f${fcsthrs}_${GRID} awps_file_fi${fcsthrs}_${GRID} - -########################################################################### -# Checking fields in awps_file_f${fcsthrs}_${GRID} file -# before TOCGRIB2 adding WMO headers for AWIPS products. -# -# NOTE: numparm is the total of fields in grib2_awpgfs_20km_conusf000 file -########################################################################### -numparm=247 -numrec=$( $WGRIB2 awps_file_f${fcsthrs}_${GRID} | wc -l ) - -if [ $numrec -lt $numparm ] -then - msg="ABORTING : awps_file_f${fcsthrs}_${GRID} file is missing fields for AWIPS !" - err_exit $msg -fi + # shellcheck disable= + ${TRIMRH} "awps_file_f${fcsthrs}_${GRID}" + ${SCALEDEC} "awps_file_f${fcsthrs}_${GRID}" + ${GRB2INDEX} "awps_file_f${fcsthrs}_${GRID}" "awps_file_fi${fcsthrs}_${GRID}" + + ########################################################################### + # Checking fields in awps_file_f${fcsthrs}_${GRID} file + # before TOCGRIB2 adding WMO headers for AWIPS products. + # + # NOTE: numparm is the total of fields in grib2_awpgfs_20km_conusf000 file + ########################################################################### + numparm=247 + numrec=$( ${WGRIB2} "awps_file_f${fcsthrs}_${GRID}" | wc -l ) + + if (( numrec < numparm )); then + msg="FATAL ERROR: awps_file_f${fcsthrs}_${GRID} file is missing fields for AWIPS !" 
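#------------------------------------------------------------------------------
# NOTE (editorial sketch, not part of this patch): each branch of the case
# statement above interpolates the combined master file onto its target grid
# with wgrib2's -new_grid option.  Below is a minimal standalone example using
# the global 1.0 degree lat/lon description from the 003 branch; the input and
# output file names are placeholders.
#------------------------------------------------------------------------------
WGRIB2=${WGRIB2:-wgrib2}
opt1=' -set_grib_type same -new_grid_winds earth '
grid003="latlon 0:360:1.0 90:181:-1.0"
# shellcheck disable=SC2086
${WGRIB2} tmp_masterfile ${opt1} -new_grid ${grid003} awps_file_1p0deg_example
#------------------------------------------------------------------------------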
+ err_exit "${msg}" || exit 10 + fi -# Processing AWIPS GRIB2 grids with WMO headers + # Processing AWIPS GRIB2 grids with WMO headers pgm=tocgrib2 export pgm; prep_step startmsg - if [ $GRID = "003" -a $(expr ${fcsthrs} % 6) -eq 0 ] ; then - export FORT11=awps_file_f${fcsthrs}_${GRID} - export FORT31=awps_file_fi${fcsthrs}_${GRID} - export FORT51=grib2.awpgfs${fcsthrs}.${GRID} + if [[ ${GRID} = "003" && $(( fcsthrs % 6 )) == 0 ]]; then + export FORT11="awps_file_f${fcsthrs}_${GRID}" + export FORT31="awps_file_fi${fcsthrs}_${GRID}" + export FORT51="grib2.awpgfs${fcsthrs}.${GRID}" - $TOCGRIB2 < $PARMwmo/grib2_awpgfs${fcsthrs}.${GRID} >> $pgmout 2> errfile + cp "${PARMwmo}/grib2_awpgfs${fcsthrs}.${GRID}" "parm_list" + if [[ ${DO_WAVE} != "YES" ]]; then + # Remove wave field it not running wave model + grep -vw "5WAVH" "parm_list" > "parm_list_temp" + mv "parm_list_temp" "parm_list" + fi + + ${TOCGRIB2} < "parm_list" >> "${pgmout}" 2> errfile export err=$?; err_chk echo " error from tocgrib2=",$err - if [ $SENDCOM = "YES" ] ; then + if [[ ${SENDCOM} == "YES" ]]; then ############################## - # Post Files to ${COMOUTwmo} + # Post Files to ${COM_ATMOS_WMO} ############################## - mv grib2.awpgfs${fcsthrs}.${GRID} ${COMOUTwmo}/grib2.awpgfs${fcsthrs}.${GRID}.gfs_awips_f${fcsthrs}_1p0deg_${cyc} + mv "grib2.awpgfs${fcsthrs}.${GRID}" \ + "${COM_ATMOS_WMO}/grib2.awpgfs${fcsthrs}.${GRID}.gfs_awips_f${fcsthrs}_1p0deg_${cyc}" ############################## # Distribute Data ############################## - if [ "$SENDDBN" = 'YES' -o "$SENDAWIP" = 'YES' ] ; then - $DBNROOT/bin/dbn_alert NTC_LOW $NET $job ${COMOUTwmo}/grib2.awpgfs${fcsthrs}.${GRID}.gfs_awips_f${fcsthrs}_1p0deg_${cyc} + if [[ "${SENDDBN}" == 'YES' || "${SENDAWIP}" == 'YES' ]]; then + "${DBNROOT}/bin/dbn_alert" NTC_LOW "${NET}" "${job}" \ + "${COM_ATMOS_WMO}/grib2.awpgfs${fcsthrs}.${GRID}.gfs_awips_f${fcsthrs}_1p0deg_${cyc}" else - msg="File ${COMOUTwmo}/grib2.awpgfs${fcsthrs}.${GRID}.gfs_awips_f${fcsthrs}_1p0deg_${cyc} not posted to db_net." - postmsg "$jlogfile" "$msg" + echo "File ${COM_ATMOS_WMO}/grib2.awpgfs${fcsthrs}.${GRID}.gfs_awips_f${fcsthrs}_1p0deg_${cyc} not posted to db_net." fi fi - elif [ $GRID != "003" ] ; then - export FORT11=awps_file_f${fcsthrs}_${GRID} - export FORT31=awps_file_fi${fcsthrs}_${GRID} - export FORT51=grib2.awpgfs_20km_${GRID}_f${fcsthrs} + elif [[ ${GRID} != "003" ]]; then + export FORT11="awps_file_f${fcsthrs}_${GRID}" + export FORT31="awps_file_fi${fcsthrs}_${GRID}" + export FORT51="grib2.awpgfs_20km_${GRID}_f${fcsthrs}" + + cp "${PARMwmo}/grib2_awpgfs_20km_${GRID}f${fcsthrs}" "parm_list" + if [[ ${DO_WAVE} != "YES" ]]; then + # Remove wave field it not running wave model + grep -vw "5WAVH" "parm_list" > "parm_list_temp" + mv "parm_list_temp" "parm_list" + fi - $TOCGRIB2 < $PARMwmo/grib2_awpgfs_20km_${GRID}f${fcsthrs} >> $pgmout 2> errfile - export err=$? 
;err_chk - echo " error from tocgrib2=",$err + ${TOCGRIB2} < "parm_list" >> "${pgmout}" 2> errfile + export err=$?; err_chk || exit "${err}" - if [ $SENDCOM = "YES" ] ; then + if [[ ${SENDCOM} = "YES" ]]; then - ############################## - # Post Files to ${COMOUTwmo} - ############################## + ############################## + # Post Files to ${COM_ATMOS_WMO} + ############################## - mv grib2.awpgfs_20km_${GRID}_f${fcsthrs} ${COMOUTwmo}/grib2.awpgfs_20km_${GRID}_f${fcsthrs}.$job_name + mv "grib2.awpgfs_20km_${GRID}_f${fcsthrs}" \ + "${COM_ATMOS_WMO}/grib2.awpgfs_20km_${GRID}_f${fcsthrs}.${job_name}" - ############################## - # Distribute Data - ############################## + ############################## + # Distribute Data + ############################## - if [ "$SENDDBN" = 'YES' -o "$SENDAWIP" = 'YES' ] ; then - $DBNROOT/bin/dbn_alert NTC_LOW $NET $job ${COMOUTwmo}/grib2.awpgfs_20km_${GRID}_f${fcsthrs}.$job_name - else - msg="File ${COMOUTwmo}/grib2.awpgfs_20km_${GRID}_f${fcsthrs}.$job_name not posted to db_net." - postmsg "$jlogfile" "$msg" + if [[ "${SENDDBN}" = 'YES' || "${SENDAWIP}" = 'YES' ]]; then + "${DBNROOT}/bin/dbn_alert" NTC_LOW "${NET}" "${job}" \ + "${COM_ATMOS_WMO}/grib2.awpgfs_20km_${GRID}_f${fcsthrs}.${job_name}" + else + echo "File ${COM_ATMOS_WMO}/grib2.awpgfs_20km_${GRID}_f${fcsthrs}.${job_name} not posted to db_net." + fi fi - fi fi - msg="Awip Processing ${fcsthrs} hour completed normally" - postmsg "$jlogfile" "$msg" + echo "Awip Processing ${fcsthrs} hour completed normally" done -if [ -e "$pgmout" ] ; then - cat $pgmout +if [[ -e "${pgmout}" ]]; then + cat "${pgmout}" fi -############################################################################################ -# GOOD RUN -set +x -echo "**************JOB EXGFS_AWIPS_20KM_1P0DEG.SH.ECF COMPLETED NORMALLY ON THE WCOSS" -echo "**************JOB EXGFS_AWIPS_20KM_1P0DEG.SH.ECF COMPLETED NORMALLY ON THE WCOSS" -echo "**************JOB EXGFS_AWIPS_20KM_1P0DEG.SH.ECF COMPLETED NORMALLY ON THE WCOSS" -set -x -############################################################################################ - -msg="HAS COMPLETED NORMALLY!" -postmsg "$jlogfile" "$msg" ############## END OF SCRIPT ####################### diff --git a/scripts/exgfs_atmos_fbwind.sh b/scripts/exgfs_atmos_fbwind.sh index 2ca5870cbf..e7d0ff3d82 100755 --- a/scripts/exgfs_atmos_fbwind.sh +++ b/scripts/exgfs_atmos_fbwind.sh @@ -1,27 +1,27 @@ -#!/bin/ksh -echo "------------------------------------------------" -echo "JGFS_BULLS - 24hr GFS processing" -echo "------------------------------------------------" -echo "History: Jul 2004 - First implementation of this new script." -echo " FBWNDGFS (FB Winds) program for 15 sites outside" -echo " the Hawaiian Islands." -echo " Feb 2006 - L Sager Send bulletins to TOC via NTC. " -echo " Jul 2014 - B Vuong Modified to use GFS master GRIB2" -echo " and Add bulletins WINTEMV process." -echo " Sep 2016 - B Vuong Modified to use GFS 0p25 deg GRIB2" -echo " Nov 2019 - B Vuong Removed WINTEMV bulletin (retired)" +#! /usr/bin/env bash + +##################################################################### +# echo "------------------------------------------------" +# echo "JGFS_BULLS - 24hr GFS processing" +# echo "------------------------------------------------" +# echo "History: Jul 2004 - First implementation of this new script." +# echo " FBWNDGFS (FB Winds) program for 15 sites outside" +# echo " the Hawaiian Islands." +# echo " Feb 2006 - L Sager Send bulletins to TOC via NTC. 
" +# echo " Jul 2014 - B Vuong Modified to use GFS master GRIB2" +# echo " and Add bulletins WINTEMV process." +# echo " Sep 2016 - B Vuong Modified to use GFS 0p25 deg GRIB2" +# echo " Nov 2019 - B Vuong Removed WINTEMV bulletin (retired)" ##################################################################### +source "$HOMEgfs/ush/preamble.sh" + cd $DATA ###################### # Set up Here Files. ###################### -set -x -msg="Begin job for $job" -postmsg "$jlogfile" "$msg" - job_name=$(echo $job|sed 's/[jpt]gfs/gfs/') set +x @@ -31,7 +31,7 @@ echo " Process Bulletins of forecast winds and temps for Hawaii " echo " and 15 sites outside of the Hawaiian Islands. " echo "#############################################################" echo " " -set -x +set_trace export pgm=bulls_fbwndgfs . prep_step @@ -79,38 +79,10 @@ fi if test "$SENDDBN" = 'YES' then # make_ntc_bull.pl WMOBH NONE KWNO NONE tran.fbwnd_pacific ${COMOUTwmo}/tran.fbwnd_pacific.$job_name - ${UTILgfs}/ush/make_ntc_bull.pl WMOBH NONE KWNO NONE tran.fbwnd_pacific ${COMOUTwmo}/tran.fbwnd_pacific.$job_name + ${USHgfs}/make_ntc_bull.pl WMOBH NONE KWNO NONE tran.fbwnd_pacific ${COMOUTwmo}/tran.fbwnd_pacific.$job_name fi -# -# EMC is proposing to retire WINTEMV bulletin in GFS v16.0 -# - -# if test ${cycle} = 't00z' -o ${cycle} = 't12z' -# then -# -# set +x -# echo " " -# echo "#################################################" -# echo " Process 06, 12, 18 and 24 fcsthrs WINTEM Bulletins. " -# echo "#################################################" -# echo " " -# set -x -# sh $USHgfs/mkwintem.sh -# -#fi - -##################################################################### -# GOOD RUN -set +x -echo "**************JOB JGFS_FBWIND COMPLETED NORMALLY ON IBM-SP" -echo "**************JOB JGFS_FBWIND COMPLETED NORMALLY ON IBM-SP" -echo "**************JOB JGFS_FBWIND COMPLETED NORMALLY ON IBM-SP" -set -x ##################################################################### -msg='Job completed normally.' -echo $msg -postmsg "$jlogfile" "$msg" ############################### END OF SCRIPT ####################### diff --git a/scripts/exgfs_atmos_gempak_gif_ncdc_skew_t.sh b/scripts/exgfs_atmos_gempak_gif_ncdc_skew_t.sh index 4de9d33736..64562daeed 100755 --- a/scripts/exgfs_atmos_gempak_gif_ncdc_skew_t.sh +++ b/scripts/exgfs_atmos_gempak_gif_ncdc_skew_t.sh @@ -1,16 +1,15 @@ -#!/bin/sh +#! /usr/bin/env bash + ############################################################## # Add the NCDC GIF processing to the end of the gempak_gif job # There is no timing issue with the NCDC GIF, so it is # okay to just add it here. If timing becomes a problem # in the future, we should move it above somewhere else. ############################################################## -export PS4='exgempakgif_ncdc_skewt:$SECONDS + ' -set -xa + +source "$HOMEgfs/ush/preamble.sh" cd $DATA -msg="The NCDC GIF processing has begun" -postmsg "$jlogfile" "$msg" export NTS=$USHgempak/restore @@ -30,25 +29,23 @@ then while [ $icnt -lt 1000 ] do if [ -r ${COMIN}/${RUN}_${PDY}${cyc}f0${fhr} ] ; then - sleep 5 + sleep 5 break else - msg="The process is waiting ... ${GRIBFILE} file to proceed." - postmsg "${jlogfile}" "$msg" + echo "The process is waiting ... ${GRIBFILE} file to proceed." sleep 20 let "icnt=icnt+1" fi if [ $icnt -ge $maxtries ] then - msg="ABORTING: after 1 hour of waiting for ${GRIBFILE} file at F$fhr to end." - postmsg "${jlogfile}" "$msg" + echo "ABORTING: after 1 hour of waiting for ${GRIBFILE} file at F$fhr to end." 
export err=7 ; err_chk exit $err fi done cp ${COMIN}/${RUN}_${PDY}${cyc}f0${fhr} gem_grids${fhr}.gem - + # if [ $cyc -eq 00 -o $cyc -eq 12 ] #then $USHgempak/gempak_${RUN}_f${fhr}_gif.sh @@ -58,21 +55,19 @@ then fi #################################################################################### -echo "-----------------------------------------------------------------------------" -echo "GFS MAG postprocessing script exmag_sigman_skew_k_gfs_gif_ncdc_skew_t.sh " -echo "-----------------------------------------------------------------------------" -echo "History: Mar 2012 added to processing for enhanced MAG skew_t" -echo "2012-03-11 Mabe -- reworked script to add significant level " -echo " data to existing mandatory level data in a new file" -echo "2013-04-24 Mabe -- Reworked to remove unneeded output with " -echo " conversion to WCOSS" +# echo "-----------------------------------------------------------------------------" +# echo "GFS MAG postprocessing script exmag_sigman_skew_k_gfs_gif_ncdc_skew_t.sh " +# echo "-----------------------------------------------------------------------------" +# echo "History: Mar 2012 added to processing for enhanced MAG skew_t" +# echo "2012-03-11 Mabe -- reworked script to add significant level " +# echo " data to existing mandatory level data in a new file" +# echo "2013-04-24 Mabe -- Reworked to remove unneeded output with " +# echo " conversion to WCOSS" # Add ms to filename to make it different since it has both mandatory # and significant level data $COMOUT/${RUN}.${cycle}.msupperair # $COMOUT/${RUN}.${cycle}.msupperairtble ##################################################################################### -set -x - cd $DATA export RSHPDY=$(echo $PDY | cut -c5-)$(echo $PDY | cut -c3-4) @@ -80,7 +75,7 @@ export RSHPDY=$(echo $PDY | cut -c5-)$(echo $PDY | cut -c3-4) cp $HOMEgfs/gempak/dictionaries/sonde.land.tbl . cp $HOMEgfs/gempak/dictionaries/metar.tbl . sort -k 2n,2 metar.tbl > metar_stnm.tbl -cp $COMINgfs/${model}.$cycle.adpupa.tm00.bufr_d fort.40 +cp $COMINobsproc/${model}.$cycle.adpupa.tm00.bufr_d fort.40 export err=$? if [[ $err -ne 0 ]] ; then echo " File ${model}.$cycle.adpupa.tm00.bufr_d does not exist." @@ -112,14 +107,10 @@ fi fi ############################################################ -# GOOD RUN -set +x -echo "********** JGFS_ATMOS_GEMPAK_NCDC_UPAPGIF COMPLETED" -set -x -############################################################ + if [ -e "$pgmout" ] ; then cat $pgmout fi -msg="HAS COMPLETED NORMALLY!" + exit diff --git a/scripts/exgfs_atmos_gempak_meta.sh b/scripts/exgfs_atmos_gempak_meta.sh index dfd5b38392..04f4f1fc5c 100755 --- a/scripts/exgfs_atmos_gempak_meta.sh +++ b/scripts/exgfs_atmos_gempak_meta.sh @@ -1,9 +1,6 @@ -#!/bin/ksh +#! /usr/bin/env bash -set -x - -msg="JOB $job HAS BEGUN" -postmsg "$jlogfile" "$msg" +source "$HOMEgfs/ush/preamble.sh" cd $DATA @@ -14,13 +11,13 @@ fhr=$fhend export numproc=23 while [ $fhr -ge $fhbeg ] ; do - typeset -Z3 fhr - ls -l $COMIN/$GEMGRD1${fhr} - err1=$? - if [ $err1 -eq 0 -o $fhr -eq $fhbeg ] ; then - break - fi - fhr=$(expr $fhr - $fhinc) + fhr=$(printf "%03d" $fhr) + ls -l $COMIN/$GEMGRD1${fhr} + err1=$? + if [ $err1 -eq 0 -o $fhr -eq $fhbeg ] ; then + break + fi + fhr=$(expr $fhr - $fhinc) done maxtries=180 @@ -30,7 +27,7 @@ do_all=0 #loop through and process needed forecast hours while [ $fhr -le $fhend ] do - # + # # First check to see if this is a rerun. 
If so make all Meta files if [ $fhr -gt 126 -a $first_time -eq 0 ] ; then do_all=1 @@ -54,8 +51,7 @@ do fi if [ $icnt -ge $maxtries ] then - msg="ABORTING after 1 hour of waiting for gempak grid F$fhr to end." - postmsg "${jlogfile}" "$msg" + echo "ABORTING after 1 hour of waiting for gempak grid F$fhr to end." export err=7 ; err_chk exit $err fi @@ -73,9 +69,7 @@ do rm $DATA/poescript # fi - if [ $fhr -lt 100 ] ; then - typeset -Z2 fhr - fi + fhr=$(printf "%02d" $fhr) if [ $do_all -eq 1 ] ; then do_all=0 @@ -109,20 +103,19 @@ do # If this is the final fcst hour, alert the # file to all centers. -# - if [ $fhr -ge $fhend ] ; then +# + if [ 10#$fhr -ge $fhend ] ; then export DBN_ALERT_TYPE=GFS_METAFILE_LAST fi export fend=$fhr sleep 20 -# mpirun.lsf ntasks=${NTASKS_META:-$(cat $DATA/poescript | wc -l)} ptile=${PTILE_META:-4} threads=${NTHREADS_META:-1} export OMP_NUM_THREADS=$threads - APRUN="mpirun -n $ntasks cfp " + APRUN="mpiexec -l -n $ntasks -ppn $ntasks --cpu-bind verbose,core cfp" APRUN_METACFP=${APRUN_METACFP:-$APRUN} APRUNCFP=$(eval echo $APRUN_METACFP) @@ -130,7 +123,7 @@ do $APRUNCFP $DATA/poescript export err=$?; err_chk - typeset -Z3 fhr + fhr=$(printf "%03d" $fhr) if [ $fhr -eq 126 ] ; then let fhr=fhr+6 else @@ -139,14 +132,7 @@ do done ##################################################################### -# GOOD RUN -set +x -echo "**************JOB GFS_META COMPLETED NORMALLY on the IBM-SP" -echo "**************JOB GFS_META COMPLETED NORMALLY on the IBM-SP" -echo "**************JOB GFS_META COMPLETED NORMALLY on the IBM-SP" -set -x -##################################################################### -echo EXITING $0 + exit # diff --git a/scripts/exgfs_atmos_goes_nawips.sh b/scripts/exgfs_atmos_goes_nawips.sh index c18f2b8b42..76ae067280 100755 --- a/scripts/exgfs_atmos_goes_nawips.sh +++ b/scripts/exgfs_atmos_goes_nawips.sh @@ -1,16 +1,17 @@ -#!/bin/ksh +#! /usr/bin/env bash + ################################################################### -echo "----------------------------------------------------" -echo "exnawips - convert NCEP GRIB files into GEMPAK Grids" -echo "----------------------------------------------------" -echo "History: Mar 2000 - First implementation of this new script." -echo "S Lilly: May 2008 - add logic to make sure that all of the " -echo " data produced from the restricted ECMWF" -echo " data on the CCS is properly protected." -echo "C. Magee: 10/2013 - swap X and Y for rtgssthr Atl and Pac." +# echo "----------------------------------------------------" +# echo "exnawips - convert NCEP GRIB files into GEMPAK Grids" +# echo "----------------------------------------------------" +# echo "History: Mar 2000 - First implementation of this new script." +# echo "S Lilly: May 2008 - add logic to make sure that all of the " +# echo " data produced from the restricted ECMWF" +# echo " data on the CCS is properly protected." +# echo "C. Magee: 10/2013 - swap X and Y for rtgssthr Atl and Pac." 
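#------------------------------------------------------------------------------
# NOTE (editorial sketch, not part of this patch): the GEMPAK scripts in this
# patch replace ksh's "typeset -Z3 fhr" zero padding with printf, and prefix
# padded values with 10# in arithmetic so strings such as "008" are read as
# base-10 rather than rejected as invalid octal.  A minimal illustration of
# the idiom:
#------------------------------------------------------------------------------
fhr=8
fhr=$(printf "%03d" "${fhr}")        # "008"  (replaces: typeset -Z3 fhr)
next=$(( 10#${fhr} + 6 ))            # 14     (without 10#, bash rejects "008")
echo "current=${fhr} next=$(printf "%03d" "${next}")"
#------------------------------------------------------------------------------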
##################################################################### -set -xa +source "$HOMEgfs/ush/preamble.sh" cd $DATA @@ -19,15 +20,12 @@ cp $FIXgempak/g2vcrdwmo2.tbl g2vcrdwmo2.tbl cp $FIXgempak/g2varsncep1.tbl g2varsncep1.tbl cp $FIXgempak/g2vcrdncep1.tbl g2vcrdncep1.tbl -msg="Begin job for $job" -postmsg "$jlogfile" "$msg" - # # NAGRIB_TABLE=$FIXgempak/nagrib.tbl NAGRIB=$GEMEXE/nagrib2 # -entry=$(grep "^$RUN " $NAGRIB_TABLE | awk 'index($1,"#") != 1 {print $0}') +entry=$(grep "^$RUN2 " $NAGRIB_TABLE | awk 'index($1,"#") != 1 {print $0}') if [ "$entry" != "" ] ; then cpyfil=$(echo $entry | awk 'BEGIN {FS="|"} {print $2}') @@ -53,18 +51,12 @@ pdsext=no maxtries=180 fhcnt=$fstart while [ $fhcnt -le $fend ] ; do -# if [ $fhcnt -ge 100 ] ; then - typeset -Z3 fhr -# else -# typeset -Z2 fhr -# fi - fhr=$fhcnt + fhr=$(printf "%03d" $fhcnt) fhcnt3=$(expr $fhr % 3) - fhr3=$fhcnt - typeset -Z3 fhr3 + fhr3=$(printf "03d" $fhcnt) GRIBIN=$COMIN/${model}.${cycle}.${GRIB}${fhr}${EXT} - GEMGRD=${RUN}_${PDY}${cyc}f${fhr3} + GEMGRD=${RUN2}_${PDY}${cyc}f${fhr3} GRIBIN_chk=$GRIBIN @@ -79,8 +71,7 @@ while [ $fhcnt -le $fend ] ; do fi if [ $icnt -ge $maxtries ] then - msg="ABORTING after 1 hour of waiting for F$fhr to end." - postmsg "${jlogfile}" "$msg" + echo "ABORTING after 1 hour of waiting for F$fhr to end." export err=7 ; err_chk exit $err fi @@ -126,17 +117,7 @@ EOF let fhcnt=fhcnt+finc done -##################################################################### -# GOOD RUN -set +x -echo "**************JOB $RUN NAWIPS COMPLETED NORMALLY ON THE IBM" -echo "**************JOB $RUN NAWIPS COMPLETED NORMALLY ON THE IBM" -echo "**************JOB $RUN NAWIPS COMPLETED NORMALLY ON THE IBM" -set -x ##################################################################### -msg='Job completed normally.' -echo $msg -postmsg "$jlogfile" "$msg" ############################### END OF SCRIPT ####################### diff --git a/scripts/exgfs_atmos_grib2_special_npoess.sh b/scripts/exgfs_atmos_grib2_special_npoess.sh index 1e81f9815b..4009a8e66a 100755 --- a/scripts/exgfs_atmos_grib2_special_npoess.sh +++ b/scripts/exgfs_atmos_grib2_special_npoess.sh @@ -1,18 +1,16 @@ -#!/bin/ksh +#! /usr/bin/env bash + ##################################################################### -echo "-----------------------------------------------------" -echo " exglobal_grib2_special_npoess.sh" -echo " Jan 2008 - Chuang - Produces 1x1 degree special Grib from master." -echo "-----------------------------------------------------" +# echo "-----------------------------------------------------" +# echo " exglobal_grib2_special_npoess.sh" +# echo " Jan 2008 - Chuang - Produces 1x1 degree special Grib from master." 
+# echo "-----------------------------------------------------" ##################################################################### -set -x +source "$HOMEgfs/ush/preamble.sh" cd $DATA -msg="HAS BEGUN on $(hostname)" -postmsg "$jlogfile" "$msg" - ############################################################ # Define Variables: # ----------------- @@ -49,71 +47,62 @@ SLEEP_LOOP_MAX=$(expr $SLEEP_TIME / $SLEEP_INT) ############################################################################## export SHOUR=000 export FHOUR=024 -export fhr=$SHOUR -typeset -Z3 fhr + ############################################################ # Loop Through the Post Forecast Files ############################################################ -while test $fhr -le $FHOUR -do - - ############################### - # Start Looping for the - # existence of the restart files - ############################### - export pgm="postcheck" - ic=1 - while [ $ic -le $SLEEP_LOOP_MAX ] - do - if test -f $COMIN/gfs.t${cyc}z.pgrb2b.0p50.f${fhr}.idx - then - break - else - ic=$(expr $ic + 1) - sleep $SLEEP_INT - fi - ############################### - # If we reach this point assume - # fcst job never reached restart - # period and error exit - ############################### - if [ $ic -eq $SLEEP_LOOP_MAX ] - then - export err=9 - err_chk - fi - done - -###################################################################### -# Process Global NPOESS 0.50 GFS GRID PRODUCTS IN GRIB2 F000 - F024 # -###################################################################### - set -x - msg="Starting half degree grib generation for fhr=$fhr" - postmsg "$jlogfile" "$msg" - - paramlist=${PARMproduct}/global_npoess_paramlist_g2 - cp $COMIN/gfs.t${cyc}z.pgrb2.0p50.f${fhr} tmpfile2 - cp $COMIN/gfs.t${cyc}z.pgrb2b.0p50.f${fhr} tmpfile2b - cat tmpfile2 tmpfile2b > tmpfile - $WGRIB2 tmpfile | grep -F -f $paramlist | $WGRIB2 -i -grib pgb2file tmpfile - export err=$?; err_chk - - if test $SENDCOM = "YES" - then - cp pgb2file $COMOUT/${RUN}.${cycle}.pgrb2f${fhr}.npoess - - if test $SENDDBN = "YES" - then - $DBNROOT/bin/dbn_alert MODEL GFS_PGBNPOESS $job $COMOUT/${RUN}.${cycle}.pgrb2f${fhr}.npoess - else - msg="File ${RUN}.${cycle}.pgrb2f${fhr}.npoess not posted to db_net." 
- postmsg "$msg" - fi - echo "$PDY$cyc$fhr" > $COMOUT/${RUN}.t${cyc}z.control.halfdeg.npoess - fi - rm tmpfile pgb2file - export fhr=$(expr $fhr + $FHINC) - typeset -Z3 fhr +for (( fhr=$((10#${SHOUR})); fhr <= $((10#${FHOUR})); fhr = fhr + FHINC )); do + + fhr3=$(printf "%03d" "${fhr}") + + ############################### + # Start Looping for the + # existence of the restart files + ############################### + export pgm="postcheck" + ic=1 + while (( ic <= SLEEP_LOOP_MAX )); do + if [[ -f "${COM_ATMOS_GRIB_0p50}/gfs.t${cyc}z.pgrb2b.0p50.f${fhr3}.idx" ]]; then + break + else + ic=$((ic + 1)) + sleep "${SLEEP_INT}" + fi + ############################### + # If we reach this point assume + # fcst job never reached restart + # period and error exit + ############################### + if (( ic == SLEEP_LOOP_MAX )); then + echo "FATAL ERROR: 0p50 grib file not available after max sleep time" + export err=9 + err_chk || exit "${err}" + fi + done + + ###################################################################### + # Process Global NPOESS 0.50 GFS GRID PRODUCTS IN GRIB2 F000 - F024 # + ###################################################################### + paramlist=${PARMproduct}/global_npoess_paramlist_g2 + cp "${COM_ATMOS_GRIB_0p50}/gfs.t${cyc}z.pgrb2.0p50.f${fhr3}" tmpfile2 + cp "${COM_ATMOS_GRIB_0p50}/gfs.t${cyc}z.pgrb2b.0p50.f${fhr3}" tmpfile2b + cat tmpfile2 tmpfile2b > tmpfile + ${WGRIB2} tmpfile | grep -F -f ${paramlist} | ${WGRIB2} -i -grib pgb2file tmpfile + export err=$?; err_chk + + if [[ ${SENDCOM} == "YES" ]]; then + cp pgb2file "${COM_ATMOS_GOES}/${RUN}.${cycle}.pgrb2f${fhr3}.npoess" + + if [[ ${SENDDBN} == "YES" ]]; then + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGBNPOESS "${job}" \ + "${COM_ATMOS_GOES}/${RUN}.${cycle}.pgrb2f${fhr3}.npoess" + else + msg="File ${RUN}.${cycle}.pgrb2f${fhr3}.npoess not posted to db_net." 
+ postmsg "${msg}" || echo "${msg}" + fi + echo "${PDY}${cyc}${fhr3}" > "${COM_ATMOS_GOES}/${RUN}.t${cyc}z.control.halfdeg.npoess" + fi + rm tmpfile pgb2file done @@ -122,99 +111,81 @@ done ################################################################ export SHOUR=000 export FHOUR=180 -export fhr=$SHOUR -typeset -Z3 fhr ################################# # Process GFS PGRB2_SPECIAL_POST ################################# -while test $fhr -le $FHOUR -do - ############################### - # Start Looping for the - # existence of the restart files - ############################### - set +x - export pgm="postcheck" - ic=1 - while [ $ic -le $SLEEP_LOOP_MAX ] - do - if test -f $restart_file$fhr - then - break - else - ic=$(expr $ic + 1) - sleep $SLEEP_INT - fi - ############################### - # If we reach this point assume - # fcst job never reached restart - # period and error exit - ############################### - if [ $ic -eq $SLEEP_LOOP_MAX ] - then - export err=9 - err_chk - fi - done - set -x - - msg="Starting special grib file generation for fhr=$fhr" - postmsg "$jlogfile" "$msg" - - ############################### - # Put restart files into /nwges - # for backup to start Model Fcst - ############################### - - cp $COMIN/${RUN}.t${cyc}z.special.grb2f$fhr masterfile - -# $COPYGB2 -g "0 6 0 0 0 0 0 0 360 181 0 0 90000000 0 48 -90000000 359000000 1000000 1000000 0" -i1,1 -x masterfile pgb2file - -# export grid1p0="latlon 0:360:1.0 90:181:-1.0" - export grid0p25="latlon 0:1440:0.25 90:721:-0.25" - $WGRIB2 masterfile $opt1 $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid $grid0p25 pgb2file - -# creating higher resolution goes files for US centers -# $COPYGB2 -g "30 6 0 0 0 0 0 0 349 277 1000000 214500000 8 50000000 253000000 32463000 32463000 0 64 50000000 50000000 0 0" -i1,1 -x masterfile pgb2file2 - - export gridconus="lambert:253.0:50.0:50.0 214.5:349:32463.0 1.0:277:32463.0" - $WGRIB2 masterfile $opt1uv $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid $gridconus pgb2file2 - - $WGRIB2 pgb2file -s > pgb2ifile - - if test $SENDCOM = "YES" - then - - cp pgb2file $COMOUT/${RUN}.${cycle}.goessimpgrb2.0p25.f${fhr} - cp pgb2ifile $COMOUT/${RUN}.${cycle}.goessimpgrb2.0p25.f${fhr}.idx - - cp pgb2file2 $COMOUT/${RUN}.${cycle}.goessimpgrb2f${fhr}.grd221 - - if test $SENDDBN = "YES" - then - $DBNROOT/bin/dbn_alert MODEL GFS_GOESSIMPGB2_0P25 $job $COMOUT/${RUN}.${cycle}.goessimpgrb2.0p25.f${fhr} - $DBNROOT/bin/dbn_alert MODEL GFS_GOESSIMPGB2_0P25_WIDX $job $COMOUT/${RUN}.${cycle}.goessimpgrb2.0p25.f${fhr}.idx - $DBNROOT/bin/dbn_alert MODEL GFS_GOESSIMGRD221_PGB2 $job $COMOUT/${RUN}.${cycle}.goessimpgrb2f${fhr}.grd221 - fi - - echo "$PDY$cyc$fhr" > $COMOUT/${RUN}.t${cyc}z.control.goessimpgrb - fi - rm pgb2file2 pgb2ifile - - if test "$SENDECF" = 'YES' - then - export fhour=$(expr ${fhr} % 6 ) - fi - - export fhr=$(expr $fhr + $FHINC) - typeset -Z3 fhr -done +for (( fhr=$((10#${SHOUR})); fhr <= $((10#${FHOUR})); fhr = fhr + FHINC )); do + + fhr3=$(printf "%03d" "${fhr}") + + ############################### + # Start Looping for the + # existence of the restart files + ############################### + set +x + export pgm="postcheck" + ic=1 + while (( ic <= SLEEP_LOOP_MAX )); do + if [[ -f "${COM_ATMOS_GOES}/${RUN}.t${cyc}z.special.grb2if${fhr3}.idx" ]]; then + break + else + ic=$((ic + 1)) + sleep "${SLEEP_INT}" + fi + ############################### + # If we reach this point assume + # fcst job never reached restart + # period and error exit + 
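# Hedged sketch of the C-style loop that replaces the old while/expr counter
# above: the bounds are forced to base 10 with "10#" so zero-padded strings
# such as "024" are not read as octal, and the padded label is rebuilt inside
# the loop only where file names need it. SHOUR/FHOUR/FHINC values here are
# examples, not the operational settings.
SHOUR=000
FHOUR=024
FHINC=6
for (( fhr=$((10#${SHOUR})); fhr <= $((10#${FHOUR})); fhr = fhr + FHINC )); do
  fhr3=$(printf "%03d" "${fhr}")
  echo "integer hour ${fhr} -> file tag f${fhr3}"
done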
############################### + if (( ic == SLEEP_LOOP_MAX )); then + echo "FATAL ERROR: Special goes grib file not available after max sleep time" + export err=9 + err_chk || exit "${err}" + fi + done + set_trace + ############################### + # Put restart files into /nwges + # for backup to start Model Fcst + ############################### + cp "${COM_ATMOS_GOES}/${RUN}.t${cyc}z.special.grb2if${fhr3}" masterfile + export grid0p25="latlon 0:1440:0.25 90:721:-0.25" + ${WGRIB2} masterfile ${opt1} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} \ + ${opt27} ${opt28} -new_grid ${grid0p25} pgb2file + + export gridconus="lambert:253.0:50.0:50.0 214.5:349:32463.0 1.0:277:32463.0" + ${WGRIB2} masterfile ${opt1} ${opt21} ${opt22} ${opt23} ${opt24} ${opt25} ${opt26} \ + ${opt27} ${opt28} -new_grid ${gridconus} pgb2file2 + + ${WGRIB2} pgb2file -s > pgb2ifile + + if [[ ${SENDCOM} == "YES" ]]; then + + cp pgb2file "${COM_ATMOS_GOES}/${RUN}.${cycle}.goessimpgrb2.0p25.f${fhr3}" + cp pgb2ifile "${COM_ATMOS_GOES}/${RUN}.${cycle}.goessimpgrb2.0p25.f${fhr3}.idx" + cp pgb2file2 "${COM_ATMOS_GOES}/${RUN}.${cycle}.goessimpgrb2f${fhr3}.grd221" + + if [[ ${SENDDBN} == "YES" ]]; then + "${DBNROOT}/bin/dbn_alert" MODEL GFS_GOESSIMPGB2_0P25 "${job}" \ + "${COM_ATMOS_GOES}/${RUN}.${cycle}.goessimpgrb2.0p25.f${fhr3}" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_GOESSIMPGB2_0P25_WIDX "${job}" \ + "${COM_ATMOS_GOES}/${RUN}.${cycle}.goessimpgrb2.0p25.f${fhr3}.idx" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_GOESSIMGRD221_PGB2 "${job}" \ + "${COM_ATMOS_GOES}/${RUN}.${cycle}.goessimpgrb2f${fhr3}.grd221" + fi + + echo "${PDY}${cyc}${fhr3}" > "${COM_ATMOS_GOES}/${RUN}.t${cyc}z.control.goessimpgrb" + fi + rm pgb2file2 pgb2ifile + + if [[ ${SENDECF} == "YES" ]]; then + # TODO Does this even do anything? + export fhour=$(( fhr % 6 )) + fi -######################################################## +done -msg='ENDED NORMALLY.' -postmsg "$jlogfile" "$msg" ################## END OF SCRIPT ####################### diff --git a/scripts/exgfs_atmos_grib_awips.sh b/scripts/exgfs_atmos_grib_awips.sh index 2e5ec91526..f10508626f 100755 --- a/scripts/exgfs_atmos_grib_awips.sh +++ b/scripts/exgfs_atmos_grib_awips.sh @@ -1,4 +1,5 @@ -#!/bin/ksh +#! /usr/bin/env bash + ###################################################################### # UTILITY SCRIPT NAME : exgfs_grib_awips.sh # DATE WRITTEN : 10/04/2004 @@ -9,72 +10,57 @@ # 1st argument - Forecast Hour - format of 2I # ##################################################################### -echo "------------------------------------------------" -echo "JGFS_AWIPS_00/06/12/18 GFS postprocessing" -echo "------------------------------------------------" -echo "History: OCT 2004 - First implementation of this new script." -echo " JUN 2014 - Modified to remove process for AWIPS in GRIB2" -echo " to script exgfs_grib_awips_g2.sh and this " -echo " script only process AWIPS GRIB1 (211 and 225)" -echo " AUG 2015 - Modified for WCOSS phase2" -echo " FEB 2019 - Removed grid 225" +# echo "------------------------------------------------" +# echo "JGFS_AWIPS_00/06/12/18 GFS postprocessing" +# echo "------------------------------------------------" +# echo "History: OCT 2004 - First implementation of this new script."
+# echo " JUN 2014 - Modified to remove process for AWIPS in GRIB2" +# echo " to script exgfs_grib_awips_g2.sh and this " +# echo " script only process AWIPS GRIB1 (211 and 225)" +# echo " AUG 2015 - Modified for WCOSS phase2" +# echo " FEB 2019 - Removed grid 225" ##################################################################### -set +x -fcsthrs="$1" -num=$# -job_name=$(echo $job|sed 's/[jpt]gfs/gfs/') -typeset -Z3 fcsthrs +source "${HOMEgfs}/ush/preamble.sh" -export PS4='gfs_grib_awips:f$fcsthrs:$SECONDS + ' -export SCALEDEC=${SCALDEC:-$USHgfs/scale_dec.sh} +fcsthrs="$1" +num=$# +job_name=${job/[jpt]gfs/gfs} -#if [ $fhcsthrs -t 100 ]; then -# fcsthrs=0$fcsthrs -#fi -if test "$num" -ge 1 -then +if (( num != 1 )); then echo "" - echo " Appropriate number of arguments were passed" + echo " FATAL ERROR: Incorrect number of arguments " echo "" -else echo "" - echo " FATAL ERROR: Number of arguments were not passed." - echo "" - echo "" - echo "Usage: $0 \$fcsthrs (3-digit) " + echo "Usage: $0 \${fcsthrs} (3 digits) " echo "" exit 16 fi -cd $DATA/awips_g1 +cd "${DATA}" || exit 2 + +fcsthrs=$(printf "%03d" "${fcsthrs}") -set -x +export SCALEDEC=${SCALDEC:-${USHgfs}/scale_dec.sh} + +cd ${DATA}/awips_g1 || exit 2 ############################################### # Wait for the availability of the pgrb file ############################################### icnt=1 -while [ $icnt -lt 1000 ] -do - if [ -s $COMIN/${RUN}.${cycle}.pgrb2b.0p25.f${fcsthrs}.idx ] - then - break - fi - - sleep 10 - icnt=$((icnt + 1)) - if [ $icnt -ge 180 ] - then - msg="ABORTING after 30 min of waiting for the pgrb file!" - err_exit $msg - fi -done +while (( icnt < 1000 )); do + if [[ -s "${COM_ATMOS_GRIB_0p25}/${RUN}.${cycle}.pgrb2b.0p25.f${fcsthrs}.idx" ]]; then + break + fi -######################################## -msg="HAS BEGUN!" 
-postmsg "$jlogfile" "$msg" -######################################## + sleep 10 + icnt=$((icnt + 1)) + if (( icnt >= 180 )); then + msg="FATAL ERROR: No GFS pgrb2 file after 30 min of waiting" + err_exit "${msg}" + fi +done echo " ------------------------------------------" echo " BEGIN MAKING GFS GRIB1 AWIPS PRODUCTS" @@ -86,79 +72,68 @@ echo "###############################################" echo " Process GFS GRIB1 AWIP PRODUCTS (211) " echo "###############################################" echo " " -set -x +set_trace - cp $COMIN/gfs.t${cyc}z.pgrb2.0p25.f${fcsthrs} tmpfile2 - cp $COMIN/gfs.t${cyc}z.pgrb2b.0p25.f${fcsthrs} tmpfile2b - cat tmpfile2 tmpfile2b > tmpfile - $WGRIB2 tmpfile | grep -F -f $PARMproduct/gfs_awips_parmlist_g2 | $WGRIB2 -i -grib masterfile tmpfile - $SCALEDEC masterfile - $CNVGRIB -g21 masterfile masterfile.grib1 +cp "${COM_ATMOS_GRIB_0p25}/gfs.t${cyc}z.pgrb2.0p25.f${fcsthrs}" "tmpfile2" +cp "${COM_ATMOS_GRIB_0p25}/gfs.t${cyc}z.pgrb2b.0p25.f${fcsthrs}" "tmpfile2b" +cat tmpfile2 tmpfile2b > tmpfile +${WGRIB2} tmpfile | grep -F -f "${PARMproduct}/gfs_awips_parmlist_g2" | \ + ${WGRIB2} -i -grib masterfile tmpfile +${SCALEDEC} masterfile +${CNVGRIB} -g21 masterfile masterfile.grib1 - ln -s masterfile.grib1 fort.11 +ln -s masterfile.grib1 fort.11 -# $OVERGRIDID << EOF - ${UTILgfs}/exec/overgridid << EOF +"${HOMEgfs}/exec/overgridid.x" << EOF 255 EOF - mv fort.51 master.grbf${fcsthrs} - rm fort.11 +mv fort.51 "master.grbf${fcsthrs}" +rm fort.11 - $GRBINDEX master.grbf${fcsthrs} master.grbif${fcsthrs} +${GRBINDEX} "master.grbf${fcsthrs}" "master.grbif${fcsthrs}" ############################################################### # Process GFS GRIB1 AWIP GRIDS 211 PRODUCTS ############################################################### - executable=mkgfsawps - DBNALERT_TYPE=GRIB_LOW +DBNALERT_TYPE=GRIB_LOW - startmsg +startmsg # GRID=211 out to 240 hours: - export GRID=211 - export FORT11=master.grbf${fcsthrs} - export FORT31=master.grbif${fcsthrs} - export FORT51=xtrn.awpgfs${fcsthrs}.${GRID} +export GRID=211 +export FORT11="master.grbf${fcsthrs}" +export FORT31="master.grbif${fcsthrs}" +export FORT51="xtrn.awpgfs${fcsthrs}.${GRID}" # $MKGFSAWPS < $PARMwmo/grib_awpgfs${fcsthrs}.${GRID} parm=KWBC >> $pgmout 2>errfile - ${UTILgfs}/exec/mkgfsawps < $PARMwmo/grib_awpgfs${fcsthrs}.${GRID} parm=KWBC >> $pgmout 2>errfile - export err=$?; err_chk - ############################## - # Post Files to ${COMOUTwmo} - ############################## +"${HOMEgfs}/exec/mkgfsawps.x" < "${PARMwmo}/grib_awpgfs${fcsthrs}.${GRID}" parm=KWBC >> "${pgmout}" 2>errfile +export err=$?; err_chk +############################## +# Post Files to ${COM_ATMOS_WMO} +############################## - if test "$SENDCOM" = 'YES' - then - cp xtrn.awpgfs${fcsthrs}.${GRID} ${COMOUTwmo}/xtrn.awpgfs${fcsthrs}.${GRID}.$job_name +if [[ "${SENDCOM}" = 'YES' ]]; then + cp "xtrn.awpgfs${fcsthrs}.${GRID}" "${COM_ATMOS_WMO}/xtrn.awpgfs${fcsthrs}.${GRID}.${job_name}" - ############################## - # Distribute Data - ############################## + ############################## + # Distribute Data + ############################## - if [ "$SENDDBN" = 'YES' -o "$SENDAWIP" = 'YES' ] ; then - $DBNROOT/bin/dbn_alert $DBNALERT_TYPE $NET $job ${COMOUTwmo}/xtrn.awpgfs${fcsthrs}.${GRID}.$job_name - else - msg="File $output_grb.$job_name not posted to db_net." 
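# Hedged sketch of the inventory-filter pipeline used above to subset GRIB2
# records: wgrib2 prints the inventory, grep -F -f keeps only records matching
# a parameter list (one search string per line), and "wgrib2 -i -grib" writes
# just those records. The file names here are placeholders; wgrib2 and the
# parameter list must exist on the system for this to run.
WGRIB2=${WGRIB2:-wgrib2}
paramlist=./gfs_awips_parmlist_g2.sample
${WGRIB2} ./tmpfile.sample | grep -F -f "${paramlist}" | \
  ${WGRIB2} -i -grib ./subset.sample.grb2 ./tmpfile.sample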
- postmsg "$jlogfile" "$msg" - fi + if [[ "${SENDDBN}" == 'YES' || "${SENDAWIP}" == 'YES' ]] ; then + "${DBNROOT}/bin/dbn_alert" "${DBNALERT_TYPE}" "${NET}" "${job}" \ + "${COM_ATMOS_WMO}/xtrn.awpgfs${fcsthrs}.${GRID}.${job_name}" + else + echo "File ${output_grb}.${job_name} not posted to db_net." fi +fi -if [ -e "$pgmout" ] ; then - cat $pgmout +if [[ -e "${pgmout}" ]] ; then + cat ${pgmout} fi -############################################################################### -# GOOD RUN -set +x -echo "**************JOB EXGFS_GRIB_AWIPS.SH.ECF COMPLETED NORMALLY ON THE IBM" -echo "**************JOB EXGFS_GRIB_AWIPS.SH.ECF COMPLETED NORMALLY ON THE IBM" -echo "**************JOB EXGFS_GRIB_AWIPS.SH.ECF COMPLETED NORMALLY ON THE IBM" -set -x ############################################################################### -msg="HAS COMPLETED NORMALLY!" -postmsg "$jlogfile" "$msg" ############## END OF SCRIPT ####################### diff --git a/scripts/exgfs_atmos_nawips.sh b/scripts/exgfs_atmos_nawips.sh index 8e1ba652d6..07b0ca8b3f 100755 --- a/scripts/exgfs_atmos_nawips.sh +++ b/scripts/exgfs_atmos_nawips.sh @@ -1,15 +1,16 @@ -#!/bin/ksh +#! /usr/bin/env bash + ################################################################### -echo "----------------------------------------------------" -echo "exnawips - convert NCEP GRIB files into GEMPAK Grids" -echo "----------------------------------------------------" -echo "History: Mar 2000 - First implementation of this new script." -echo "S Lilly: May 2008 - add logic to make sure that all of the " -echo " data produced from the restricted ECMWF" -echo " data on the CCS is properly protected." +# echo "----------------------------------------------------" +# echo "exnawips - convert NCEP GRIB files into GEMPAK Grids" +# echo "----------------------------------------------------" +# echo "History: Mar 2000 - First implementation of this new script." +# echo "S Lilly: May 2008 - add logic to make sure that all of the " +# echo " data produced from the restricted ECMWF" +# echo " data on the CCS is properly protected." ##################################################################### -set -xa +source "$HOMEgfs/ush/preamble.sh" "${2}" #### If EMC GFS PARA runs hourly file are not available, The ILPOST #### will set to 3 hour in EMC GFS PARA. 
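# Small aside on the ILPOST default set just below: "${ILPOST:-1}" keeps an
# externally exported value (e.g. 3 when EMC GFS PARA runs only write 3-hourly
# files) and otherwise falls back to hourly. Values here are illustrative only.
unset ILPOST
echo "increment: ${ILPOST:-1} hour(s)"   # -> 1 when ILPOST is not set
ILPOST=3
echo "increment: ${ILPOST:-1} hour(s)"   # -> 3 when the parent job exports it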
@@ -17,22 +18,17 @@ set -xa export ILPOST=${ILPOST:-1} cd $DATA -RUN=$1 +RUN2=$1 fend=$2 DBN_ALERT_TYPE=$3 +destination=${4} -export 'PS4=$RUN:$SECONDS + ' - -DATA_RUN=$DATA/$RUN +DATA_RUN=$DATA/$RUN2 mkdir -p $DATA_RUN cd $DATA_RUN -msg="Begin job for $job" -postmsg "$jlogfile" "$msg" - - # -NAGRIB=$GEMEXE/nagrib2_nc +NAGRIB=$GEMEXE/nagrib2 # cpyfil=gds @@ -49,156 +45,135 @@ maxtries=360 fhcnt=$fstart while [ $fhcnt -le $fend ] ; do -if mkdir lock.$fhcnt ; then - cd lock.$fhcnt - cp $FIXgempak/g2varswmo2.tbl g2varswmo2.tbl - cp $FIXgempak/g2vcrdwmo2.tbl g2vcrdwmo2.tbl - cp $FIXgempak/g2varsncep1.tbl g2varsncep1.tbl - cp $FIXgempak/g2vcrdncep1.tbl g2vcrdncep1.tbl - - typeset -Z3 fhr - - fhr=$fhcnt - fhcnt3=$(expr $fhr % 3) - - fhr3=$fhcnt - typeset -Z3 fhr3 - - GEMGRD=${RUN}_${PDY}${cyc}f${fhr3} - -# Set type of Interpolation for WGRIB2 - export opt1=' -set_grib_type same -new_grid_winds earth ' - export opt1uv=' -set_grib_type same -new_grid_winds grid ' - export opt21=' -new_grid_interpolation bilinear -if ' - export opt22=":(CSNOW|CRAIN|CFRZR|CICEP|ICSEV):" - export opt23=' -new_grid_interpolation neighbor -fi ' - export opt24=' -set_bitmap 1 -set_grib_max_bits 16 -if ' - export opt25=":(APCP|ACPCP|PRATE|CPRAT):" - export opt26=' -set_grib_max_bits 25 -fi -if ' - export opt27=":(APCP|ACPCP|PRATE|CPRAT|DZDT):" - export opt28=' -new_grid_interpolation budget -fi ' - export TRIMRH=$HOMEgfs/ush/trim_rh.sh - - if [ $RUN = "gfs_0p50" ]; then - export GRIBIN=$COMIN/${model}.${cycle}.pgrb2.0p50.f${fhr} - GRIBIN_chk=$COMIN/${model}.${cycle}.pgrb2.0p50.f${fhr}.idx - elif [ $RUN = "gfs_0p25" -o $RUN = "gdas_0p25" -o $RUN = "gfs35_atl" -o $RUN = "gfs35_pac" -o $RUN = "gfs40" ]; then - export GRIBIN=$COMIN/${model}.${cycle}.pgrb2.0p25.f${fhr} - GRIBIN_chk=$COMIN/${model}.${cycle}.pgrb2.0p25.f${fhr}.idx - else - export GRIBIN=$COMIN/${model}.${cycle}.pgrb2.1p00.f${fhr} - GRIBIN_chk=$COMIN/${model}.${cycle}.pgrb2.1p00.f${fhr}.idx - fi - - icnt=1 - while [ $icnt -lt 1000 ] - do - if [ -r $GRIBIN_chk ] ; then - sleep 5 - break - else - msg="The process is waiting ... ${GRIBIN_chk} file to proceed." - postmsg "${jlogfile}" "$msg" - sleep 10 - let "icnt=icnt+1" - fi - if [ $icnt -ge $maxtries ] - then - msg="ABORTING: after 1 hour of waiting for ${GRIBIN_chk} file at F$fhr to end." 
- postmsg "${jlogfile}" "$msg" - export err=7 ; err_chk - exit $err - fi - done - -case $RUN in - gfs35_pac) -# $COPYGB2 -g "0 6 0 0 0 0 0 0 416 186 0 0 75125000 130000000 48 17000000 260000000 312000 312000 0" -x $GRIBIN grib$fhr -# NEW define gfs35_pac="0 6 0 0 0 0 0 0 416 186 0 -1 75125000 130000000 48 17405000 259480000 312000 312000 0" -# $COPYGB2 -g "0 6 0 0 0 0 0 0 416 186 0 -1 75125000 130000000 48 17405000 259480000 312000 312000 0" -x $GRIBIN grib$fhr - - export gfs35_pac='latlon 130.0:416:0.312 75.125:186:-0.312' - $WGRIB2 $GRIBIN $opt1 $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid ${gfs35_pac} grib$fhr - $TRIMRH grib$fhr - ;; - gfs35_atl) -# $COPYGB2 -g "0 6 0 0 0 0 0 0 480 242 0 0 75125000 230000000 48 -500000 20000000 312000 312000 0" -x $GRIBIN grib$fhr -# NEW define gfs35_atl="0 6 0 0 0 0 0 0 480 242 0 -1 75125000 230000000 48 -67000 19448000 312000 312000 0" -# $COPYGB2 -g "0 6 0 0 0 0 0 0 480 242 0 -1 75125000 230000000 48 -67000 19448000 312000 312000 0" -x $GRIBIN grib$fhr - - export gfs35_atl='latlon 230.0:480:0.312 75.125:242:-0.312' - $WGRIB2 $GRIBIN $opt1 $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid ${gfs35_atl} grib$fhr - $TRIMRH grib$fhr - ;; - gfs40) -# $COPYGB2 -g "30 6 0 0 0 0 0 0 185 129 12190000 226541000 8 25000000 265000000 40635000 40635000 0 64 25000000 25000000 0 0" -x $GRIBIN grib$fhr - - export gfs40='lambert:265.0:25.0:25.0 226.541:185:40635.0 12.19:129:40635.0' - $WGRIB2 $GRIBIN $opt1uv $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid ${gfs40} grib$fhr - $TRIMRH grib$fhr - ;; - *) - cp $GRIBIN grib$fhr -esac - - export pgm="nagrib2 F$fhr" - startmsg - - $NAGRIB << EOF - GBFILE = grib$fhr - INDXFL = - GDOUTF = $GEMGRD - PROJ = $proj - GRDAREA = $grdarea - KXKY = $kxky - MAXGRD = $maxgrd - CPYFIL = $cpyfil - GAREA = $garea - OUTPUT = $output - GBTBLS = $gbtbls - GBDIAG = - PDSEXT = $pdsext - l - r + if mkdir "lock.${fhcnt}" ; then + cd lock.$fhcnt + cp $FIXgempak/g2varswmo2.tbl g2varswmo2.tbl + cp $FIXgempak/g2vcrdwmo2.tbl g2vcrdwmo2.tbl + cp $FIXgempak/g2varsncep1.tbl g2varsncep1.tbl + cp $FIXgempak/g2vcrdncep1.tbl g2vcrdncep1.tbl + + fhr=$(printf "%03d" "${fhcnt}") + + GEMGRD=${RUN2}_${PDY}${cyc}f${fhr} + + # Set type of Interpolation for WGRIB2 + export opt1=' -set_grib_type same -new_grid_winds earth ' + export opt1uv=' -set_grib_type same -new_grid_winds grid ' + export opt21=' -new_grid_interpolation bilinear -if ' + export opt22=":(CSNOW|CRAIN|CFRZR|CICEP|ICSEV):" + export opt23=' -new_grid_interpolation neighbor -fi ' + export opt24=' -set_bitmap 1 -set_grib_max_bits 16 -if ' + export opt25=":(APCP|ACPCP|PRATE|CPRAT):" + export opt26=' -set_grib_max_bits 25 -fi -if ' + export opt27=":(APCP|ACPCP|PRATE|CPRAT|DZDT):" + export opt28=' -new_grid_interpolation budget -fi ' + export TRIMRH=$HOMEgfs/ush/trim_rh.sh + + case ${RUN2} in + # TODO: Why aren't we interpolating from the 0p25 grids for 35-km and 40-km? + 'gfs_0p50' | 'gfs_0p25') res=${RUN2: -4};; + *) res="1p00";; + esac + + source_var="COM_ATMOS_GRIB_${res}" + export GRIBIN="${!source_var}/${model}.${cycle}.pgrb2.${res}.f${fhr}" + GRIBIN_chk="${!source_var}/${model}.${cycle}.pgrb2.${res}.f${fhr}.idx" + + icnt=1 + while [ $icnt -lt 1000 ]; do + if [ -r $GRIBIN_chk ] ; then + sleep 5 + break + else + echo "The process is waiting ... ${GRIBIN_chk} file to proceed." + sleep 10 + let "icnt=icnt+1" + fi + if [ $icnt -ge $maxtries ]; then + echo "ABORTING: after 1 hour of waiting for ${GRIBIN_chk} file at F$fhr to end." 
+ export err=7 ; err_chk + exit $err + fi + done + + case $RUN2 in + gfs35_pac) + # $COPYGB2 -g "0 6 0 0 0 0 0 0 416 186 0 0 75125000 130000000 48 17000000 260000000 312000 312000 0" -x $GRIBIN grib$fhr + # NEW define gfs35_pac="0 6 0 0 0 0 0 0 416 186 0 -1 75125000 130000000 48 17405000 259480000 312000 312000 0" + # $COPYGB2 -g "0 6 0 0 0 0 0 0 416 186 0 -1 75125000 130000000 48 17405000 259480000 312000 312000 0" -x $GRIBIN grib$fhr + + export gfs35_pac='latlon 130.0:416:0.312 75.125:186:-0.312' + $WGRIB2 $GRIBIN $opt1 $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid ${gfs35_pac} grib$fhr + $TRIMRH grib$fhr + ;; + gfs35_atl) + # $COPYGB2 -g "0 6 0 0 0 0 0 0 480 242 0 0 75125000 230000000 48 -500000 20000000 312000 312000 0" -x $GRIBIN grib$fhr + # NEW define gfs35_atl="0 6 0 0 0 0 0 0 480 242 0 -1 75125000 230000000 48 -67000 19448000 312000 312000 0" + # $COPYGB2 -g "0 6 0 0 0 0 0 0 480 242 0 -1 75125000 230000000 48 -67000 19448000 312000 312000 0" -x $GRIBIN grib$fhr + + export gfs35_atl='latlon 230.0:480:0.312 75.125:242:-0.312' + $WGRIB2 $GRIBIN $opt1 $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid ${gfs35_atl} grib$fhr + $TRIMRH grib$fhr + ;; + gfs40) + # $COPYGB2 -g "30 6 0 0 0 0 0 0 185 129 12190000 226541000 8 25000000 265000000 40635000 40635000 0 64 25000000 25000000 0 0" -x $GRIBIN grib$fhr + + export gfs40='lambert:265.0:25.0:25.0 226.541:185:40635.0 12.19:129:40635.0' + $WGRIB2 $GRIBIN $opt1uv $opt21 $opt22 $opt23 $opt24 $opt25 $opt26 $opt27 $opt28 -new_grid ${gfs40} grib$fhr + $TRIMRH grib$fhr + ;; + *) + cp $GRIBIN grib$fhr + esac + + export pgm="nagrib2 F$fhr" + startmsg + + $NAGRIB << EOF + GBFILE = grib$fhr + INDXFL = + GDOUTF = $GEMGRD + PROJ = $proj + GRDAREA = $grdarea + KXKY = $kxky + MAXGRD = $maxgrd + CPYFIL = $cpyfil + GAREA = $garea + OUTPUT = $output + GBTBLS = $gbtbls + GBDIAG = + PDSEXT = $pdsext + l + r EOF - export err=$?;err_chk - - if [ $SENDCOM = "YES" ] ; then - cpfs $GEMGRD $COMOUT/$GEMGRD - if [ $SENDDBN = "YES" ] ; then - $DBNROOT/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \ - $COMOUT/$GEMGRD - fi - fi - cd $DATA_RUN -else + export err=$?;err_chk + + if [[ ${SENDCOM} == "YES" ]] ; then + cpfs "${GEMGRD}" "${destination}/${GEMGRD}" + if [[ ${SENDDBN} == "YES" ]] ; then + "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" \ + "${destination}/${GEMGRD}" + fi + fi + cd $DATA_RUN + else if [ $fhcnt -ge 240 ] ; then - if [ $fhcnt -lt 276 -a $RUN = "gfs_0p50" ] ; then - let fhcnt=fhcnt+6 - else - let fhcnt=fhcnt+12 - fi - elif [ $fhcnt -lt 120 -a $RUN = "gfs_0p25" ] ; then -#### let fhcnt=fhcnt+1 - let fhcnt=fhcnt+$ILPOST + if [ $fhcnt -lt 276 -a $RUN2 = "gfs_0p50" ] ; then + let fhcnt=fhcnt+6 + else + let fhcnt=fhcnt+12 + fi + elif [ $fhcnt -lt 120 -a $RUN2 = "gfs_0p25" ] ; then + #### let fhcnt=fhcnt+1 + let fhcnt=fhcnt+$ILPOST else - let fhcnt=fhcnt+finc + fhcnt=$((ILPOST > finc ? fhcnt+ILPOST : fhcnt+finc )) fi -fi + fi done $GEMEXE/gpend ##################################################################### -# GOOD RUN -set +x -echo "**************JOB $RUN NAWIPS COMPLETED NORMALLY ON THE IBM" -echo "**************JOB $RUN NAWIPS COMPLETED NORMALLY ON THE IBM" -echo "**************JOB $RUN NAWIPS COMPLETED NORMALLY ON THE IBM" -set -x -##################################################################### -msg='Job completed normally.' 
-echo $msg -postmsg "$jlogfile" "$msg" ############################### END OF SCRIPT ####################### diff --git a/scripts/exgfs_atmos_post.sh b/scripts/exgfs_atmos_post.sh new file mode 100755 index 0000000000..40bde0f731 --- /dev/null +++ b/scripts/exgfs_atmos_post.sh @@ -0,0 +1,513 @@ +#! /usr/bin/env bash + +##################################################################### +# echo "-----------------------------------------------------" +# echo " exgfs_nceppost.sh" +# echo " Apr 99 - Michaud - Generated to post global forecast" +# echo " Mar 03 - Zhu - Add post for 0.5x0.5 degree" +# echo " Nov 03 - Gilbert - Modified from exglobal_post.sh.sms" +# echo " to run only one master post job." +# echo " Jan 07 - Cooke - Add DBNet Alert for Master files" +# echo " May 07 - Chuang - Modified scripts to run unified post" +# echo " Feb 10 - Carlis - Add 12-hr accum precip bucket at f192" +# echo " Jun 12 - Wang - Add option for grb2" +# echo " Jul 14 - Carlis - Add 0.25 deg master " +# echo " Mar 17 - F Yang - Modified for running fv3gfs" +# echo " Aug 17 - Meng - Add flags for turning on/off flx, gtg " +# echo " and satellite look like file creation" +# echo " and use 3-digit forecast hour naming" +# echo " post output files" +# echo " Dec 17 - Meng - Link sfc data file to flxfile " +# echo " since fv3gfs does not output sfc files any more." +# echo " Dec 17 - Meng - Add fv3gfs_downstream_nems.sh for pgb processing " +# echo " Jan 18 - Meng - Add flag PGBF for truning on/off pgb processing. " +# echo " Jan 18 - Meng - For EE2 standard, move IDRT POSTGPVARS setting" +# echo " from j-job script." +# echo " Feb 18 - Meng - Removed legacy setting for generating grib1 data" +# echo " and reading sigio model outputs." +# echo " Aug 20 - Meng - Remove .ecf extentsion per EE2 review." +# echo " Sep 20 - Meng - Update clean up files per EE2 review." +# echo " Dec 20 - Meng - Add alert for special data file." +# echo " Mar 21 - Meng - Update POSTGRB2TBL default setting." +# echo " Jun 21 - Mao - Instead of err_chk, catch err and print out" +# echo " WAFS failure warnings to avoid job crashing" +# echo " Oct 21 - Meng - Remove jlogfile for wcoss2 transition." +# echo " Feb 22 - Lin - Exception handling if anl input not found." +# echo "-----------------------------------------------------" +##################################################################### + +source "${HOMEgfs}/ush/preamble.sh" + +cd "${DATA}" || exit 1 + +export POSTGPSH=${POSTGPSH:-${USHgfs}/gfs_post.sh} +export GFSDOWNSH=${GFSDOWNSH:-${USHgfs}/fv3gfs_downstream_nems.sh} +export GFSDOWNSHF=${GFSDOWNSHF:-${USHgfs}/inter_flux.sh} +export GFSDWNSH=${GFSDWNSH:-${USHgfs}/fv3gfs_dwn_nems.sh} +export TRIMRH=${TRIMRH:-${USHgfs}/trim_rh.sh} +export MODICEC=${MODICEC:-${USHgfs}/mod_icec.sh} +export INLINE_POST=${INLINE_POST:-".false."} + +############################################################ +# Define Variables: +# ----------------- +# FH is the current forecast hour. +# SLEEP_TIME is the number of seconds to sleep before exiting with error. +# SLEEP_INT is the number of seconds to sleep between restrt file checks. +# restart_file is the name of the file to key off of to kick off post. 
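# A minimal sketch of the wait loop these variables drive: poll for the keying
# file every SLEEP_INT seconds and fail hard once SLEEP_TIME is exhausted.
# The file name and the short timings below are illustrative only.
SLEEP_TIME=60
SLEEP_INT=5
SLEEP_LOOP_MAX=$(( SLEEP_TIME / SLEEP_INT ))
restart_file="/tmp/example_logf006.txt"
ic=1
while (( ic <= SLEEP_LOOP_MAX )); do
  if [[ -f "${restart_file}" ]]; then
    break
  fi
  if (( ic == SLEEP_LOOP_MAX )); then
    echo "FATAL ERROR: ${restart_file} never appeared"
    exit 9
  fi
  ic=$(( ic + 1 ))
  sleep "${SLEEP_INT}"
done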
+############################################################ +export IO=${LONB:-1440} +export JO=${LATB:-721} +export OUTTYP=${OUTTYP:-4} +export FLXF=${FLXF:-"YES"} +export FLXGF=${FLXGF:-"YES"} +export GOESF=${GOESF:-"YES"} +export WAFSF=${WAFSF:-"NO"} +export PGBF=${PGBF:-"YES"} +export TCYC=${TCYC:-".t${cyc}z."} +export PREFIX=${PREFIX:-${RUN}${TCYC}} +export machine=${machine:-WCOSS2} + +########################### +# Specify Output layers +########################### +export POSTGPVARS="KPO=57,PO=1000.,975.,950.,925.,900.,875.,850.,825.,800.,775.,750.,725.,700.,675.,650.,625.,600.,575.,550.,525.,500.,475.,450.,425.,400.,375.,350.,325.,300.,275.,250.,225.,200.,175.,150.,125.,100.,70.,50.,40.,30.,20.,15.,10.,7.,5.,3.,2.,1.,0.7,0.4,0.2,0.1,0.07,0.04,0.02,0.01," + +########################################################## +# Specify variable to directly output pgrb2 files for GDAS/GFS +########################################################## +export IDRT=${IDRT:-0} # IDRT=0 is setting for outputting grib files on lat/lon grid + +############################################################ +# Post Analysis Files before starting the Forecast Post +############################################################ +# Process analysis when post_times is 00 +stime="$(echo "${post_times}" | cut -c1-3)" +export stime +export loganl="${COM_ATMOS_ANALYSIS}/${PREFIX}atmanl.nc" + +if [[ "${stime}" = "anl" ]]; then + if [[ -f "${loganl}" ]]; then + # add new environmental variables for running new ncep post + # Validation date + export VDATE=${PDY}${cyc} + # specify output file name from chgres which is input file name to nceppost + # if model already runs gfs io, make sure GFSOUT is linked to the gfsio file + # new imported variable for global_nceppost.sh + export GFSOUT=${PREFIX}gfsioanl + + # specify smaller control file for GDAS because GDAS does not + # produce flux file, the default will be /nwprod/parm/gfs_cntrl.parm + if [[ "${GRIBVERSION}" = 'grib2' ]]; then + # use grib2 nomonic table in product g2tmpl directory as default + export POSTGRB2TBL=${POSTGRB2TBL:-${g2tmpl_ROOT}/share/params_grib2_tbl_new} + export PostFlatFile=${PostFlatFile:-${PARMpost}/postxconfig-NT-GFS-ANL.txt} + export CTLFILE=${PARMpost}/postcntrl_gfs_anl.xml + fi + + [[ -f flxfile ]] && rm flxfile ; [[ -f nemsfile ]] && rm nemsfile + ln -fs "${COM_ATMOS_ANALYSIS}/${PREFIX}atmanl.nc" nemsfile + export NEMSINP=nemsfile + ln -fs "${COM_ATMOS_ANALYSIS}/${PREFIX}sfcanl.nc" flxfile + export FLXINP=flxfile + + export PGBOUT=pgbfile + export PGIOUT=pgifile + export PGBOUT2=pgbfile.grib2 + export PGIOUT2=pgifile.grib2.idx + export IGEN=${IGEN_ANL} + export FILTER=0 + + ${POSTGPSH} + export err=$?; err_chk + + if [[ "${GRIBVERSION}" = 'grib2' ]]; then + mv "${PGBOUT}" "${PGBOUT2}" + fi + + # Process pgb files + if [[ "${PGBF}" = 'YES' ]]; then + export FH=-1 + export downset=${downset:-2} + ${GFSDOWNSH} + export err=$?; err_chk + fi + + if [[ "${SENDCOM}" = 'YES' ]]; then + export fhr3=anl + if [[ "${GRIBVERSION}" = 'grib2' ]]; then + MASTERANL=${PREFIX}master.grb2${fhr3} + MASTERANLIDX=${PREFIX}master.grb2i${fhr3} + cp "${PGBOUT2}" "${COM_ATMOS_MASTER}/${MASTERANL}" + ${GRB2INDEX} "${PGBOUT2}" "${COM_ATMOS_MASTER}/${MASTERANLIDX}" + fi + + if [[ "${SENDDBN}" = 'YES' ]]; then + "${DBNROOT}/bin/dbn_alert" MODEL GFS_MSC_sfcanl "${job}" "${COM_ATMOS_ANALYSIS}/${PREFIX}sfcanl.nc" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_SA "${job}" "${COM_ATMOS_ANALYSIS}/${PREFIX}atmanl.nc" + if [[ "${PGBF}" = 'YES' ]]; then + "${DBNROOT}/bin/dbn_alert" 
MODEL GFS_PGB2_0P25 "${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2.0p25.anl" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_0P25_WIDX "${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2.0p25.anl.idx" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_0P25 "${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2b.0p25.anl" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_0P25_WIDX "${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2b.0p25.anl.idx" + + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_0P5 "${job}" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2.0p50.anl" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_0P5_WIDX "${job}" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2.0p50.anl.idx" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_0P5 "${job}" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2b.0p50.anl" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_0P5_WIDX "${job}" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2b.0p50.anl.idx" + + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_1P0 "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.anl" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_1P0_WIDX "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.anl.idx" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_1P0 "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2b.1p00.anl" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_1P0_WIDX "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2b.1p00.anl.idx" + fi + fi + fi + [[ -f pgbfile.grib2 ]] && rm pgbfile.grib2 + # ecflow_client --event release_pgrb2_anl + + ########################## WAFS U/V/T analysis start ########################## + # U/V/T on ICAO standard atmospheric pressure levels for WAFS verification + if [[ "${WAFSF}" = "YES" ]]; then + if [[ "${RUN}" = "gfs" && "${GRIBVERSION}" = 'grib2' ]]; then + export OUTTYP=${OUTTYP:-4} + + export PostFlatFile="${PARMpost}/postxconfig-NT-GFS-WAFS-ANL.txt" + export CTLFILE="${PARMpost}/postcntrl_gfs_wafs_anl.xml" + + export PGBOUT=wafsfile + export PGIOUT=wafsifile + + ${POSTGPSH} + export err=$? + if (( err != 0 )); then + echo " *** GFS POST WARNING: WAFS output failed for analysis, err=${err}" + else + # WAFS package doesn't process this part. 
+ # Need to be saved for WAFS U/V/T verification, + # resolution higher than WAFS 1.25 deg for future compatibility + wafsgrid="latlon 0:1440:0.25 90:721:-0.25" + ${WGRIB2} "${PGBOUT}" -set_grib_type same -new_grid_winds earth \ + -new_grid_interpolation bilinear -set_bitmap 1 \ + -new_grid ${wafsgrid} "${PGBOUT}.tmp" + + if [[ "${SENDCOM}" = "YES" ]]; then + cp "${PGBOUT}.tmp" "${COM_ATMOS_WAFS}/${PREFIX}wafs.0p25.anl" + ${WGRIB2} -s "${PGBOUT}.tmp" > "${COM_ATMOS_WAFS}/${PREFIX}wafs.0p25.anl.idx" + + # if [ $SENDDBN = YES ]; then + # $DBNROOT/bin/dbn_alert MODEL GFS_WAFS_GB2 $job $COMOUT/${PREFIX}wafs.0p25.anl + # $DBNROOT/bin/dbn_alert MODEL GFS_WAFS_GB2__WIDX $job $COMOUT/${PREFIX}wafs.0p25.anl.idx + # fi + fi + rm "${PGBOUT}" "${PGBOUT}.tmp" + fi + fi + fi + ########################## WAFS U/V/T analysis end ########################## + else + #### atmanl file not found need failing job + echo " *** FATAL ERROR: No model anl file output " + export err=9 + err_chk + fi +else ## not_anl if_stime + SLEEP_LOOP_MAX=$(( SLEEP_TIME / SLEEP_INT )) + + ############################################################ + # Loop Through the Post Forecast Files + ############################################################ + + for fhr in ${post_times}; do + echo "Start processing fhr=${post_times}" + ############################### + # Start Looping for the + # existence of the restart files + ############################### + export pgm="postcheck" + ic=1 + while (( ic <= SLEEP_LOOP_MAX )); do + if [[ -f "${restart_file}${fhr}.txt" ]]; then + break + else + ic=$(( ic + 1 )) + sleep "${SLEEP_INT}" + fi + ############################### + # If we reach this point assume + # fcst job never reached restart + # period and error exit + ############################### + if (( ic == SLEEP_LOOP_MAX )); then + echo " *** FATAL ERROR: No model output for f${fhr} " + export err=9 + err_chk + fi + done + + ############################### + # Put restart files into /nwges + # for backup to start Model Fcst + ############################### + [[ -f flxfile ]] && rm flxfile ; [[ -f nemsfile ]] && rm nemsfile + ln -fs "${COM_ATMOS_HISTORY}/${PREFIX}atmf${fhr}.nc" nemsfile + export NEMSINP=nemsfile + ln -fs "${COM_ATMOS_HISTORY}/${PREFIX}sfcf${fhr}.nc" flxfile + export FLXINP=flxfile + + if (( fhr > 0 )); then + export IGEN=${IGEN_FCST} + else + export IGEN=${IGEN_ANL} + fi + + # No shellcheck, NDATE is not a typo + # shellcheck disable=SC2153 + VDATE="$(${NDATE} "+${fhr}" "${PDY}${cyc}")" + # shellcheck disable= + export VDATE + export OUTTYP=${OUTTYP:-4} + export GFSOUT="${PREFIX}gfsio${fhr}" + + if [[ "${GRIBVERSION}" = 'grib2' ]]; then + export POSTGRB2TBL="${POSTGRB2TBL:-${g2tmpl_ROOT}/share/params_grib2_tbl_new}" + export PostFlatFile="${PostFlatFile:-${PARMpost}/postxconfig-NT-GFS.txt}" + + if [[ "${RUN}" = "gfs" ]]; then + export IGEN=${IGEN_GFS} + if (( fhr > 0 )); then export IGEN=${IGEN_FCST} ; fi + else + export IGEN=${IGEN_GDAS_ANL} + if (( fhr > 0 )); then export IGEN=${IGEN_FCST} ; fi + fi + if [[ "${RUN}" = "gfs" ]]; then + if (( fhr == 0 )); then + export PostFlatFile="${PARMpost}/postxconfig-NT-GFS-F00.txt" + export CTLFILE="${PARMpost}/postcntrl_gfs_f00.xml" + else + export CTLFILE="${CTLFILEGFS:-${PARMpost}/postcntrl_gfs.xml}" + fi + else + if (( fhr == 0 )); then + export PostFlatFile="${PARMpost}/postxconfig-NT-GFS-F00.txt" + export CTLFILE="${CTLFILEGFS:-${PARMpost}/postcntrl_gfs_f00.xml}" + else + export CTLFILE="${CTLFILEGFS:-${PARMpost}/postcntrl_gfs.xml}" + fi + fi + fi + + export 
FLXIOUT=flxifile + export PGBOUT=pgbfile + export PGIOUT=pgifile + export PGBOUT2=pgbfile.grib2 + export PGIOUT2=pgifile.grib2.idx + export FILTER=0 + if [[ "${GRIBVERSION}" = 'grib2' ]]; then + MASTERFL=${PREFIX}master.grb2f${fhr} + MASTERFLIDX=${PREFIX}master.grb2if${fhr} + fi + + if [[ "${INLINE_POST}" = ".false." ]]; then + ${POSTGPSH} + else + cp -p "${COM_ATMOS_MASTER}/${MASTERFL}" "${PGBOUT}" + fi + export err=$?; err_chk + + if [[ "${GRIBVERSION}" = 'grib2' ]]; then + mv "${PGBOUT}" "${PGBOUT2}" + fi + + # Process pgb files + if [[ "${PGBF}" = 'YES' ]]; then + export FH=$(( 10#${fhr} + 0 )) + export downset=${downset:-2} + ${GFSDOWNSH} + export err=$?; err_chk + fi + + if [[ "${SENDCOM}" = "YES" ]]; then + if [[ "${GRIBVERSION}" = 'grib2' ]]; then + if [[ "${INLINE_POST}" = ".false." ]]; then + cp "${PGBOUT2}" "${COM_ATMOS_MASTER}/${MASTERFL}" + fi + ${GRB2INDEX} "${PGBOUT2}" "${COM_ATMOS_MASTER}/${MASTERFLIDX}" + fi + + if [[ "${SENDDBN}" = 'YES' ]]; then + if [[ "${GRIBVERSION}" = 'grib2' ]]; then + if [[ "${PGBF}" = 'YES' ]]; then + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_0P25 "${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2.0p25.f${fhr}" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_0P25_WIDX "${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2.0p25.f${fhr}.idx" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_0P25 "${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2b.0p25.f${fhr}" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_0P25_WIDX "${job}" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2b.0p25.f${fhr}.idx" + + if [[ -s "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2.0p50.f${fhr}" ]]; then + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_0P5 "${job}" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2.0p50.f${fhr}" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_0P5_WIDX "${job}" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2.0p50.f${fhr}.idx" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_0P5 "${job}" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2b.0p50.f${fhr}" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_0P5_WIDX "${job}" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2b.0p50.f${fhr}.idx" + fi + + if [[ -s "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.f${fhr}" ]]; then + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_1P0 "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.f${fhr}" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2_1P0_WIDX "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.f${fhr}.idx" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_1P0 "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2b.1p00.f${fhr}" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PGB2B_1P0_WIDX "${job}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2b.1p00.f${fhr}.idx" + fi + fi + fi + fi + + export fhr + "${USHgfs}/gfs_transfer.sh" + fi + [[ -f pgbfile.grib2 ]] && rm pgbfile.grib2 + + + # use post to generate GFS Grib2 Flux file as model generated Flux file + # will be in nemsio format after FY17 upgrade. + if (( OUTTYP == 4 )) && [[ "${FLXF}" == "YES" ]]; then + if (( fhr == 0 )); then + export PostFlatFile="${PARMpost}/postxconfig-NT-GFS-FLUX-F00.txt" + export CTLFILE="${PARMpost}/postcntrl_gfs_flux_f00.xml" + else + export PostFlatFile="${PARMpost}/postxconfig-NT-GFS-FLUX.txt" + export CTLFILE="${PARMpost}/postcntrl_gfs_flux.xml" + fi + export PGBOUT=fluxfile + export FILTER=0 + export FLUXFL=${PREFIX}sfluxgrbf${fhr}.grib2 + FLUXFLIDX=${PREFIX}sfluxgrbf${fhr}.grib2.idx + + if [[ "${INLINE_POST}" = ".false." 
]]; then + ${POSTGPSH} + export err=$?; err_chk + mv fluxfile "${COM_ATMOS_MASTER}/${FLUXFL}" + fi + ${WGRIB2} -s "${COM_ATMOS_MASTER}/${FLUXFL}" > "${COM_ATMOS_MASTER}/${FLUXFLIDX}" + + #Add extra flux.1p00 file for coupled + if [[ "${FLXGF}" = 'YES' ]]; then + export FH=$(( 10#${fhr} + 0 )) + ${GFSDOWNSHF} + export err=$?; err_chk + fi + + if [[ "${SENDDBN}" = 'YES' ]]; then + "${DBNROOT}/bin/dbn_alert" MODEL GFS_SGB_GB2 "${job}" "${COM_ATMOS_MASTER}/${FLUXFL}" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_SGB_GB2_WIDX "${job}" "${COM_ATMOS_MASTER}/${FLUXFLIDX}" + fi + fi + + # process satellite look alike separately so that master pgb gets out in time + # set outtyp to 2 because master post already generates gfs io files + if [[ "${GOESF}" = "YES" ]]; then + export OUTTYP=${OUTTYP:-4} + + # specify output file name from chgres which is input file name to nceppost + # if model already runs gfs io, make sure GFSOUT is linked to the gfsio file + # new imported variable for global_post.sh + + export GFSOUT=${PREFIX}gfsio${fhr} + + # link satellite coefficients files, use hwrf version as ops crtm 2.0.5 + # does not new coefficient files used by post + export FIXCRTM="${FIXCRTM:-${CRTM_FIX}}" + "${USHgfs}/link_crtm_fix.sh" "${FIXCRTM}" + + if [[ "${GRIBVERSION}" = 'grib2' ]]; then + export PostFlatFile="${PARMpost}/postxconfig-NT-GFS-GOES.txt" + export CTLFILE="${PARMpost}/postcntrl_gfs_goes.xml" + fi + export FLXINP=flxfile + export FLXIOUT=flxifile + export PGBOUT=goesfile + export PGIOUT=goesifile + export FILTER=0 + export IO=0 + export JO=0 + export IGEN=0 + + if [[ "${NET}" = "gfs" ]]; then + ${POSTGPSH} + export err=$?; err_chk + fi + + if [[ "${GRIBVERSION}" = 'grib2' ]]; then + SPECIALFL="${PREFIX}special.grb2" + SPECIALFLIDX="${PREFIX}special.grb2i" + fi + fhr3=${fhr} + + if [[ "${SENDCOM}" = "YES" ]]; then + # echo "$PDY$cyc$pad$fhr" > $COMOUT/${RUN}.t${cyc}z.master.control + + mv goesfile "${COM_ATMOS_GOES}/${SPECIALFL}f${fhr}" + mv goesifile "${COM_ATMOS_GOES}/${SPECIALFLIDX}f${fhr}" + + if [[ "${SENDDBN}" = "YES" ]]; then + "${DBNROOT}/bin/dbn_alert" MODEL GFS_SPECIAL_GB2 "${job}" "${COM_ATMOS_GOES}/${SPECIALFL}f${fhr}" + fi + fi + fi + # end of satellite processing + + ########################## WAFS start ########################## + # Generate WAFS products on ICAO standard level. + # Do not need to be sent out to public, WAFS package will process the data. + if [[ "${WAFSF}" = "YES" ]] && (( 10#${fhr} <= 120 )); then + if [[ "${RUN}" = gfs && "${GRIBVERSION}" = 'grib2' ]]; then + export OUTTYP=${OUTTYP:-4} + + # Extend WAFS icing and gtg up to 120 hours + export PostFlatFile="${PARMpost}/postxconfig-NT-GFS-WAFS.txt" + export CTLFILE="${PARMpost}/postcntrl_gfs_wafs.xml" + + # gtg has its own configurations + cp "${PARMpost}/gtg.config.gfs" gtg.config + cp "${PARMpost}/gtg_imprintings.txt" gtg_imprintings.txt + + export PGBOUT=wafsfile + export PGIOUT=wafsifile + + # WAFS data is processed: + # hourly if fhr<=24 + # every 3 forecast hour if 24 ../${RUN}.${cycle}.bufrsnd.tar.gz -cd $DATA +cd "${COM_ATMOS_BUFR}" || exit 2 +tar -cf - . 
| /usr/bin/gzip > "${RUN}.${cycle}.bufrsnd.tar.gz" +cd "${DATA}" || exit 2 ######################################## # Send the single tar file to OSO ######################################## -if test "$SENDDBN" = 'YES' -then - $DBNROOT/bin/dbn_alert MODEL GFS_BUFRSND_TAR $job \ - $COMOUT/${RUN}.${cycle}.bufrsnd.tar.gz +if [[ "${SENDDBN}" == 'YES' ]]; then + "${DBNROOT}/bin/dbn_alert" MODEL GFS_BUFRSND_TAR "${job}" \ + "${COM_ATMOS_BUFR}/${RUN}.${cycle}.bufrsnd.tar.gz" fi ######################################## # Create Regional Collectives of BUFR data and # add appropriate WMO Headers. ######################################## -collect=' 1 2 3 4 5 6 7 8 9' -if [ $machine == "HERA" -o $machine == "JET" ]; then -for m in ${collect} -do -sh $USHbufrsnd/gfs_sndp.sh $m -done - -################################################ -# Convert the bufr soundings into GEMPAK files -################################################ -sh $USHbufrsnd/gfs_bfr2gpk.sh - -else rm -rf poe_col -for m in ${collect} -do -echo "sh $USHbufrsnd/gfs_sndp.sh $m " >> poe_col +for (( m = 1; m <10 ; m++ )); do + echo "sh ${USHbufrsnd}/gfs_sndp.sh ${m} " >> poe_col done -mv poe_col cmdfile +if [[ ${CFP_MP:-"NO"} == "YES" ]]; then + nl -n ln -v 0 poe_col > cmdfile +else + mv poe_col cmdfile +fi cat cmdfile chmod +x cmdfile ${APRUN_POSTSNDCFP} cmdfile -sh $USHbufrsnd/gfs_bfr2gpk.sh -fi -################################################ -# Convert the bufr soundings into GEMPAK files -################################################ -##$USHbufrsnd/gfs_bfr2gpk.sh - -##################################################################### -# GOOD RUN -set +x -echo "**************JOB GFS_meteogrm COMPLETED NORMALLY ON THE IBM" -echo "**************JOB GFS_meteogrm COMPLETED NORMALLY ON THE IBM" -echo "**************JOB GFS_meteogrm COMPLETED NORMALLY ON THE IBM" -set -x -##################################################################### - -msg='HAS COMPLETED NORMALLY.' -#postmsg "$jlogfile" "$msg" +sh "${USHbufrsnd}/gfs_bfr2gpk.sh" + ############## END OF SCRIPT ####################### diff --git a/scripts/exgfs_atmos_vminmon.sh b/scripts/exgfs_atmos_vminmon.sh new file mode 100755 index 0000000000..a1346d5f9e --- /dev/null +++ b/scripts/exgfs_atmos_vminmon.sh @@ -0,0 +1,116 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exgfs_vrfminmon.sh +# Script description: Runs data extract/validation for GSI normalization diag data +# +# Author: Ed Safford Org: NP23 Date: 2015-04-10 +# +# Abstract: This script runs the data extract/validation portion of the +# MinMon package. 
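# Hedged sketch of the MPMD command-file pattern used for the BUFR collectives
# above: one shell command per line goes into poe_col; when CFP_MP=YES the
# lines are numbered from 0 with nl so the MPMD launcher can map them to ranks.
# The echoed commands below stand in for the real gfs_sndp.sh calls.
rm -f poe_col
for (( m = 1; m < 10; m++ )); do
  echo "echo \"would run gfs_sndp.sh for collective ${m}\"" >> poe_col
done
if [[ ${CFP_MP:-"NO"} == "YES" ]]; then
  nl -n ln -v 0 poe_col > cmdfile    # prepend 0-based task numbers
else
  mv poe_col cmdfile
fi
chmod +x cmdfile
cat cmdfile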
+# +# Condition codes +# 0 - no problem encountered +# >0 - some problem encountered +# +################################################################################ + + +######################################## +# Set environment +######################################## +export RUN_ENVIR=${RUN_ENVIR:-nco} +export NET=${NET:-gfs} +export RUN=${RUN:-gfs} +export envir=${envir:-prod} + +######################################## +# Command line arguments +######################################## +export PDY=${1:-${PDY:?}} +export cyc=${2:-${cyc:?}} + +######################################## +# Directories +######################################## +export DATA=${DATA:-$(pwd)} + + +######################################## +# Filenames +######################################## +gsistat=${gsistat:-${COM_ATMOS_ANALYSIS}/gfs.t${cyc}z.gsistat} +export mm_gnormfile=${gnormfile:-${M_FIXgfs}/gfs_minmon_gnorm.txt} +export mm_costfile=${costfile:-${M_FIXgfs}/gfs_minmon_cost.txt} + +######################################## +# Other variables +######################################## +export MINMON_SUFFIX=${MINMON_SUFFIX:-GFS} +export PDATE=${PDY}${cyc} +export NCP=${NCP:-/bin/cp} +export pgm=exgfs_vrfminmon.sh + + + +if [[ ! -d ${DATA} ]]; then + mkdir $DATA +fi +cd $DATA + +###################################################################### + +data_available=0 + +if [[ -s ${gsistat} ]]; then + + data_available=1 + + #------------------------------------------------------------------ + # Copy the $MINMON_SUFFIX.gnorm_data.txt file to the working directory + # It's ok if it doesn't exist; we'll create a new one if needed. + #------------------------------------------------------------------ + if [[ -s ${M_TANKverf}/gnorm_data.txt ]]; then + $NCP ${M_TANKverf}/gnorm_data.txt gnorm_data.txt + elif [[ -s ${M_TANKverfM1}/gnorm_data.txt ]]; then + $NCP ${M_TANKverfM1}/gnorm_data.txt gnorm_data.txt + fi + + + #------------------------------------------------------------------ + # Run the child sccripts. + #------------------------------------------------------------------ + ${USHminmon}/minmon_xtrct_costs.pl ${MINMON_SUFFIX} ${PDY} ${cyc} ${gsistat} dummy + rc_costs=$? + echo "rc_costs = $rc_costs" + + ${USHminmon}/minmon_xtrct_gnorms.pl ${MINMON_SUFFIX} ${PDY} ${cyc} ${gsistat} dummy + rc_gnorms=$? + echo "rc_gnorms = $rc_gnorms" + + ${USHminmon}/minmon_xtrct_reduct.pl ${MINMON_SUFFIX} ${PDY} ${cyc} ${gsistat} dummy + rc_reduct=$? + echo "rc_reduct = $rc_reduct" + +fi + +##################################################################### +# Postprocessing + +err=0 +if [[ ${data_available} -ne 1 ]]; then + err=1 +elif [[ $rc_costs -ne 0 ]]; then + err=$rc_costs +elif [[ $rc_gnorms -ne 0 ]]; then + err=$rc_gnorms +elif [[ $rc_reduct -ne 0 ]]; then + err=$rc_reduct +fi + +exit ${err} + diff --git a/scripts/exgfs_pmgr.sh b/scripts/exgfs_pmgr.sh index d16db66032..a417bbed55 100755 --- a/scripts/exgfs_pmgr.sh +++ b/scripts/exgfs_pmgr.sh @@ -1,13 +1,14 @@ -#! /bin/ksh +#! 
/usr/bin/env bash + # # Script name: exgfs_pmgr.sh.sms # # This script monitors the progress of the gfs_fcst job # -set -x + +source "$HOMEgfs/ush/preamble.sh" hour=00 -typeset -Z2 hour TEND=384 TCP=385 @@ -17,15 +18,12 @@ fi while [ $hour -lt $TCP ]; do + hour=$(printf "%02d" $hour) echo $hour >>posthours - if [ $hour -lt 240 ] + if [ 10#$hour -lt 240 ] then - if [ $hour -eq 99 ] - then - typeset -Z3 hour - fi # JY if [ $hour -lt 12 ] - if [ $hour -lt 120 ] + if [ 10#$hour -lt 120 ] then let "hour=hour+1" else @@ -74,6 +72,5 @@ do done -echo Exiting $0 exit diff --git a/scripts/exgfs_prdgen_manager.sh b/scripts/exgfs_prdgen_manager.sh index 6584178a7e..7d0a95696b 100755 --- a/scripts/exgfs_prdgen_manager.sh +++ b/scripts/exgfs_prdgen_manager.sh @@ -1,13 +1,14 @@ -#! /bin/ksh +#! /usr/bin/env bash + # # Script name: exgfs_pmgr.sh.sms # # This script monitors the progress of the gfs_fcst job # -set -x + +source "$HOMEgfs/ush/preamble.sh" hour=00 -typeset -Z2 hour TEND=384 TCP=385 @@ -17,13 +18,11 @@ fi while [ $hour -lt $TCP ]; do + hour=$(printf "%02d" $hour) echo $hour >>pgrb2_hours - if [ $hour -lt 240 ] + if [ 10#$hour -lt 240 ] then - if [ $hour -eq 99 ]; then - typeset -Z3 hour - fi - if [ $hour -lt 120 ] + if [ 10#$hour -lt 120 ] then let "hour=hour+1" else @@ -71,6 +70,5 @@ do done -echo Exiting $0 exit diff --git a/scripts/exgfs_wave_init.sh b/scripts/exgfs_wave_init.sh index ec3e9443b2..2be224d1da 100755 --- a/scripts/exgfs_wave_init.sh +++ b/scripts/exgfs_wave_init.sh @@ -1,5 +1,5 @@ -#!/bin/bash -# +#! /usr/bin/env bash + ################################################################################ # # UNIX Script Documentation Block @@ -20,43 +20,35 @@ # # Attributes: # Language: Bourne-again (BASH) shell -# Machine: WCOSS-DELL-P3 # ############################################################################### # # --------------------------------------------------------------------------- # # 0. Preparations -# 0.a Basic modes of operation - set -x +source "${HOMEgfs}/ush/preamble.sh" + +# 0.a Basic modes of operation err=0 - # Use LOUD variable to turn on/off trace. Defaults to YES (on). - export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES - [[ "$LOUD" != YES ]] && set +x cd $DATA - msg="HAS BEGUN on $(hostname)" - postmsg "$jlogfile" "$msg" - msg="Starting MWW3 INIT CONFIG SCRIPT for ${CDUMP}wave" - postmsg "$jlogfile" "$msg" - set +x echo ' ' echo ' ********************************' echo ' *** MWW3 INIT CONFIG SCRIPT ***' echo ' ********************************' echo ' Initial configuration script' - echo " Model identifier : ${CDUMP}wave" + echo " Model identifier : ${RUN}wave" echo ' ' echo "Starting at : $(date)" echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace # Script will run only if pre-defined NTASKS # The actual work is distributed over these tasks. 
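# Quick standalone illustration of why "10#" shows up in the hour tests above:
# in shell arithmetic a zero-padded "08" or "09" is an invalid octal literal,
# while "10#08" is plain decimal 8. The hour value is only an example; the
# base prefix is interpreted in arithmetic contexts such as (( )) and $(( )).
hour=08
echo $(( 10#${hour} + 1 ))          # -> 9
if (( 10#${hour} < 120 )); then
  echo "hour ${hour} is before f120"
fi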
- if [ -z ${NTASKS} ] + if [ -z ${NTASKS} ] then echo "FATAL ERROR: requires NTASKS to be set " err=1; export err;${errchk} @@ -66,7 +58,7 @@ echo ' ' echo " Script set to run with $NTASKS tasks " echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace # --------------------------------------------------------------------------- # @@ -76,7 +68,7 @@ echo 'Preparing input files :' echo '-----------------------' echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace # 1.a Model definition files @@ -87,38 +79,34 @@ chmod 744 cmdfile # Eliminate duplicate grids - array=($WAVECUR_FID $WAVEICE_FID $WAVEWND_FID $waveuoutpGRD $waveGRD $waveesmfGRD $wavesbsGRD $wavepostGRD $waveinterpGRD) + array=($WAVECUR_FID $WAVEICE_FID $WAVEWND_FID $waveuoutpGRD $waveGRD $waveesmfGRD $wavepostGRD $waveinterpGRD) grdALL=$(printf "%s\n" "${array[@]}" | sort -u | tr '\n' ' ') - for grdID in ${grdALL} - do - if [ -f "$COMIN/rundata/${CDUMP}wave.mod_def.${grdID}" ] - then + for grdID in ${grdALL}; do + if [[ -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]]; then set +x - echo " Mod def file for $grdID found in ${COMIN}/rundata. copying ...." - [[ "$LOUD" = YES ]] && set -x - cp $COMIN/rundata/${CDUMP}wave.mod_def.${grdID} mod_def.$grdID + echo " Mod def file for ${grdID} found in ${COM_WAVE_PREP}. copying ...." + set_trace + cp "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" "mod_def.${grdID}" else set +x - echo " Mod def file for $grdID not found in ${COMIN}/rundata. Setting up to generate ..." + echo " Mod def file for ${grdID} not found in ${COM_WAVE_PREP}. Setting up to generate ..." echo ' ' - [[ "$LOUD" = YES ]] && set -x - if [ -f $PARMwave/ww3_grid.inp.$grdID ] + set_trace + if [ -f $FIXwave/ww3_grid.inp.$grdID ] then - cp $PARMwave/ww3_grid.inp.$grdID ww3_grid.inp.$grdID + cp $FIXwave/ww3_grid.inp.$grdID ww3_grid.inp.$grdID fi if [ -f ww3_grid.inp.$grdID ] then set +x echo ' ' - echo " ww3_grid.inp.$grdID copied ($PARMwave/ww3_grid.inp.$grdID)." + echo " ww3_grid.inp.$grdID copied ($FIXwave/ww3_grid.inp.$grdID)." echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace else - msg="ABNORMAL EXIT: NO INP FILE FOR MODEL DEFINITION FILE" - postmsg "$jlogfile" "$msg" set +x echo ' ' echo '*********************************************************** ' @@ -126,12 +114,11 @@ echo '*********************************************************** ' echo " grdID = $grdID" echo ' ' - echo $msg - [[ "$LOUD" = YES ]] && set -x + set_trace err=2;export err;${errchk} fi - [[ ! -d $COMOUT/rundata ]] && mkdir -m 775 -p $COMOUT/rundata + [[ ! -d "${COM_WAVE_PREP}" ]] && mkdir -m 775 -p "${COM_WAVE_PREP}" if [ ${CFP_MP:-"NO"} = "YES" ]; then echo "$nmoddef $USHwave/wave_grid_moddef.sh $grdID > $grdID.out 2>&1" >> cmdfile else @@ -143,7 +130,7 @@ fi done -# 1.a.1 Execute parallel or serialpoe +# 1.a.1 Execute parallel or serialpoe if [ "$nmoddef" -gt '0' ] then @@ -152,7 +139,7 @@ echo ' ' echo " Generating $nmoddef mod def files" echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace # Set number of processes for mpmd wavenproc=$(wc -l cmdfile | awk '{print $1}') @@ -165,8 +152,7 @@ echo " Executing the mod_def command file at : $(date)" echo ' ------------------------------------' echo ' ' - [[ "$LOUD" = YES ]] && set -x - + set_trace if [ "$NTASKS" -gt '1' ] then if [ ${CFP_MP:-"NO"} = "YES" ]; then @@ -179,7 +165,7 @@ ./cmdfile exit=$? 
fi - + if [ "$exit" != '0' ] then set +x @@ -189,25 +175,21 @@ echo '********************************************************' echo ' See Details Below ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace fi - - fi + + fi # 1.a.3 File check - for grdID in ${grdALL} - do - if [ -f ${COMOUT}/rundata/${CDUMP}wave.mod_def.$grdID ] - then + for grdID in ${grdALL}; do + if [[ -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]]; then set +x echo ' ' echo " mod_def.$grdID succesfully created/copied " echo ' ' - [[ "$LOUD" = YES ]] && set -x - else - msg="ABNORMAL EXIT: NO MODEL DEFINITION FILE" - postmsg "$jlogfile" "$msg" + set_trace + else set +x echo ' ' echo '********************************************** ' @@ -215,24 +197,14 @@ echo '********************************************** ' echo " grdID = $grdID" echo ' ' - echo $msg sed "s/^/$grdID.out : /g" $grdID.out - [[ "$LOUD" = YES ]] && set -x + set_trace err=3;export err;${errchk} fi done # --------------------------------------------------------------------------- # -# 2. Ending - - set +x - echo ' ' - echo "Ending at : $(date)" - echo ' ' - echo ' *** End of MWW3 Init Config ***' - echo ' ' - [[ "$LOUD" = YES ]] && set -x +# 2. Ending - exit $err # End of MWW3 init config script ------------------------------------------- # diff --git a/scripts/exgfs_wave_nawips.sh b/scripts/exgfs_wave_nawips.sh index c4fa9e764d..09d23ec685 100755 --- a/scripts/exgfs_wave_nawips.sh +++ b/scripts/exgfs_wave_nawips.sh @@ -1,20 +1,21 @@ -#!/bin/ksh +#! /usr/bin/env bash + ################################################################### -echo "----------------------------------------------------" -echo "exnawips - convert NCEP GRIB files into GEMPAK Grids" -echo "----------------------------------------------------" -echo "History: Mar 2000 - First implementation of this new script." -echo "Sept 2011 - First implementation of this new script based on" -echo " /nwprod/scripts/exnawips.sh.sms" -echo " March 2020- Modified for GEFSv12.0" +# echo "----------------------------------------------------" +# echo "exnawips - convert NCEP GRIB files into GEMPAK Grids" +# echo "----------------------------------------------------" +# echo "History: Mar 2000 - First implementation of this new script." +# echo "Sept 2011 - First implementation of this new script based on" +# echo " /nwprod/scripts/exnawips.sh.sms" +# echo " March 2020- Modified for GEFSv12.0" # March-2020 Roberto.Padilla@noaa.gov ##################################################################### -set -xa +source "$HOMEgfs/ush/preamble.sh" #export grids=${grids:-'glo_30m at_10m ep_10m wc_10m ao_9km'} #Interpolated grids -export grids=${grids:-'glo_10m gso_15m ao_9km'} #Native grids -export RUNwave=${RUNwave:-${RUN}${COMPONENT}} +export grids=${grids:-'glo_30m'} #Native grids +export RUNwave=${RUNwave:-${RUN}wave} export fstart=${fstart:-0} export FHMAX_WAV=${FHMAX_WAV:-180} #180 Total of hours to process export FHMAX_HF_WAV=${FHMAX_HF_WAV:-72} @@ -70,7 +71,7 @@ while [ $fhcnt -le $FHMAX_WAV ]; do *) gridIDin= grdIDout= ;; esac - GRIBIN=$COMIN/gridded/$RUNwave.$cycle.$grdIDin.f${fhr}.grib2 + GRIBIN="${COM_WAVE_GRID}/${RUNwave}.${cycle}.${grdIDin}.f${fhr}.grib2" GRIBIN_chk=$GRIBIN.idx icnt=1 @@ -83,14 +84,13 @@ while [ $fhcnt -le $FHMAX_WAV ]; do fi if [ $icnt -ge $maxtries ]; then msg="ABORTING after 5 minutes of waiting for $GRIBIN." 
- postmsg "$jlogfile" "$msg" echo ' ' echo '**************************** ' echo '*** ERROR : NO GRIB FILE *** ' echo '**************************** ' echo ' ' echo $msg - [[ "$LOUD" = YES ]] && set -x + set_trace echo "$RUNwave $grdID ${fhr} prdgen $date $cycle : GRIB file missing." >> $wavelog err=1;export err;${errchk} || exit ${err} fi @@ -103,7 +103,6 @@ while [ $fhcnt -le $FHMAX_WAV ]; do OK=$? if [ "$OK" != '0' ]; then msg="ABNORMAL EXIT: ERROR IN interpolation the global grid" - postmsg "$jlogfile" "$msg" #set +x echo ' ' echo '************************************************************* ' @@ -111,7 +110,7 @@ while [ $fhcnt -le $FHMAX_WAV ]; do echo '************************************************************* ' echo ' ' echo $msg - #[[ "$LOUD" = YES ]] && set -x + #set_trace echo "$RUNwave $grdID prdgen $date $cycle : error in grbindex." >> $wavelog err=2;export err;err_chk else @@ -159,12 +158,11 @@ while [ $fhcnt -le $FHMAX_WAV ]; do fi if [ $SENDCOM = "YES" ] ; then - cpfs $GEMGRD $COMOUT/$GEMGRD + cpfs "${GEMGRD}" "${COM_WAVE_GEMPAK}/${GEMGRD}" if [ $SENDDBN = "YES" ] ; then - $DBNROOT/bin/dbn_alert MODEL ${DBN_ALERT_TYPE} $job \ - $COMOUT/$GEMGRD + "${DBNROOT}/bin/dbn_alert" MODEL "${DBN_ALERT_TYPE}" "${job}" "${COM_WAVE_GEMPAK}/${GEMGRD}" else - echo "##### DBN_ALERT is: MODEL ${DBN_ALERT_TYPE} $job $COMOUT/$GEMGRD#####" + echo "##### DBN_ALERT is: MODEL ${DBN_ALERT_TYPE} ${job} ${COM_WAVE_GEMPAK}/${GEMGRD}#####" fi fi rm grib_$grid @@ -177,14 +175,6 @@ while [ $fhcnt -le $FHMAX_WAV ]; do let fhcnt=fhcnt+inc done ##################################################################### -# GOOD RUN -set +x -echo "**************JOB $RUN NAWIPS COMPLETED NORMALLY ON THE IBM" -echo "**************JOB $RUN NAWIPS COMPLETED NORMALLY ON THE IBM" -echo "**************JOB $RUN NAWIPS COMPLETED NORMALLY ON THE IBM" -set -x -##################################################################### -msg='Job completed normally.' -echo $msg -postmsg "$jlogfile" "$msg" + + ############################### END OF SCRIPT ####################### diff --git a/scripts/exgfs_wave_post_gridded_sbs.sh b/scripts/exgfs_wave_post_gridded_sbs.sh index 75bca7e82b..54350180fe 100755 --- a/scripts/exgfs_wave_post_gridded_sbs.sh +++ b/scripts/exgfs_wave_post_gridded_sbs.sh @@ -1,5 +1,5 @@ -#!/bin/bash -# +#! /usr/bin/env bash + ################################################################################ # # UNIX Script Documentation Block @@ -8,45 +8,39 @@ # # Author: Jose-Henrique Alves Org: NCEP/EMC Date: 2019-12-06 # Abstract: This script is the postprocessor for the wave component in GFS. -# This version runs side-by-side with the GFS fcst step. +# This version runs side-by-side with the GFS fcst step. # It executes several scripts forpreparing and creating output data # as follows: # -# wave_grib2_sbs.sh : generates GRIB2 files. -# wave_grid_interp_ush.sh : interpolates data from new grids to old grids +# wave_grib2_sbs.sh : generates GRIB2 files. 
+# wave_grid_interp_ush.sh : interpolates data from new grids to old grids # # Script history log: -# 2019-12-06 J-Henrique Alves: First Version adapted from HTolman post.sh 2007 +# 2019-12-06 J-Henrique Alves: First Version adapted from HTolman post.sh 2007 # 2020-06-10 J-Henrique Alves: Porting to R&D machine Hera -# 2020-07-31 Jessica Meixner: Removing points, now gridded data only +# 2020-07-31 Jessica Meixner: Removing points, now gridded data only # # $Id$ # # Attributes: # Language: Bourne-again (Bash) Shell -# Machine: WCOSS-DELL-P3 # ############################################################################### # # --------------------------------------------------------------------------- # # 0. Preparations -# 0.a Basic modes of operation - set -x - # Use LOUD variable to turn on/off trace. Defaults to YES (on). - export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES - [[ "$LOUD" != YES ]] && set +x +source "$HOMEgfs/ush/preamble.sh" + +# 0.a Basic modes of operation # Set wave model ID tag to include member number # if ensemble; waveMEMB var empty in deterministic - export WAV_MOD_TAG=${CDUMP}wave${waveMEMB} + export WAV_MOD_TAG=${RUN}wave${waveMEMB} cd $DATA - postmsg "$jlogfile" "HAS BEGUN on $(hostname)" - - msg="Starting WAVE POSTPROCESSOR SCRIPT for $WAV_MOD_TAG" - postmsg "$jlogfile" "$msg" + echo "Starting WAVE POSTPROCESSOR SCRIPT for $WAV_MOD_TAG" set +x echo ' ' @@ -57,11 +51,11 @@ echo "Starting at : $(date)" echo '-------------' echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace # Script will run only if pre-defined NTASKS # The actual work is distributed over these tasks. - if [ -z ${NTASKS} ] + if [ -z ${NTASKS} ] then echo "FATAL ERROR: requires NTASKS to be set " err=1; export err;${errchk} @@ -73,7 +67,6 @@ # 0.c.1 Grids export waveGRD=${waveGRD?Var waveGRD Not Set} - export wavesbsGRD=${wavesbsGRD?Var wavesbsGRD Not Set} # 0.c.2 extended global grid and rtma transfer grid export waveinterpGRD=${waveinterpGRD?Var wavepostGRD Not Set} @@ -85,27 +78,18 @@ echo 'Grid information :' echo '-------------------' echo " Native wave grids : $waveGRD" - echo " Side-by-side grids : $wavesbsGRD" echo " Interpolated grids : $waveinterpGRD" echo " Post-process grids : $wavepostGRD" echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace - -# 0.c.3 Define CDATE_POST as a function of RERUN variable setting - if [ "${RERUN}" = "YES" ]; then - export CDATE_POST=${CDATE_RST} - export FHRUN=$($NHOUR ${CDATE_RST} ${CDATE}) - else # regular run - export CDATE_POST=${CDATE} - export FHRUN=0 - fi + export FHRUN=0 # --------------------------------------------------------------------------- # # 1. 
Get files that are used by most child scripts - export DOGRB_WAV='YES' #Create grib2 files - export DOGRI_WAV='NO' #Create interpolated grids + export DOGRB_WAV=${DOGRB_WAV:-'YES'} #Create grib2 files + export DOGRI_WAV=${DOGRI_WAV:-'NO'} #Create interpolated grids exit_code=0 @@ -113,46 +97,41 @@ echo ' ' echo 'Preparing input files :' echo '-----------------------' - [[ "$LOUD" = YES ]] && set -x + set_trace -# 1.a Model definition files and output files (set up using poe) +# 1.a Model definition files and output files (set up using poe) # 1.a.1 Copy model definition files - for grdID in $waveGRD $wavesbsGRD $wavepostGRD $waveinterpGRD - do - if [ -f "$COMIN/rundata/${CDUMP}wave.mod_def.${grdID}" ] - then + for grdID in ${waveGRD} ${wavepostGRD} ${waveinterpGRD}; do + if [[ -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]]; then set +x - echo " Mod def file for $grdID found in ${COMIN}/rundata. copying ...." - [[ "$LOUD" = YES ]] && set -x + echo " Mod def file for ${grdID} found in ${COM_WAVE_PREP}. copying ...." + set_trace - cp -f $COMIN/rundata/${CDUMP}wave.mod_def.${grdID} mod_def.$grdID + cp -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" "mod_def.${grdID}" fi done -# 1.a.2 Check that model definition files exist - for grdID in $waveGRD $wavesbsGRD $wavepostGRD $waveinterpGRD - do - if [ ! -f mod_def.$grdID ] - then +# 1.a.2 Check that model definition files exist + for grdID in ${waveGRD} ${wavepostGRD} ${waveinterpGRD}; do + if [[ ! -f "mod_def.${grdID}" ]]; then set +x echo ' ' echo '*************************************************** ' echo " FATAL ERROR : NO MOD_DEF FILE mod_def.$grdID " echo '*************************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "FATAL ERROR : NO MOD_DEF file mod_def.$grdID" + set_trace err=2; export err;${errchk} exit $err DOGRB_WAV='NO' else set +x echo "File mod_def.$grdID found. Syncing to all nodes ..." - [[ "$LOUD" = YES ]] && set -x + set_trace fi done - + # 1.b Input template files @@ -164,12 +143,12 @@ then cp -f $PARMwave/${intGRD}_interp.inp.tmpl ${intGRD}_interp.inp.tmpl fi - + if [ -f ${intGRD}_interp.inp.tmpl ] then set +x echo " ${intGRD}_interp.inp.tmpl copied. Syncing to all nodes ..." - [[ "$LOUD" = YES ]] && set -x + set_trace else set +x echo ' ' @@ -177,9 +156,8 @@ echo '*** ERROR : NO TEMPLATE FOR GRINT INPUT FILE *** ' echo '*********************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x - echo "$WAV_MOD_TAG post $date $cycle : GRINT template file missing." - postmsg "$jlogfile" "NON-FATAL ERROR : NO TEMPLATE FOR GRINT INPUT FILE" + set_trace + echo "${WAV_MOD_TAG} post ${PDY} ${cycle} : GRINT template file missing." exit_code=1 DOGRI_WAV='NO' fi @@ -199,7 +177,7 @@ then set +x echo " ww3_grib2.${grbGRD}.inp.tmpl copied. Syncing to all nodes ..." 
- [[ "$LOUD" = YES ]] && set -x + set_trace else set +x echo ' ' @@ -207,8 +185,7 @@ echo "*** ERROR : NO TEMPLATE FOR ${grbGRD} GRIB INPUT FILE *** " echo '*********************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "NON-FATAL ERROR : NO TEMPLATE FOR GRIB2 INPUT FILE" + set_trace exit_code=2 DOGRB_WAV='NO' fi @@ -221,13 +198,13 @@ set +x echo ' ' echo " Input files read and processed at : $(date)" - echo ' ' + echo ' ' echo ' Data summary : ' echo ' ---------------------------------------------' echo " Sufficient data for GRID interpolation : $DOGRI_WAV" echo " Sufficient data for GRIB files : $DOGRB_WAV" echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace # --------------------------------------------------------------------------- # # 2. Make consolidated grib2 file for side-by-side grids and interpolate @@ -237,16 +214,12 @@ set +x echo ' Making command file for sbs grib2 and GRID Interpolation ' - [[ "$LOUD" = YES ]] && set -x + set_trace - rm -f cmdfile - touch cmdfile - chmod 744 cmdfile - -# 1.a.2 Loop over forecast time to generate post files +# 1.a.2 Loop over forecast time to generate post files # When executed side-by-side, serial mode (cfp when run after the fcst step) # Contingency for RERUN=YES - if [ "${RERUN}" = "YES" ]; then + if [ "${RERUN-NO}" = "YES" ]; then fhr=$((FHRUN + FHMIN_WAV)) if [ $FHMAX_HF_WAV -gt 0 ] && [ $FHOUT_HF_WAV -gt 0 ] && [ $fhr -lt $FHMAX_HF_WAV ]; then FHINCG=$FHOUT_HF_WAV @@ -260,8 +233,8 @@ fhrg=$fhr iwaitmax=120 # Maximum loop cycles for waiting until wave component output file is ready (fails after max) while [ $fhr -le $FHMAX_WAV ]; do - - ymdh=$($NDATE $fhr $CDATE) + + ymdh=$($NDATE $fhr ${PDY}${cyc}) YMD=$(echo $ymdh | cut -c1-8) HMS="$(echo $ymdh | cut -c9-10)0000" YMDHMS=${YMD}${HMS} @@ -269,10 +242,11 @@ fcmdnow=cmdfile.${FH3} fcmdigrd=icmdfile.${FH3} - rm -f ${fcmdnow} ${fcmdigrd} - touch ${fcmdnow} ${fcmdigrd} mkdir output_$YMDHMS cd output_$YMDHMS + rm -f ${fcmdnow} ${fcmdigrd} + touch ${fcmdnow} ${fcmdigrd} + # Create instances of directories for gridded output export GRIBDATA=${DATA}/output_$YMDHMS @@ -284,20 +258,19 @@ then iwait=0 for wavGRD in ${waveGRD} ; do - gfile=$COMIN/rundata/${WAV_MOD_TAG}.out_grd.${wavGRD}.${YMD}.${HMS} + gfile=${COM_WAVE_HISTORY}/${WAV_MOD_TAG}.out_grd.${wavGRD}.${YMD}.${HMS} while [ ! -s ${gfile} ]; do sleep 10; let iwait=iwait+1; done - if [ $iwait -eq $iwaitmax ]; then + if [ $iwait -eq $iwaitmax ]; then echo '*************************************************** ' echo " FATAL ERROR : NO RAW FIELD OUTPUT FILE out_grd.$grdID " echo '*************************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x - echo "$WAV_MOD_TAG post $grdID $date $cycle : field output missing." - postmsg "$jlogfile" "NON-FATAL ERROR : NO RAW FIELD OUTPUT FILE out_grd.$grdID" + set_trace + echo "${WAV_MOD_TAG} post ${grdID} ${PDY} ${cycle} : field output missing." 
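The hunk above that writes cmdmfile.$nfile scripts distributes the lines of the per-forecast-hour command file over the available tasks in round-robin fashion before the MPMD launcher is invoked. Restated as a self-contained script (reading cmdfile with a read loop instead of sed, and defaulting NTASKS purely for illustration), the same splitting logic is:

    #! /usr/bin/env bash
    # Round-robin split of a serial command file into one cmdmfile.<n> per task,
    # mirroring the logic in exgfs_wave_post_gridded_sbs.sh.
    NTASKS=${NTASKS:-4}
    nfile=0
    ifirst='yes'
    rm -f cmdmprog
    while IFS= read -r line; do
      [[ -z "${line}" ]] && break
      if [[ "${ifirst}" == 'yes' ]]; then
        # First pass over the task indices: create each worker script with a header.
        echo "#!/bin/sh" > "cmdmfile.${nfile}"
        echo "${nfile} cmdmfile.${nfile}" >> cmdmprog
        chmod 744 "cmdmfile.${nfile}"
      fi
      echo "${line}" >> "cmdmfile.${nfile}"
      nfile=$(( nfile + 1 ))
      if (( nfile == NTASKS )); then
        nfile=0
        ifirst='no'
      fi
    done < cmdfile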
err=3; export err;${errchk} exit $err fi - ln -s ${gfile} ./out_grd.${wavGRD} + ln -s ${gfile} ./out_grd.${wavGRD} done if [ "$DOGRI_WAV" = 'YES' ] @@ -305,13 +278,15 @@ nigrd=1 for grdID in $waveinterpGRD do - ymdh_int=$($NDATE -${WAVHINDH} $ymdh); dt_int=3600.; n_int=9999 ; - echo "$USHwave/wave_grid_interp_sbs.sh $grdID $ymdh_int $dt_int $n_int > grint_$grdID.out 2>&1" >> ${fcmdigrd}.${nigrd} + ymdh_int=$($NDATE -${WAVHINDH} $ymdh); dt_int=3600.; n_int=9999 ; + echo "$USHwave/wave_grid_interp_sbs.sh $grdID $ymdh_int $dt_int $n_int > grint_$grdID.out 2>&1" >> ${fcmdigrd}.${nigrd} if [ "$DOGRB_WAV" = 'YES' ] then - gribFL=\'$(echo ${OUTPARS_WAV})\' + gribFL=\'$(echo ${OUTPARS_WAV})\' case $grdID in glo_15mxt) GRDNAME='global' ; GRDRES=0p25 ; GRIDNR=255 ; MODNR=11 ;; + reg025) GRDNAME='global' ; GRDRES=0p25 ; GRIDNR=255 ; MODNR=11 ;; + glo_025) GRDNAME='global' ; GRDRES=0p25 ; GRIDNR=255 ; MODNR=11 ;; glo_30mxt) GRDNAME='global' ; GRDRES=0p50 ; GRIDNR=255 ; MODNR=11 ;; glo_30m) GRDNAME='global' ; GRDRES=0p50 ; GRIDNR=255 ; MODNR=11 ;; at_10m) GRDNAME='atlocn' ; GRDRES=0p16 ; GRIDNR=255 ; MODNR=11 ;; @@ -319,11 +294,11 @@ wc_10m) GRDNAME='wcoast' ; GRDRES=0p16 ; GRIDNR=255 ; MODNR=11 ;; ak_10m) GRDNAME='alaska' ; GRDRES=0p16 ; GRIDNR=255 ; MODNR=11 ;; esac - echo "$USHwave/wave_grib2_sbs.sh $grdID $GRIDNR $MODNR $ymdh $fhr $GRDNAME $GRDRES $gribFL > grib_$grdID.out 2>&1" >> ${fcmdigrd}.${nigrd} + echo "$USHwave/wave_grib2_sbs.sh $grdID $GRIDNR $MODNR $ymdh $fhr $GRDNAME $GRDRES $gribFL > grib_$grdID.out 2>&1" >> ${fcmdigrd}.${nigrd} fi echo "${GRIBDATA}/${fcmdigrd}.${nigrd}" >> ${fcmdnow} chmod 744 ${fcmdigrd}.${nigrd} - nigrd=$((nigrd+1)) + nigrd=$((nigrd+1)) done fi @@ -342,9 +317,11 @@ ao_20m) GRDNAME='arctic' ; GRDRES=0p33 ; GRIDNR=255 ; MODNR=11 ;; so_20m) GRDNAME='antarc' ; GRDRES=0p33 ; GRIDNR=255 ; MODNR=11 ;; glo_15mxt) GRDNAME='global' ; GRDRES=0p25 ; GRIDNR=255 ; MODNR=11 ;; + reg025) GRDNAME='global' ; GRDRES=0p25 ; GRIDNR=255 ; MODNR=11 ;; + glo_025) GRDNAME='global' ; GRDRES=0p25 ; GRIDNR=255 ; MODNR=11 ;; gwes_30m) GRDNAME='global' ; GRDRES=0p50 ; GRIDNR=255 ; MODNR=10 ;; esac - echo "$USHwave/wave_grib2_sbs.sh $grdID $GRIDNR $MODNR $ymdh $fhr $GRDNAME $GRDRES $gribFL > grib_$grdID.out 2>&1" >> ${fcmdnow} + echo "$USHwave/wave_grib2_sbs.sh $grdID $GRIDNR $MODNR $ymdh $fhr $GRDNAME $GRDRES $gribFL > grib_$grdID.out 2>&1" >> ${fcmdnow} done fi @@ -358,18 +335,18 @@ nlines=$( wc -l ${fcmdnow} | awk '{print $1}' ) while [ $iline -le $nlines ]; do line=$( sed -n ''$iline'p' ${fcmdnow} ) - if [ -z "$line" ]; then + if [ -z "$line" ]; then break else - if [ "$ifirst" = 'yes' ]; then - echo "#!/bin/sh" > cmdmfile.$nfile + if [ "$ifirst" = 'yes' ]; then + echo "#!/bin/sh" > cmdmfile.$nfile echo "$nfile cmdmfile.$nfile" >> cmdmprog chmod 744 cmdmfile.$nfile fi echo $line >> cmdmfile.$nfile nfile=$(( nfile + 1 )) if [ $nfile -eq $NTASKS ]; then - nfile=0 + nfile=0 ifirst='no' fi iline=$(( iline + 1 )) @@ -385,7 +362,7 @@ echo " Executing the grib2_sbs scripts at : $(date)" echo ' ------------------------------------' echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace if [ "$wavenproc" -gt '1' ] then @@ -410,7 +387,7 @@ echo '*************************************' echo ' See Details Below ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace err=4; export err;${errchk} exit $err fi @@ -425,8 +402,8 @@ # Check if grib2 file created ENSTAG="" if [ ${waveMEMB} ]; then ENSTAG=".${membTAG}${waveMEMB}" ; fi - gribchk=${CDUMP}wave.${cycle}${ENSTAG}.${GRDNAME}.${GRDRES}.f${FH3}.grib2 - if [ ! 
-s ${COMOUT}/gridded/${gribchk} ]; then + gribchk="${RUN}wave.${cycle}${ENSTAG}.${GRDNAME}.${GRDRES}.f${FH3}.grib2" + if [ ! -s ${COM_WAVE_GRID}/${gribchk} ]; then set +x echo ' ' echo '********************************************' @@ -434,7 +411,7 @@ echo '********************************************' echo ' See Details Below ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace err=5; export err;${errchk} exit $err fi @@ -454,28 +431,6 @@ # --------------------------------------------------------------------------- # # 7. Ending output - set +x - echo ' ' - echo "Ending at : $(date)" - echo '-----------' - echo ' ' - echo ' *** End of MWW3 postprocessor ***' - echo ' ' - [[ "$LOUD" = YES ]] && set -x - - if [ "$exit_code" -ne '0' ] - then - echo " FATAL ERROR: Problem in MWW3 POST" - msg="ABNORMAL EXIT: Problem in MWW3 POST" - postmsg "$jlogfile" "$msg" - echo $msg - err=6; export err;${errchk} - exit $err - else - echo " Side-by-Side Wave Post Completed Normally " - msg="$job completed normally" - postmsg "$jlogfile" "$msg" - exit 0 - fi +echo "$exit_code" # End of MWW3 prostprocessor script ---------------------------------------- # diff --git a/scripts/exgfs_wave_post_pnt.sh b/scripts/exgfs_wave_post_pnt.sh index 85f2383304..a7aa957564 100755 --- a/scripts/exgfs_wave_post_pnt.sh +++ b/scripts/exgfs_wave_post_pnt.sh @@ -1,5 +1,5 @@ -#!/bin/bash -# +#! /usr/bin/env bash + ################################################################################ # # UNIX Script Documentation Block @@ -11,13 +11,13 @@ # It executes several scripts forpreparing and creating output data # as follows: # -# wave_outp_spec.sh : generates spectral data for output locations -# wave_outp_bull.sh : generates bulletins for output locations +# wave_outp_spec.sh : generates spectral data for output locations +# wave_outp_bull.sh : generates bulletins for output locations # wave_outp_cat.sh : cats the by hour into the single output file -# wave_tar.sh : tars the spectral and bulletin multiple files +# wave_tar.sh : tars the spectral and bulletin multiple files # # Script history log: -# 2019-12-06 J-Henrique Alves: First Version adapted from HTolman post.sh 2007 +# 2019-12-06 J-Henrique Alves: First Version adapted from HTolman post.sh 2007 # 2020-06-10 J-Henrique Alves: Porting to R&D machine Hera # 2020-07-30 Jessica Meixner: Points only - no gridded data # 2020-09-29 Jessica Meixner: optimized by changing loop structures @@ -26,18 +26,15 @@ # # Attributes: # Language: Bourne-again (Bash) Shell -# Machine: WCOSS-DELL-P3 # ############################################################################### # # --------------------------------------------------------------------------- # # 0. Preparations -# 0.a Basic modes of operation - set -x - # Use LOUD variable to turn on/off trace. Defaults to YES (on). 
- export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES - [[ "$LOUD" != YES ]] && set +x +source "$HOMEgfs/ush/preamble.sh" + +# 0.a Basic modes of operation cd $DATA @@ -45,10 +42,8 @@ # if ensemble; waveMEMB var empty in deterministic export WAV_MOD_TAG=${CDUMP}wave${waveMEMB} - postmsg "$jlogfile" "HAS BEGUN on $(hostname)" - - msg="Starting WAVE PNT POSTPROCESSOR SCRIPT for $WAV_MOD_TAG" - postmsg "$jlogfile" "$msg" + echo "HAS BEGUN on $(hostname)" + echo "Starting WAVE PNT POSTPROCESSOR SCRIPT for $WAV_MOD_TAG" set +x echo ' ' @@ -59,11 +54,11 @@ echo "Starting at : $(date)" echo '-------------' echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace # Script will run only if pre-defined NTASKS # The actual work is distributed over these tasks. - if [ -z ${NTASKS} ] + if [ -z ${NTASKS} ] then echo "FATAL ERROR: requires NTASKS to be set " err=1; export err;${errchk} @@ -79,7 +74,7 @@ export STA_DIR=$DATA/station_ascii_files if [ -d $STA_DIR ] - then + then rm -rf ${STA_DIR} fi mkdir -p ${STA_DIR} @@ -96,7 +91,7 @@ echo '-------------------' echo " Output points : $waveuoutpGRD" echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace # --------------------------------------------------------------------------- # # 1. Get files that are used by most child scripts @@ -107,9 +102,9 @@ echo ' ' echo 'Preparing input files :' echo '-----------------------' - [[ "$LOUD" = YES ]] && set -x + set_trace -# 1.a Model definition files and output files (set up using poe) +# 1.a Model definition files and output files (set up using poe) # 1.a.1 Set up the parallel command tasks @@ -117,19 +112,18 @@ touch cmdfile chmod 744 cmdfile - [[ "$LOUD" = YES ]] && set -x + set_trace # Copy model definition files - for grdID in $waveuoutpGRD - do - if [ -f "$COMIN/rundata/${CDUMP}wave.mod_def.${grdID}" ] - then + iloop=0 + for grdID in ${waveuoutpGRD}; do + if [[ -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ]]; then set +x - echo " Mod def file for $grdID found in ${COMIN}/rundata. copying ...." - [[ "$LOUD" = YES ]] && set -x + echo " Mod def file for ${grdID} found in ${COM_WAVE_PREP}. copying ...." + set_trace - cp -f $COMIN/rundata/${CDUMP}wave.mod_def.${grdID} mod_def.$grdID - iloop=$(expr $iloop + 1) + cp -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" "mod_def.${grdID}" + iloop=$((iloop + 1)) fi done @@ -143,17 +137,16 @@ echo " FATAL ERROR : NO MOD_DEF FILE mod_def.$grdID " echo '*************************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "FATAL ERROR : NO MOD_DEF file mod_def.$grdID" + set_trace err=2; export err;${errchk} exit $err else set +x echo "File mod_def.$grdID found. Syncing to all nodes ..." - [[ "$LOUD" = YES ]] && set -x + set_trace fi done - + # 1.c Output locations file rm -f buoy.loc @@ -174,7 +167,7 @@ then set +x echo " buoy.loc and buoy.ibp copied and processed ($PARMwave/wave_${NET}.buoys)." - [[ "$LOUD" = YES ]] && set -x + set_trace else set +x echo ' ' @@ -182,8 +175,7 @@ echo ' FATAL ERROR : NO BUOY LOCATION FILE ' echo '************************************* ' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "FATAL ERROR : NO BUOY LOCATION FILE" + set_trace err=3; export err;${errchk} exit $err DOSPC_WAV='NO' @@ -201,7 +193,7 @@ then set +x echo " ww3_outp_spec.inp.tmpl copied. Syncing to all grids ..." 
- [[ "$LOUD" = YES ]] && set -x + set_trace else set +x echo ' ' @@ -209,8 +201,7 @@ echo '*** ERROR : NO TEMPLATE FOR SPEC INPUT FILE *** ' echo '*********************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "NON-FATAL ERROR : NO TEMPLATE FOR SPEC INPUT FILE" + set_trace exit_code=3 DOSPC_WAV='NO' DOBLL_WAV='NO' @@ -225,7 +216,7 @@ then set +x echo " ww3_outp_bull.inp.tmpl copied. Syncing to all nodes ..." - [[ "$LOUD" = YES ]] && set -x + set_trace else set +x echo ' ' @@ -233,8 +224,7 @@ echo '*** ERROR : NO TEMPLATE FOR BULLETIN INPUT FILE *** ' echo '*************************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "NON-FATAL ERROR : NO TEMPLATE FOR BULLETIN INPUT FILE" + set_trace exit_code=4 DOBLL_WAV='NO' fi @@ -243,46 +233,42 @@ if [ "$DOSPC_WAV" = 'YES' ] || [ "$DOBLL_WAV" = 'YES' ] then - ymdh=$($NDATE -${WAVHINDH} $CDATE) - tstart="$(echo $ymdh | cut -c1-8) $(echo $ymdh | cut -c9-10)0000" + ymdh=$(${NDATE} -"${WAVHINDH}" "${PDY}${cyc}") + tstart="${ymdh:0:8} ${ymdh:8:2}0000" dtspec=3600. # default time step (not used here) - sed -e "s/TIME/$tstart/g" \ - -e "s/DT/$dtspec/g" \ + sed -e "s/TIME/${tstart}/g" \ + -e "s/DT/${dtspec}/g" \ -e "s/POINT/1/g" \ -e "s/ITYPE/0/g" \ -e "s/FORMAT/F/g" \ ww3_outp_spec.inp.tmpl > ww3_outp.inp - + ln -s mod_def.$waveuoutpGRD mod_def.ww3 - YMD=$(echo $CDATE | cut -c1-8) - HMS="$(echo $CDATE | cut -c9-10)0000" - if [ -f $COMIN/rundata/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${YMD}.${HMS} ] - then - ln -s $COMIN/rundata/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${YMD}.${HMS} ./out_pnt.${waveuoutpGRD} + HMS="${cyc}0000" + if [[ -f "${COM_WAVE_HISTORY}/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${PDY}.${HMS}" ]]; then + ln -s "${COM_WAVE_HISTORY}/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${PDY}.${HMS}" \ + "./out_pnt.${waveuoutpGRD}" else echo '*************************************************** ' - echo " FATAL ERROR : NO RAW POINT OUTPUT FILE out_pnt.${waveuoutpGRD}.${YMD}.${HMS} " + echo " FATAL ERROR : NO RAW POINT OUTPUT FILE out_pnt.${waveuoutpGRD}.${PDY}.${HMS} " echo '*************************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x - echo "$WAV_MOD_TAG post $waveuoutpGRD $CDATE $cycle : field output missing." - postmsg "$jlogfile" "FATAL ERROR : NO RAW POINT OUTPUT FILE out_pnt.${waveuoutpGRD}.${YMD}.${HMS}" + set_trace + echo "${WAV_MOD_TAG} post ${waveuoutpGRD} ${PDY}${cyc} ${cycle} : field output missing." err=4; export err;${errchk} fi - + rm -f buoy_tmp.loc buoy_log.ww3 ww3_oup.inp ln -fs ./out_pnt.${waveuoutpGRD} ./out_pnt.ww3 ln -fs ./mod_def.${waveuoutpGRD} ./mod_def.ww3 export pgm=ww3_outp;. prep_step - $EXECwave/ww3_outp > buoy_lst.loc 2>&1 + $EXECwave/ww3_outp > buoy_lst.loc 2>&1 export err=$?;err_chk if [ "$err" != '0' ] && [ ! -f buoy_log.ww3 ] then pgm=wave_post - msg="ABNORMAL EXIT: ERROR IN ww3_outp" - postmsg "$jlogfile" "$msg" set +x echo ' ' echo '******************************************** ' @@ -291,8 +277,7 @@ echo ' ' cat buoy_tmp.loc echo "$WAV_MOD_TAG post $date $cycle : buoy log file failed to be created." - echo $msg - [[ "$LOUD" = YES ]] && set -x + set_trace err=5;export err;${errchk} DOSPC_WAV='NO' DOBLL_WAV='NO' @@ -315,7 +300,7 @@ then set +x echo 'Buoy log file created. Syncing to all nodes ...' 
- [[ "$LOUD" = YES ]] && set -x + set_trace else set +x echo ' ' @@ -323,8 +308,7 @@ echo '*** ERROR : NO BUOY LOG FILE CREATED *** ' echo '**************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "FATAL ERROR : NO BUOY LOG FILE GENERATED FOR SPEC AND BULLETIN FILES" + set_trace err=6;export err;${errchk} DOSPC_WAV='NO' DOBLL_WAV='NO' @@ -337,41 +321,41 @@ set +x echo ' ' echo " Input files read and processed at : $(date)" - echo ' ' + echo ' ' echo ' Data summary : ' echo ' ---------------------------------------------' echo " Sufficient data for spectral files : $DOSPC_WAV ($Nb points)" echo " Sufficient data for bulletins : $DOBLL_WAV ($Nb points)" echo " Boundary points : $DOBNDPNT_WAV" echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace # --------------------------------------------------------------------------- # -# 2. Make files for processing boundary points +# 2. Make files for processing boundary points # # 2.a Command file set-up set +x echo ' Making command file for wave post points ' - [[ "$LOUD" = YES ]] && set -x + set_trace rm -f cmdfile touch cmdfile chmod 744 cmdfile -# 1.a.2 Loop over forecast time to generate post files +# 1.a.2 Loop over forecast time to generate post files fhr=$FHMIN_WAV while [ $fhr -le $FHMAX_WAV_PNT ]; do - + echo " Creating the wave point scripts at : $(date)" - ymdh=$($NDATE $fhr $CDATE) - YMD=$(echo $ymdh | cut -c1-8) - HMS="$(echo $ymdh | cut -c9-10)0000" + ymdh=$($NDATE "${fhr}" "${PDY}${cyc}") + YMD=${ymdh:0:8} + HMS="${ymdh:8:2}0000" YMDHMS=${YMD}${HMS} - FH3=$(printf %03i $fhr) + FH3=$(printf %03i ${fhr}) rm -f tmpcmdfile.${FH3} - touch tmpcmdfile.${FH3} + touch tmpcmdfile.${FH3} mkdir output_$YMDHMS cd output_$YMDHMS @@ -380,21 +364,20 @@ export BULLDATA=${DATA}/output_$YMDHMS cp $DATA/mod_def.${waveuoutpGRD} mod_def.${waveuoutpGRD} - pfile=$COMIN/rundata/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${YMD}.${HMS} + pfile="${COM_WAVE_HISTORY}/${WAV_MOD_TAG}.out_pnt.${waveuoutpGRD}.${YMD}.${HMS}" if [ -f ${pfile} ] - then + then ln -fs ${pfile} ./out_pnt.${waveuoutpGRD} - else + else echo " FATAL ERROR : NO RAW POINT OUTPUT FILE out_pnt.$waveuoutpGRD.${YMD}.${HMS} " echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "FATAL ERROR : NO RAW POINT OUTPUT FILE out_pnt.$waveuoutpGRD.${YMD}.${HMS}" + set_trace err=7; export err;${errchk} exit $err fi cd $DATA - + if [ "$DOSPC_WAV" = 'YES' ] then export dtspec=3600. @@ -455,18 +438,18 @@ nlines=$( wc -l cmdfile | awk '{print $1}' ) while [ $iline -le $nlines ]; do line=$( sed -n ''$iline'p' cmdfile ) - if [ -z "$line" ]; then + if [ -z "$line" ]; then break else - if [ "$ifirst" = 'yes' ]; then - echo "#!/bin/sh" > cmdmfile.$nfile + if [ "$ifirst" = 'yes' ]; then + echo "#!/bin/sh" > cmdmfile.$nfile echo "$nfile cmdmfile.$nfile" >> cmdmprog chmod 744 cmdmfile.$nfile fi echo $line >> cmdmfile.$nfile nfile=$(( nfile + 1 )) if [ $nfile -eq $NTASKS ]; then - nfile=0 + nfile=0 ifirst='no' fi iline=$(( iline + 1 )) @@ -482,7 +465,7 @@ echo " Executing the wave point scripts at : $(date)" echo ' ------------------------------------' echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace if [ "$wavenproc" -gt '1' ] then @@ -494,7 +477,7 @@ exit=$? else chmod 744 cmdfile - ./cmdfile + ./cmdfile exit=$? 
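As in the gridded post script, the point post script computes wavenproc as the smaller of the number of command lines and NTASKS, then either hands the work to the MPMD wrapper or runs the command file serially. Below is a condensed restatement of that dispatch; wavempexec, wave_mpmd, and CFP_MP come from the site/job configuration and are assumed to be set as in the workflow, and the cmdmprog indirection used with CFP is folded into a single cmdfile for brevity.

    #! /usr/bin/env bash
    # Serial vs. MPMD dispatch of a command file, as used by the wave post scripts.
    NTASKS=${NTASKS:-4}
    wavenproc=$(wc -l < cmdfile)
    wavenproc=$(( wavenproc < NTASKS ? wavenproc : NTASKS ))
    if (( wavenproc > 1 )); then
      if [[ "${CFP_MP:-NO}" == "YES" ]]; then
        ${wavempexec} -n "${wavenproc}" "${wave_mpmd}" cmdfile
      else
        ${wavempexec} "${wavenproc}" "${wave_mpmd}" cmdfile
      fi
      exit=$?
    else
      chmod 744 cmdfile
      ./cmdfile
      exit=$?
    fi
    echo "command file finished with status ${exit}"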
fi @@ -507,12 +490,12 @@ echo '*************************************' echo ' See Details Below ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace err=8; export err;${errchk} exit $err fi -# 2.b Loop over each buoy to cat the final buoy file for all fhr +# 2.b Loop over each buoy to cat the final buoy file for all fhr cd $DATA @@ -574,7 +557,7 @@ echo " Executing the boundary point cat script at : $(date)" echo ' ------------------------------------' echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace if [ "$wavenproc" -gt '1' ] then @@ -599,7 +582,7 @@ echo '*************************************' echo ' See Details Below ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace err=9; export err;${errchk} exit $err fi @@ -618,7 +601,7 @@ echo ' ' echo ' Making command file for taring all point output files.' - [[ "$LOUD" = YES ]] && set -x + set_trace # 6.b Spectral data files @@ -676,7 +659,7 @@ echo " Executing the wave_tar scripts at : $(date)" echo ' ------------------------------------' echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace if [ "$wavenproc" -gt '1' ] then @@ -701,7 +684,7 @@ echo '*************************************' echo ' See Details Below ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace err=10; export err;${errchk} exit $err fi @@ -709,28 +692,7 @@ # --------------------------------------------------------------------------- # # 4. Ending output - set +x - echo ' ' - echo "Ending at : $(date)" - echo '-----------' - echo ' ' - echo ' *** End of MWW3 pnt postprocessor ***' - echo ' ' - [[ "$LOUD" = YES ]] && set -x - if [ "$exit_code" -ne '0' ] - then - echo " FATAL ERROR: Problem in MWW3 PNT POST" - msg="ABNORMAL EXIT: Problem in MWW3 PNT POST" - postmsg "$jlogfile" "$msg" - echo $msg - err=11; export err;${errchk} - exit $err - else - echo " Point Wave Post Completed Normally " - msg="$job completed normally" - postmsg "$jlogfile" "$msg" - exit 0 - fi +exit $exit_code # End of MWW3 point prostprocessor script ---------------------------------------- # diff --git a/scripts/exgfs_wave_prdgen_bulls.sh b/scripts/exgfs_wave_prdgen_bulls.sh index dc46136a54..e75df8dfd1 100755 --- a/scripts/exgfs_wave_prdgen_bulls.sh +++ b/scripts/exgfs_wave_prdgen_bulls.sh @@ -1,4 +1,5 @@ -#!/bin/bash +#! /usr/bin/env bash + ############################################################################### # # # This script is the product generator ("graphics job") for the # @@ -16,20 +17,19 @@ ############################################################################### # --------------------------------------------------------------------------- # # 0. Preparations + +source "$HOMEgfs/ush/preamble.sh" + # 0.a Basic modes of operation - set -xa - # Use LOUD variable to turn on/off trace. Defaults to YES (on). 
- export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES - [[ "$LOUD" != YES ]] && set +x # PATH for working and home directories - export RUNwave=${RUNwave:-${RUN}${COMPONENT}} + export RUNwave=${RUNwave:-${RUN}wave} export envir=${envir:-ops} export cyc=${cyc:-00} export cycle=${cycle:-t${cyc}z} export pgmout=OUTPUT.$$ export DATA=${DATA:-${DATAROOT:?}/${job}.$$} - #export CODEwave=${CODEwave:-${NWROOT}/${NET}_code.${wave_code_ver}/${code_pkg}} + #export CODEwave=${CODEwave:-${PACKAGEROOT}/${NET}_code.${wave_code_ver}/${code_pkg}} export EXECwave=${EXECwave:-$HOMEgfs/exec} export FIXwave=${FIXwave:-$HOMEgfs/fix} export PARMwave=${PARMwave:-$HOMEgfs/parm/parm_wave} @@ -40,10 +40,6 @@ cd $DATA export wavelog=${DATA}/${RUNwave}_prdgbulls.log - postmsg "$jlogfile" "HAS BEGUN on $(hostname)" - - msg="Starting MWW3 BULLETINS PRODUCTS SCRIPT" - postmsg "$jlogfile" "$msg" touch $wavelog # 0.b Date and time stuff export date=$PDY @@ -58,20 +54,19 @@ echo "Starting at : $(date)" echo ' ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace # 1. Get necessary files set +x - echo " Copying bulletins from $COMIN" - [[ "$LOUD" = YES ]] && set -x + echo " Copying bulletins from ${COM_WAVE_STATION}" + set_trace # 1.a Link the input file and untar it - BullIn=$COMIN/station/${RUNwave}.$cycle.cbull_tar + BullIn="${COM_WAVE_STATION}/${RUNwave}.${cycle}.cbull_tar" if [ -f $BullIn ]; then cp $BullIn cbull.tar else - msg="ABNORMAL EXIT: NO BULLETIN TAR FILE" - postmsg "$jlogfile" "$msg" + echo "ABNORMAL EXIT: NO BULLETIN TAR FILE" set +x echo ' ' echo '************************************ ' @@ -79,7 +74,7 @@ echo '************************************ ' echo ' ' echo $msg - [[ "$LOUD" = YES ]] && set -x + set_trace msg="FATAL ERROR ${RUNwave} prdgen $date $cycle : bulletin tar missing." echo $msg >> $wavelog export err=1; ${errchk} @@ -88,18 +83,17 @@ set +x echo " Untarring bulletins ..." - [[ "$LOUD" = YES ]] && set -x + set_trace tar -xf cbull.tar OK=$? if [ "$OK" = '0' ]; then set +x echo " Unpacking successfull ..." - [[ "$LOUD" = YES ]] && set -x + set_trace rm -f cbull.tar else - msg="ABNORMAL EXIT: ERROR IN BULLETIN UNTAR" - postmsg "$jlogfile" "$msg" + echo "ABNORMAL EXIT: ERROR IN BULLETIN UNTAR" set +x echo ' ' echo '****************************************** ' @@ -107,7 +101,7 @@ echo '****************************************** ' echo ' ' echo $msg - [[ "$LOUD" = YES ]] && set -x + set_trace echo "${RUNwave} prdgen $date $cycle : bulletin untar error." >> $wavelog err=2;export err;err_chk exit $err @@ -117,7 +111,7 @@ set +x echo ' Nb=$(ls -1 *.cbull | wc -l)' Nb=$(ls -1 *.cbull | wc -l) - [[ "$LOUD" = YES ]] && set -x + set_trace echo ' ' echo " Number of bulletin files : $Nb" echo ' --------------------------' @@ -127,7 +121,6 @@ cp $PARMwave/bull_awips_gfswave awipsbull.data else msg="ABNORMAL EXIT: NO AWIPS BULLETIN HEADER DATA FILE" - postmsg "$jlogfile" "$msg" set +x echo ' ' echo '******************************************* ' @@ -135,7 +128,7 @@ echo '******************************************* ' echo ' ' echo $msg - [[ "$LOUD" = YES ]] && set -x + set_trace echo "${RUNwave} prdgen $date $cycle : Bulletin header data file missing." >> $wavelog err=3;export err;err_chk exit $err @@ -148,13 +141,12 @@ echo ' Sourcing data file with header info ...' # 2.b Set up environment variables - [[ "$LOUD" = YES ]] && set -x + set_trace . awipsbull.data # 2.c Generate list of bulletins to process echo ' Generating buoy list ...' 
- echo 'bulls=$(sed -e 's/export b//g' -e 's/=/ /' awipsbull.data | grep -v "#" |awk '{ print $1}')' - bulls=$(sed -e 's/export b//g' -e 's/=/ /' awipsbull.data | grep -v "#" |awk '{ print $1}') + bulls=$(sed -e 's/export b//g' -e 's/=/ /' awipsbull.data | grep -v "#" |awk '{print $1}') # 2.d Looping over buoys running formbul echo ' Looping over buoys ... \n' @@ -166,9 +158,8 @@ echo " Processing $bull ($headr $oname) ..." if [ -z "$headr" ] || [ ! -s $fname ]; then - [[ "$LOUD" = YES ]] && set -x + set_trace msg="ABNORMAL EXIT: MISSING BULLETING INFO" - postmsg "$jlogfile" "$msg" set +x echo ' ' echo '******************************************** ' @@ -176,23 +167,22 @@ echo '******************************************** ' echo ' ' echo $msg - [[ "$LOUD" = YES ]] && set -x + set_trace echo "${RUNwave} prdgen $date $cycle : Missing bulletin data." >> $wavelog err=4;export err;err_chk exit $err fi - [[ "$LOUD" = YES ]] && set -x + set_trace - formbul.pl -d $headr -f $fname -j $job -m ${RUNwave} \ - -p $PCOM -s NO -o $oname > formbul.out 2>&1 + formbul.pl -d "${headr}" -f "${fname}" -j "${job}" -m "${RUNwave}" \ + -p "${COM_WAVE_WMO}" -s "NO" -o "${oname}" > formbul.out 2>&1 OK=$? if [ "$OK" != '0' ] || [ ! -f $oname ]; then - [[ "$LOUD" = YES ]] && set -x + set_trace cat formbul.out msg="ABNORMAL EXIT: ERROR IN formbul" - postmsg "$jlogfile" "$msg" set +x echo ' ' echo '************************************** ' @@ -200,7 +190,7 @@ echo '************************************** ' echo ' ' echo $msg - [[ "$LOUD" = YES ]] && set -x + set_trace echo "${RUNwave} prdgen $date $cycle : error in formbul." >> $wavelog err=5;export err;err_chk exit $err @@ -211,41 +201,30 @@ done # 3. Send output files to the proper destination - [[ "$LOUD" = YES ]] && set -x - if [ "$SENDCOM" = YES ]; then - cp awipsbull.$cycle.${RUNwave} $PCOM/awipsbull.$cycle.${RUNwave} - if [ "$SENDDBN_NTC" = YES ]; then - make_ntc_bull.pl WMOBH NONE KWBC NONE $DATA/awipsbull.$cycle.${RUNwave} $PCOM/awipsbull.$cycle.${RUNwave} - else - if [ "${envir}" = "para" ] || [ "${envir}" = "test" ] || [ "${envir}" = "dev" ]; then - echo "Making NTC bulletin for parallel environment, but do not alert." - [[ "$LOUD" = YES ]] && set -x - (export SENDDBN=NO; make_ntc_bull.pl WMOBH NONE KWBC NONE \ - $DATA/awipsbull.$cycle.${RUNwave} $PCOM/awipsbull.$cycle.${RUNwave}) - fi - fi - fi +set_trace +if [ "$SENDCOM" = YES ]; then + cp "awipsbull.${cycle}.${RUNwave}" "${COM_WAVE_WMO}/awipsbull.${cycle}.${RUNwave}" + if [ "$SENDDBN_NTC" = YES ]; then + make_ntc_bull.pl "WMOBH" "NONE" "KWBC" "NONE" "${DATA}/awipsbull.${cycle}.${RUNwave}" \ + "${COM_WAVE_WMO}/awipsbull.${cycle}.${RUNwave}" + else + if [ "${envir}" = "para" ] || [ "${envir}" = "test" ] || [ "${envir}" = "dev" ]; then + echo "Making NTC bulletin for parallel environment, but do not alert." + (export SENDDBN=NO; make_ntc_bull.pl "WMOBH" "NONE" "KWBC" "NONE" \ + "${DATA}/awipsbull.${cycle}.${RUNwave}" "${COM_WAVE_WMO}/awipsbull.${cycle}.${RUNwave}") + fi + fi +fi # --------------------------------------------------------------------------- # # 4. Clean up - set +x; [[ "$LOUD" = YES ]] && set -v + set -v rm -f ${RUNwave}.*.cbull awipsbull.data set +v # --------------------------------------------------------------------------- # # 5. 
Ending output - set +x - echo ' ' - echo ' ' - echo "Ending at : $(date)" - echo ' ' - echo ' *** End of MWW3 BULLETINS product generation ***' - echo ' ' - [[ "$LOUD" = YES ]] && set -x - - msg="$job completed normally" - postmsg "$jlogfile" "$msg" # End of MWW3 product generation script -------------------------------------- # diff --git a/scripts/exgfs_wave_prdgen_gridded.sh b/scripts/exgfs_wave_prdgen_gridded.sh index ed6141afec..de7f2c4974 100755 --- a/scripts/exgfs_wave_prdgen_gridded.sh +++ b/scripts/exgfs_wave_prdgen_gridded.sh @@ -1,4 +1,5 @@ -#!/bin/ksh +#! /usr/bin/env bash + ############################################################################### # # # This script is the product generator ("graphics job") for the # @@ -17,13 +18,12 @@ ############################################################################### # --------------------------------------------------------------------------- # # 0. Preparations + +source "$HOMEgfs/ush/preamble.sh" + # 0.a Basic modes of operation - set -xa - # Use LOUD variable to turn on/off trace. Defaults to YES (on). - export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES - [[ "$LOUD" != YES ]] && set +x - export RUNwave=${RUNwave:-${RUN}${COMPONENT}} + export RUNwave=${RUNwave:-${RUN}wave} export envir=${envir:-ops} export fstart=${fstart:-0} export FHMAX_WAV=${FHMAX_WAV:-180} #180 Total of hours to process @@ -40,14 +40,13 @@ export DATA=${DATA:-${DATAROOT:?}/${job}.$$} mkdir -p $DATA cd $DATA - export wavelog=${DATA}/${COMPONENTwave}_prdggridded.log + export wavelog=${DATA}/${RUNwave}_prdggridded.log - postmsg "$jlogfile" "HAS BEGUN on $(hostname)" - msg="Starting MWW3 GRIDDED PRODUCTS SCRIPT" - postmsg "$jlogfile" "$msg" + echo "Starting MWW3 GRIDDED PRODUCTS SCRIPT" # Output grids - grids=${grids:-ao_9km at_10m ep_10m wc_10m glo_30m} -# grids=${grids:-ak_10m at_10m ep_10m wc_10m glo_30m} + # grids=${grids:-ao_9km at_10m ep_10m wc_10m glo_30m} +grids=${grids:-ak_10m at_10m ep_10m wc_10m glo_30m} +# export grids=${wavepostGRD} maxtries=${maxtries:-720} # 0.b Date and time stuff export date=$PDY @@ -63,14 +62,14 @@ echo " AWIPS grib fields" echo " Wave Grids : $grids" echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace # --------------------------------------------------------------------------- # # 1. Get necessary files echo ' ' echo 'Preparing input files :' echo '-----------------------' - [[ "$LOUD" = YES ]] && set -x + set_trace #======================================================================= ASWELL=(SWELL1 SWELL2) # Indices of HS from partitions @@ -99,7 +98,7 @@ esac # - GRIBIN=$COMIN/gridded/$RUNwave.$cycle.$grdID.f${fhr}.grib2 + GRIBIN="${COM_WAVE_GRID}/${RUNwave}.${cycle}.${grdID}.f${fhr}.grib2" GRIBIN_chk=$GRIBIN.idx icnt=1 @@ -113,14 +112,13 @@ fi if [ $icnt -ge $maxtries ]; then msg="ABNORMAL EXIT: NO GRIB FILE FOR GRID $GRIBIN" - postmsg "$jlogfile" "$msg" echo ' ' echo '**************************** ' echo '*** ERROR : NO GRIB FILE *** ' echo '**************************** ' echo ' ' echo $msg - [[ "$LOUD" = YES ]] && set -x + set_trace echo "$RUNwave $grdID ${fhr} prdgen $date $cycle : GRIB file missing." >> $wavelog err=1;export err;${errchk} || exit ${err} fi @@ -177,19 +175,18 @@ # 2.a.1 Set up for tocgrib2 echo " Do set up for tocgrib2." - [[ "$LOUD" = YES ]] && set -x + set_trace #AWIPSGRB=awipsgrib.$grdID.f${fhr} AWIPSGRB=awipsgrib # 2.a.2 Make GRIB index echo " Make GRIB index for tocgrib2." 
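Both product-generator scripts in this patch deliver AWIPS output with the same guarded pattern: copy into the wave WMO COM directory only when SENDCOM=YES, and raise a DBNet alert only when SENDDBN (or SENDDBN_NTC) is also YES, with every expansion quoted. A condensed restatement of that pattern, using a placeholder product name, is:

    #! /usr/bin/env bash
    # Guarded COM delivery plus optional DBNet alert, as in the wave product scripts.
    SENDCOM=${SENDCOM:-YES}
    SENDDBN=${SENDDBN:-NO}
    product="grib2.t00z.f000.awipsww3_example"   # placeholder file name for illustration
    if [[ "${SENDCOM}" == "YES" ]]; then
      cp "${product}" "${COM_WAVE_WMO}/${product}"
      if [[ "${SENDDBN}" == "YES" ]]; then
        "${DBNROOT}/bin/dbn_alert" GRIB_LOW "${RUN}" "${job}" "${COM_WAVE_WMO}/${product}"
      fi
    fi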
- [[ "$LOUD" = YES ]] && set -x + set_trace $GRB2INDEX gribfile.$grdID.f${fhr} gribindex.$grdID.f${fhr} OK=$? if [ "$OK" != '0' ] then msg="ABNORMAL EXIT: ERROR IN grb2index MWW3 for grid $grdID" - postmsg "$jlogfile" "$msg" #set +x echo ' ' echo '******************************************** ' @@ -197,7 +194,7 @@ echo '******************************************** ' echo ' ' echo $msg - #[[ "$LOUD" = YES ]] && set -x + #set_trace echo "$RUNwave $grdID prdgen $date $cycle : error in grbindex." >> $wavelog err=4;export err;err_chk fi @@ -205,7 +202,7 @@ # 2.a.3 Run AWIPS GRIB packing program tocgrib2 echo " Run tocgrib2" - [[ "$LOUD" = YES ]] && set -x + set_trace export pgm=tocgrib2 export pgmout=tocgrib2.out . prep_step @@ -219,7 +216,6 @@ if [ "$OK" != '0' ]; then cat tocgrib2.out msg="ABNORMAL EXIT: ERROR IN tocgrib2" - postmsg "$jlogfile" "$msg" #set +x echo ' ' echo '*************************************** ' @@ -227,7 +223,7 @@ echo '*************************************** ' echo ' ' echo $msg - #[[ "$LOUD" = YES ]] && set -x + #set_trace echo "$RUNwave prdgen $date $cycle : error in tocgrib2." >> $wavelog err=5;export err;err_chk else @@ -236,21 +232,21 @@ # 2.a.7 Get the AWIPS grib bulletin out ... #set +x echo " Get awips GRIB bulletins out ..." - #[[ "$LOUD" = YES ]] && set -x + #set_trace if [ "$SENDCOM" = 'YES' ] then #set +x echo " Saving $AWIPSGRB.$grdOut.f${fhr} as grib2.$cycle.awipsww3_${grdID}.f${fhr}" - echo " in $PCOM" - #[[ "$LOUD" = YES ]] && set -x - cp $AWIPSGRB.$grdID.f${fhr} $PCOM/grib2.$cycle.f${fhr}.awipsww3_${grdOut} + echo " in ${COM_WAVE_WMO}" + #set_trace + cp "${AWIPSGRB}.${grdID}.f${fhr}" "${COM_WAVE_WMO}/grib2.${cycle}.f${fhr}.awipsww3_${grdOut}" #set +x fi if [ "$SENDDBN" = 'YES' ] then echo " Sending $AWIPSGRB.$grdID.f${fhr} to DBRUN." - $DBNROOT/bin/dbn_alert GRIB_LOW $RUN $job $PCOM/grib2.$cycle.f${fhr}.awipsww3_${grdOut} + "${DBNROOT}/bin/dbn_alert" GRIB_LOW "${RUN}" "${job}" "${COM_WAVE_WMO}/grib2.${cycle}.f${fhr}.awipsww3_${grdOut}" fi rm -f $AWIPSGRB.$grdID.f${fhr} tocgrib2.out done # For grids @@ -268,22 +264,13 @@ # --------------------------------------------------------------------------- # # 5. Clean up - set +x; [[ "$LOUD" = YES ]] && set -v + set -v rm -f gribfile gribindex.* awipsgrb.* awipsbull.data set +v # --------------------------------------------------------------------------- # # 6. Ending output - echo ' ' - echo ' ' - echo "Ending at : $(date)" - echo ' ' - echo ' *** End of MWW3 product generation ***' - echo ' ' - [[ "$LOUD" = YES ]] && set -x - msg="$job completed normally" - postmsg "$jlogfile" "$msg" # End of GFSWAVE product generation script -------------------------------------- # diff --git a/scripts/exgfs_wave_prep.sh b/scripts/exgfs_wave_prep.sh index fc0180a808..be006c1c85 100755 --- a/scripts/exgfs_wave_prep.sh +++ b/scripts/exgfs_wave_prep.sh @@ -1,5 +1,5 @@ -#!/bin/bash -# +#! /usr/bin/env bash + ################################################################################ # # UNIX Script Documentation Block @@ -12,9 +12,7 @@ # as follows: # # wave_prnc_ice.sh : preprocess ice fields. # -# wave_prnc_wnd.sh : preprocess wind fields (uncoupled run, not active) # # wave_prnc_cur.sh : preprocess current fields. # -# wave_g2ges.sh : find and copy wind grib2 files. # # # # Remarks : # # - For non-fatal errors output is witten to the wave.log file. 
# @@ -32,6 +30,7 @@ # Nov2019 JHAlves - Merging wave scripts to global workflow # # Jun2020 JHAlves - Porting to R&D machine Hera # # Oct2020 JMeixner - Updating RTOFS dates for processing minimal amount # +# May2022 JMeixner - Clean up and moving input to other routine # # # # WAV_MOD_ID and WAV_MOD_TAG replace modID. WAV_MOD_TAG # # is used for ensemble-specific I/O. For deterministic # @@ -40,46 +39,32 @@ ############################################################################### # --------------------------------------------------------------------------- # # 0. Preparations -# 0.a Basic modes of operation - set -x - # Use LOUD variable to turn on/off trace. Defaults to YES (on). - export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES - [[ "$LOUD" != YES ]] && set +x +source "$HOMEgfs/ush/preamble.sh" + +# 0.a Basic modes of operation # Set wave model ID tag to include member number # if ensemble; waveMEMB var empty in deterministic - export WAV_MOD_TAG=${CDUMP}wave${waveMEMB} + export WAV_MOD_TAG=${RUN}wave${waveMEMB} cd $DATA mkdir outtmp - msg="HAS BEGUN on $(hostname)" - postmsg "$jlogfile" "$msg" - msg="Starting MWW3 PREPROCESSOR SCRIPT for $WAV_MOD_TAG" - postmsg "$jlogfile" "$msg" + echo "HAS BEGUN on $(hostname)" + echo "Starting MWW3 PREPROCESSOR SCRIPT for $WAV_MOD_TAG" set +x echo ' ' echo ' ********************************' - echo ' *** MWW3 PREPROCESSOR SCRIPT ***' + echo ' *** WW3 PREPROCESSOR SCRIPT ***' echo ' ********************************' echo ' PREP for wave component of NCEP coupled system' echo " Wave component identifier : $WAV_MOD_TAG " echo ' ' echo "Starting at : $(date)" echo ' ' - [[ "$LOUD" = YES ]] && set -x - - # export MP_PGMMODEL=mpmd - # export MP_CMDFILE=./cmdfile - - if [ "$INDRUN" = 'no' ] - then - FHMAX_WAV=${FHMAX_WAV:-3} - else - FHMAX_WAV=${FHMAX_WAV:-384} - fi + set_trace # 0.b Date and time stuff @@ -144,7 +129,7 @@ echo " starting time : $time_beg" echo " ending time : $time_end" echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace # Script will run only if pre-defined NTASKS # The actual work is distributed over these tasks. @@ -161,7 +146,7 @@ echo 'Preparing input files :' echo '-----------------------' echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace # 1.a Model definition files @@ -169,24 +154,22 @@ touch cmdfile grdINP='' - if [ "${WW3ATMINP}" = 'YES' ]; then grdINP="${grdINP} $WAVEWND_FID" ; fi - if [ "${WW3ICEINP}" = 'YES' ]; then grdINP="${grdINP} $WAVEICE_FID" ; fi - if [ "${WW3CURINP}" = 'YES' ]; then grdINP="${grdINP} $WAVECUR_FID" ; fi + if [ "${WW3ATMINP}" = 'YES' ]; then grdINP="${grdINP} $WAVEWND_FID" ; fi + if [ "${WW3ICEINP}" = 'YES' ]; then grdINP="${grdINP} $WAVEICE_FID" ; fi + if [ "${WW3CURINP}" = 'YES' ]; then grdINP="${grdINP} $WAVECUR_FID" ; fi ifile=1 for grdID in $grdINP $waveGRD do - if [ -f "$COMIN/rundata/${CDUMP}wave.mod_def.${grdID}" ] + if [ -f "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" ] then set +x - echo " Mod def file for $grdID found in ${COMIN}/rundata. copying ...." - [[ "$LOUD" = YES ]] && set -x - cp $COMIN/rundata/${CDUMP}wave.mod_def.${grdID} mod_def.$grdID + echo " Mod def file for $grdID found in ${COM_WAVE_PREP}. copying ...." 
+ set_trace + cp ${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID} mod_def.$grdID else - msg="FATAL ERROR: NO MODEL DEFINITION FILE" - postmsg "$jlogfile" "$msg" set +x echo ' ' echo '*********************************************************** ' @@ -194,16 +177,16 @@ echo '*********************************************************** ' echo " grdID = $grdID" echo ' ' - echo $msg - [[ "$LOUD" = YES ]] && set -x + echo "FATAL ERROR: NO MODEL DEFINITION FILE" + set_trace err=2;export err;${errchk} fi done # 1.b Netcdf Preprocessor template files - if [ "$WW3ATMINP" = 'YES' ]; then itype="$itype wind" ; fi - if [ "$WW3ICEINP" = 'YES' ]; then itype="$itype ice" ; fi - if [ "$WW3CURINP" = 'YES' ]; then itype="$itype cur" ; fi + if [[ "${WW3ATMINP}" == 'YES' ]]; then itype="${itype:-} wind" ; fi + if [[ "${WW3ICEINP}" == 'YES' ]]; then itype="${itype:-} ice" ; fi + if [[ "${WW3CURINP}" == 'YES' ]]; then itype="${itype:-} cur" ; fi for type in $itype do @@ -235,10 +218,8 @@ echo ' ' echo " ww3_prnc.${type}.$grdID.inp.tmpl copied ($PARMwave)." echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace else - msg="ABNORMAL EXIT: NO FILE $file" - ./postmsg "$jlogfile" "$msg" set +x echo ' ' echo '************************************** ' @@ -246,9 +227,9 @@ echo '************************************** ' echo " ww3_prnc.${type}.$grdID.inp.tmpl" echo ' ' - echo $msg + echo "ABNORMAL EXIT: NO FILE $file" echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace err=4;export err;${errchk} fi done @@ -271,14 +252,13 @@ if [ -d ice ] then - postmsg "$jlogfile" "FATAL ERROR ice field not generated." set +x echo ' ' echo ' FATAL ERROR: ice field not generated ' echo ' ' sed "s/^/wave_prnc_ice.out : /g" wave_prnc_ice.out echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace err=5;export err;${errchk} else mv -f wave_prnc_ice.out $DATA/outtmp @@ -286,7 +266,7 @@ echo ' ' echo ' Ice field unpacking successful.' echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace fi else echo ' ' @@ -301,335 +281,24 @@ # --------------------------------------------------------------------------- # # WIND processing -# This block of code is not used by GFSv16b and is here for un-coupled wave runs if [ "${WW3ATMINP}" = 'YES' ]; then -# --------------------------------------------------------------------------- # -# 3. Wind pre-processing - - if [ "${RUNMEM}" = "-1" ] || [ "${WW3ATMIENS}" = "T" ] || [ "$waveMEMB" = "00" ] - then - - rm -f cmdfile - touch cmdfile - chmod 744 cmdfile - -# 3.a Gather and pre-process grib2 files - ymdh=$ymdh_beg - - if [ ${CFP_MP:-"NO"} = "YES" ]; then nm=0 ; fi # Counter for MP CFP - while [ "$ymdh" -le "$ymdh_end" ] - do - if [ ${CFP_MP:-"NO"} = "YES" ]; then - echo "$nm $USHwave/wave_g2ges.sh $ymdh > grb_$ymdh.out 2>&1" >> cmdfile - nm=$(expr $nm + 1) - else - echo "$USHwave/wave_g2ges.sh $ymdh > grb_$ymdh.out 2>&1" >> cmdfile - fi - ymdh=$($NDATE $WAV_WND_HOUR_INC $ymdh) - done - -# 3.b Execute the serial or parallel cmdfile - -# Set number of processes for mpmd - cat cmdfile - - wavenproc=$(wc -l cmdfile | awk '{print $1}') - wavenproc=$(echo $((${wavenproc}<${NTASKS}?${wavenproc}:${NTASKS}))) - - set +x - echo ' ' - echo " Executing the wnd grib cmd file at : $(date)" - echo ' ------------------------------------' - echo ' ' - [[ "$LOUD" = YES ]] && set -x - - if [ "$wavenproc" -gt '1' ] - then - if [ ${CFP_MP:-"NO"} = "YES" ]; then - ${wavempexec} -n ${wavenproc} ${wave_mpmd} cmdfile - else - ${wavempexec} ${wavenproc} ${wave_mpmd} cmdfile - fi - exit=$? - else - ./cmdfile - exit=$? 
- fi - - if [ "$exit" != '0' ] - then - set +x - echo ' ' - echo '********************************************************' - echo '*** FATAL ERROR: CMDFILE FAILED IN WIND GENERATION ***' - echo '********************************************************' - echo ' See Details Below ' - echo ' ' - [[ "$LOUD" = YES ]] && set -x - fi - -# 3.c Check for errors - - set +x - echo ' ' - echo ' Checking for errors.' - echo ' ' - [[ "$LOUD" = YES ]] && set -x - -# We will go on if the number of errors in files is less -# than err_max - - [[ "$LOUD" = YES ]] && set -x - err_max=1 - - - ymdh=$ymdh_beg - nr_err=0 - - set +x - echo ' Sources of grib2 files :' - [[ "$LOUD" = YES ]] && set -x - while [ "$ymdh" -le "$ymdh_end" ] - do - if [ -d grb_${ymdh} ] - then - set +x - echo ' ' - echo " File for $ymdh : error in wave_g2ges.sh" - echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" " File for $ymdh : error in wave_g2ges.sh" - nr_err=$(expr $nr_err + 1) - rm -f gwnd.$ymdh - else - grbfile=$(grep 'File for' grb_${ymdh}.out) - if [ -z "$grbfile" ] - then - set +x - echo ' ' - echo " File for $ymdh : cannot identify source" - echo ' ' - [[ "$LOUD" = YES ]] && set -x - nr_err=$(expr $nr_err + 1) - rm -f gwnd.$ymdh - else - if [ ! -f gwnd.$ymdh ] - then - set +x - echo ' ' - echo " File for $ymdh : file not found" - echo ' ' - [[ "$LOUD" = YES ]] && set -x - nr_err=$(expr $nr_err + 1) - else - set +x - echo ' ' - echo " $grbfile" - echo ' ' - [[ "$LOUD" = YES ]] && set -x - mv -f grb_${ymdh}.out $DATA/outtmp - fi - fi - fi - ymdh=$($NDATE $WAV_WND_HOUR_INC $ymdh) - done - - if [ -f grb_*.out ] - then - set +x - echo ' ' - echo '**********************************' - echo '*** ERROR OUTPUT wave_g2ges.sh ***' - echo '**********************************' - echo ' Possibly in multiple calls' - [[ "$LOUD" = YES ]] && set -x - set +x - for file in grb_*.out - do - echo ' ' - sed "s/^/$file : /g" $file - done - echo ' ' - [[ "$LOUD" = YES ]] && set -x - mv -f grb_*.out $DATA/outtmp - postmsg "$jlogfile" "WARNING: NON-FATAL ERROR in wave_g2ges.sh, possibly in multiple calls." - fi - - if [ "$nr_err" -gt "$err_max" ] - then - msg="ABNORMAL EXIT: TOO MANY MISSING WIND INPUT GRB2 FILES" - postmsg "$jlogfile" "$msg" - set +x - echo ' ' - echo '********************************************* ' - echo '*** FATAL ERROR : ERROR(S) IN WIND FILES *** ' - echo '********************************************* ' - echo ' ' - echo $msg - [[ "$LOUD" = YES ]] && set -x - err=6;export err;${errchk} - fi - - rm -f cmdfile - -# 3.d Getwind data into single file - - set +x - echo ' ' - echo ' Concatenate extracted wind fields ...' 
- echo ' ' - [[ "$LOUD" = YES ]] && set -x - - files=$(ls gwnd.* 2> /dev/null) - - if [ -z "$files" ] - then - msg="ABNORMAL EXIT: NO gwnd.* FILES FOUND" - postmsg "$jlogfile" "$msg" - set +x - echo ' ' - echo '******************************************** ' - echo '*** FATAL ERROR : CANNOT FIND WIND FILES *** ' - echo '******************************************** ' - echo ' ' - [[ "$LOUD" = YES ]] && set -x - err=7;export err;${errchk} - fi - - rm -f gfs.wind - - for file in $files - do - cat $file >> gfs.wind - rm -f $file - done - -# 3.e Run ww3_prnc - -# Convert gfs wind to netcdf - $WGRIB2 gfs.wind -netcdf gfs.nc - - for grdID in $WAVEWND_FID $curvID - do - - set +x - echo ' ' - echo " Running wind fields through preprocessor for grid $grdID" - echo ' ' - [[ "$LOUD" = YES ]] && set -x - - sed -e "s/HDRFL/T/g" ww3_prnc.wind.$grdID.tmpl > ww3_prnc.inp - ln -sf mod_def.$grdID mod_def.ww3 - - set +x - echo "Executing $EXECwave/ww3_prnc" - [[ "$LOUD" = YES ]] && set -x - - $EXECwave/ww3_prnc > prnc.out - err=$? - - if [ "$err" != '0' ] - then - msg="ABNORMAL EXIT: ERROR IN waveprnc" - postmsg "$jlogfile" "$msg" - set +x - echo ' ' - echo '*************************************** ' - echo '*** FATAL ERROR : ERROR IN waveprnc *** ' - echo '*************************************** ' - echo ' ' - [[ "$LOUD" = YES ]] && set -x - err=8;export err;${errchk} - fi - - if [ ! -f wind.ww3 ] - then - msg="ABNORMAL EXIT: FILE wind.ww3 MISSING" - postmsg "$jlogfile" "$msg" - set +x - echo ' ' - cat waveprep.out - echo ' ' - echo '****************************************' - echo '*** FATAL ERROR : wind.ww3 NOT FOUND ***' - echo '****************************************' - echo ' ' - [[ "$LOUD" = YES ]] && set -x - err=9;export err;${errchk} - fi - - rm -f mod_def.ww3 - rm -f ww3_prep.inp - - mv wind.ww3 wind.$grdID - mv times.WND times.$grdID - -# 3.f Check to make sure wind files are properly incremented - - first_pass='yes' - windOK='yes' - while read line - do - date1=$(echo $line | cut -d ' ' -f 1) - date2=$(echo $line | cut -d ' ' -f 2) - ymdh="$date1$(echo $date2 | cut -c1-2)" - if [ "$first_pass" = 'no' ] - then - hr_inc=$($NHOUR $ymdh $ymdh_prev) - if [ "${hr_inc}" -gt "${WAV_WND_HOUR_INC}" ] - then - set +x - echo "Incorrect wind forcing increment at $ymdh" - [[ "$LOUD" = YES ]] && set -x - windOK='no' - fi - fi - ymdh_prev=$ymdh - first_pass='no' - done < times.$grdID - - if [ "$windOK" = 'no' ] - then - set +x - echo ' ' - echo '******************************************************' - echo '*** FATAL ERROR : WIND DATA INCREMENT INCORRECT !! ***' - echo '******************************************************' - echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "FATAL ERROR: $WAV_MOD_TAG prep $grdID $date $cycle : error in wind increment." - err=10;export err;${errchk} - fi - - done - - rm -f gfs.wind - rm -f mod_def.ww3 - rm -f ww3_prnc.inp - else - echo ' ' - echo " Wind input is not perturbed, single wnd file generated, skipping ${WAV_MOD_TAG}" - echo ' ' - - fi - - else + echo ' ' + echo '*************************************************** ' + echo '*** FATAL ERROR : Not set-up to preprocess wind *** ' + echo '*************************************************** ' + echo ' ' + set_trace + err=6;export err;${errchk} - echo ' ' - echo ' Atmospheric inputs not generated, this run did not request pre-processed winds ' - echo ' ' - fi #------------------------------------------------------------------- -# CURR processing +# 3. 
Process current fields if [ "${WW3CURINP}" = 'YES' ]; then -#------------------------------------------------------------------- -# 4. Process current fields -# 4.a Get into single file +# Get into single file if [ "${RUNMEM}" = "-1" ] || [ "${WW3CURIENS}" = "T" ] || [ "$waveMEMB" = "00" ] then @@ -637,7 +306,7 @@ echo ' ' echo ' Concatenate binary current fields ...' echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace # Prepare files for cfp process rm -f cmdfile @@ -650,22 +319,22 @@ export RPDY=$($NDATE -24 ${RPDY}00 | cut -c1-8) fi #Set the first time for RTOFS files to be the beginning time of simulation - ymdh_rtofs=$ymdh_beg + ymdh_rtofs=$ymdh_beg if [ "$FHMAX_WAV_CUR" -le 72 ]; then - rtofsfile1=$COMIN_WAV_RTOFS/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f024_prog.nc - rtofsfile2=$COMIN_WAV_RTOFS/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f048_prog.nc - rtofsfile3=$COMIN_WAV_RTOFS/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f072_prog.nc + rtofsfile1="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f024_prog.nc" + rtofsfile2="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f048_prog.nc" + rtofsfile3="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f072_prog.nc" if [ ! -f $rtofsfile1 ] || [ ! -f $rtofsfile2 ] || [ ! -f $rtofsfile3 ]; then #Needed current files are not available, so use RTOFS from previous day export RPDY=$($NDATE -24 ${RPDY}00 | cut -c1-8) fi else - rtofsfile1=$COMIN_WAV_RTOFS/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f096_prog.nc - rtofsfile2=$COMIN_WAV_RTOFS/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f120_prog.nc - rtofsfile3=$COMIN_WAV_RTOFS/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f144_prog.nc - rtofsfile4=$COMIN_WAV_RTOFS/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f168_prog.nc - rtofsfile5=$COMIN_WAV_RTOFS/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f192_prog.nc + rtofsfile1="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f096_prog.nc" + rtofsfile2="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f120_prog.nc" + rtofsfile3="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f144_prog.nc" + rtofsfile4="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f168_prog.nc" + rtofsfile5="${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_f192_prog.nc" if [ ! -f $rtofsfile1 ] || [ ! -f $rtofsfile2 ] || [ ! -f $rtofsfile3 ] || [ ! -f $rtofsfile4 ] || [ ! 
-f $rtofsfile5 ]; then #Needed current files are not available, so use RTOFS from previous day @@ -673,8 +342,6 @@ fi fi - export COMIN_WAV_CUR=$COMIN_WAV_RTOFS/${WAVECUR_DID}.${RPDY} - ymdh_end_rtofs=$($NDATE ${FHMAX_WAV_CUR} ${RPDY}00) if [ "$ymdh_end" -lt "$ymdh_end_rtofs" ]; then ymdh_end_rtofs=$ymdh_end @@ -693,8 +360,8 @@ fhr_rtofs=$(${NHOUR} ${ymdh_rtofs} ${RPDY}00) fh3_rtofs=$(printf "%03d" "${fhr_rtofs#0}") - curfile1h=${COMIN_WAV_CUR}/rtofs_glo_2ds_${fext}${fh3_rtofs}_prog.nc - curfile3h=${COMIN_WAV_CUR}/rtofs_glo_2ds_${fext}${fh3_rtofs}_prog.nc + curfile1h=${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_${fext}${fh3_rtofs}_prog.nc + curfile3h=${COM_RTOFS}/${WAVECUR_DID}.${RPDY}/rtofs_glo_2ds_${fext}${fh3_rtofs}_prog.nc if [ -s ${curfile1h} ] && [ "${FLGHF}" = "T" ] ; then curfile=${curfile1h} @@ -708,14 +375,14 @@ else curfile=${curfile3h} fi - set $setoff + set -x echo ' ' echo '************************************** ' echo "*** FATAL ERROR: NO CUR FILE $curfile *** " echo '************************************** ' echo ' ' - set $seton - postmsg "$jlogfile" "FATAL ERROR - NO CURRENT FILE (RTOFS)" + set_trace + echo "FATAL ERROR - NO CURRENT FILE (RTOFS)" err=11;export err;${errchk} exit $err echo ' ' @@ -747,7 +414,7 @@ echo " Executing the curr prnc cmdfile at : $(date)" echo ' ------------------------------------' echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace if [ $wavenproc -gt '1' ] then @@ -772,22 +439,21 @@ echo '********************************************' echo ' See Details Below ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace fi files=$(ls ${WAVECUR_DID}.* 2> /dev/null) if [ -z "$files" ] then - msg="ABNORMAL EXIT: NO ${WAVECUR_FID}.* FILES FOUND" - postmsg "$jlogfile" "$msg" set +x echo ' ' echo '******************************************** ' echo '*** FATAL ERROR : CANNOT FIND CURR FILES *** ' echo '******************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + echo "ABNORMAL EXIT: NO ${WAVECUR_FID}.* FILES FOUND" + set_trace err=11;export err;${errchk} fi @@ -799,7 +465,7 @@ cat $file >> cur.${WAVECUR_FID} done - cp -f cur.${WAVECUR_FID} ${COMOUT}/rundata/${CDUMP}wave.${WAVECUR_FID}.$cycle.cur + cp -f cur.${WAVECUR_FID} ${COM_WAVE_PREP}/${RUN}wave.${WAVECUR_FID}.$cycle.cur else echo ' ' @@ -816,276 +482,7 @@ fi # --------------------------------------------------------------------------- # -# 5. Create ww3_multi.inp - -# 5.a ww3_multi template - - if [ -f $PARMwave/ww3_multi.${NET}.inp.tmpl ] - then - cp $PARMwave/ww3_multi.${NET}.inp.tmpl ww3_multi.inp.tmpl - fi - - if [ ! -f ww3_multi.inp.tmpl ] - then - msg="ABNORMAL EXIT: NO TEMPLATE FOR INPUT FILE" - postmsg "$jlogfile" "$msg" - set +x - echo ' ' - echo '************************************************ ' - echo '*** FATAL ERROR : NO TEMPLATE FOR INPUT FILE *** ' - echo '************************************************ ' - echo ' ' - echo $msg - [[ "$LOUD" = YES ]] && set -x - err=12;export err;${errchk} - fi - -# 5.b Buoy location file - - if [ -f $PARMwave/wave_${NET}.buoys ] - then - cp $PARMwave/wave_${NET}.buoys buoy.loc - fi - - if [ -f buoy.loc ] - then - set +x - echo " buoy.loc copied ($PARMwave/wave_${NET}.buoys)." - [[ "$LOUD" = YES ]] && set -x - else - set +x - echo " buoy.loc not found. 
**** WARNING **** " - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" " FATAL ERROR : buoy.loc ($PARMwave/wave_${NET}.buoys) NOT FOUND" - touch buoy.loc - err=13;export err;${errchk} - fi - -# Initialize inp file parameters - NFGRIDS=0 - NMGRIDS=0 - CPLILINE='$' - ICELINE='$' - ICEFLAG='no' - CURRLINE='$' - CURRFLAG='no' - WINDLINE='$' - WINDFLAG='no' - UNIPOINTS='$' - -# Check for required inputs and coupling options - if [ $waveuoutpGRD ] - then - UNIPOINTS="'$waveuoutpGRD'" - fi - -# Check if waveesmfGRD is set - if [ ${waveesmfGRD} ] - then - NFGRIDS=$(expr $NFGRIDS + 1) - fi - - case ${WW3ATMINP} in - 'YES' ) - NFGRIDS=$(expr $NFGRIDS + 1) - WINDLINE=" '$WAVEWND_FID' F F T F F F F F F" - WINDFLAG="$WAVEWND_FID" - ;; - 'CPL' ) - WNDIFLAG='T' - if [ ${waveesmfGRD} ] - then - WINDFLAG="CPL:${waveesmfGRD}" - CPLILINE=" '${waveesmfGRD}' F F T F F F F F F" - else - WINDFLAG="CPL:native" - fi - ;; - esac - - case ${WW3ICEINP} in - 'YES' ) - NFGRIDS=$(expr $NFGRIDS + 1) - ICEIFLAG='T' - ICELINE=" '$WAVEICE_FID' F F F T F F F F F" - ICEFLAG="$WAVEICE_FID" - ;; - 'CPL' ) - ICEIFLAG='T' - if [ ${waveesmfGRD} ] - then - ICEFLAG="CPL:${waveesmfGRD}" - CPLILINE=" '${waveesmfGRD}' F F ${WNDIFLAG} T F F F F F" - else - ICEFLAG="CPL:native" - fi - ;; - esac - - case ${WW3CURINP} in - 'YES' ) - if [ "$WAVECUR_FID" != "$WAVEICE_FID" ]; then - NFGRIDS=$(expr $NFGRIDS + 1) - CURRLINE=" '$WAVECUR_FID' F T F F F F F F F" - CURRFLAG="$WAVECUR_FID" - else # cur fields share the same grid as ice grid - ICELINE=" '$WAVEICE_FID' F T F ${ICEIFLAG} F F F F F" - CURRFLAG="$WAVEICE_FID" - fi - ;; - 'CPL' ) - CURIFLAG='T' - if [ ${waveesmfGRD} ] - then - CURRFLAG="CPL:${waveesmfGRD}" - CPLILINE=" '${waveesmfGRD}' F T ${WNDIFLAG} ${ICEFLAG} F F F F F" - else - CURRFLAG="CPL:native" - fi - ;; - esac - - unset agrid - agrid= - gline= - GRDN=0 -# grdGRP=1 # Single group for now - for grid in ${waveGRD} - do - GRDN=$(expr ${GRDN} + 1) - agrid=( ${agrid[*]} ${grid} ) - NMGRIDS=$(expr $NMGRIDS + 1) - gridN=$(echo $waveGRDN | awk -v i=$GRDN '{print $i}') - gridG=$(echo $waveGRDG | awk -v i=$GRDN '{print $i}') - gline="${gline}'${grid}' 'no' 'CURRFLAG' 'WINDFLAG' 'ICEFLAG' 'no' 'no' 'no' 'no' 'no' ${gridN} ${gridG} 0.00 1.00 F\n" - done - gline="${gline}\$" - echo $gline - - sed -e "s/NFGRIDS/$NFGRIDS/g" \ - -e "s/NMGRIDS/${NMGRIDS}/g" \ - -e "s/FUNIPNT/${FUNIPNT}/g" \ - -e "s/IOSRV/${IOSRV}/g" \ - -e "s/FPNTPROC/${FPNTPROC}/g" \ - -e "s/FGRDPROC/${FGRDPROC}/g" \ - -e "s/OUTPARS/${OUTPARS_WAV}/g" \ - -e "s/CPLILINE/${CPLILINE}/g" \ - -e "s/UNIPOINTS/${UNIPOINTS}/g" \ - -e "s/GRIDLINE/${gline}/g" \ - -e "s/ICELINE/$ICELINE/g" \ - -e "s/CURRLINE/$CURRLINE/g" \ - -e "s/WINDLINE/$WINDLINE/g" \ - -e "s/ICEFLAG/$ICEFLAG/g" \ - -e "s/CURRFLAG/$CURRFLAG/g" \ - -e "s/WINDFLAG/$WINDFLAG/g" \ - -e "s/RUN_BEG/$time_beg/g" \ - -e "s/RUN_END/$time_end/g" \ - -e "s/OUT_BEG/$time_beg_out/g" \ - -e "s/OUT_END/$time_end/g" \ - -e "s/DTFLD/ $DTFLD_WAV/g" \ - -e "s/FLAGMASKCOMP/ $FLAGMASKCOMP/g" \ - -e "s/FLAGMASKOUT/ $FLAGMASKOUT/g" \ - -e "s/GOFILETYPE/ $GOFILETYPE/g" \ - -e "s/POFILETYPE/ $POFILETYPE/g" \ - -e "s/FIELDS/$FIELDS/g" \ - -e "s/DTPNT/ $DTPNT_WAV/g" \ - -e "/BUOY_FILE/r buoy.loc" \ - -e "s/BUOY_FILE/DUMMY/g" \ - -e "s/RST_BEG/$time_rst_ini/g" \ - -e "s/RSTTYPE/$RSTTYPE_WAV/g" \ - -e "s/RST_2_BEG/$time_rst2_ini/g" \ - -e "s/DTRST/$DT_1_RST_WAV/g" \ - -e "s/DT_2_RST/$DT_2_RST_WAV/g" \ - -e "s/RST_END/$time_rst1_end/g" \ - -e "s/RST_2_END/$time_rst2_end/g" \ - ww3_multi.inp.tmpl | \ - sed -n "/DUMMY/!p" > ww3_multi.inp - - rm -f 
ww3_multi.inp.tmpl buoy.loc - - if [ -f ww3_multi.inp ] - then - echo " Copying file ww3_multi.${WAV_MOD_TAG}.inp to $COMOUT " - cp ww3_multi.inp ${COMOUT}/rundata/ww3_multi.${WAV_MOD_TAG}.${cycle}.inp - else - echo "FATAL ERROR: file ww3_multi.${WAV_MOD_TAG}.${cycle}.inp NOT CREATED, ABORTING" - err=13;export err;${errchk} - fi - -# 6. Copy rmp grid remapping pre-processed coefficients - - if [ "${USE_WAV_RMP:-YES}" = "YES" ]; then - if ls $FIXwave/rmp_src_to_dst_conserv_* 2> /dev/null - then - for file in $(ls $FIXwave/rmp_src_to_dst_conserv_*) ; do - cp -f $file ${COMOUT}/rundata - done - else - msg="NO rmp precomputed nc files found, is this OK???" - postmsg "$jlogfile" "$msg" - set +x - echo ' ' - echo '************************************************ ' - echo '*** FATAL ERROR : NO PRECOMPUTED RMP FILES FOUND *** ' - echo '************************************************ ' - echo ' ' - echo $msg - [[ "$LOUD" = YES ]] && set -x - err=13;export err;${errchk} - fi - fi - - -# --------------------------------------------------------------------------- # -# 6. Output to /com - - if [ "$SENDCOM" = 'YES' ] - then - - if [ "${WW3ATMINP}" = 'YES' ]; then - - for grdID in $WAVEWND_FID $curvID - do - set +x - echo ' ' - echo " Saving wind.$grdID as $COMOUT/rundata/${WAV_MOD_TAG}.$grdID.$PDY$cyc.wind" - echo " Saving times.$grdID file as $COMOUT/rundata/${WAV_MOD_TAG}.$grdID.$PDY$cyc.$grdID.wind.times" - echo ' ' - [[ "$LOUD" = YES ]] && set -x - cp wind.$grdID $COMOUT/rundata/${WAV_MOD_TAG}.$grdID.$PDY$cyc.wind - cp times.$grdID $COMOUT/rundata/${WAV_MOD_TAG}.$grdID.$PDY$cyc.$grdID.wind.times - done - fi - -# if [ "${WW3CURINP}" = 'YES' ]; then -# -# for grdID in $WAVECUR_FID -# do -# set +x -# echo ' ' -# echo " Saving cur.$grdID as $COMOUT/rundata/${WAV_MOD_TAG}.$grdID.$PDY$cyc.cur" -# echo ' ' -# [[ "$LOUD" = YES ]] && set -x -# cp cur.$grdID $COMOUT/rundata/${WAV_MOD_TAG}.$grdID.$PDY$cyc.cur -# done -# fi - fi - - rm -f wind.* - rm -f $WAVEICE_FID.* - rm -f times.* - -# --------------------------------------------------------------------------- # -# 7. Ending output - - set +x - echo ' ' - echo "Ending at : $(date)" - echo ' ' - echo ' *** End of MWW3 preprocessor ***' - echo ' ' - [[ "$LOUD" = YES ]] && set -x +# 4. 
Ending output - exit $err # End of MWW3 preprocessor script ------------------------------------------- # diff --git a/scripts/exglobal_aero_analysis_finalize.py b/scripts/exglobal_aero_analysis_finalize.py new file mode 100755 index 0000000000..7342bf8357 --- /dev/null +++ b/scripts/exglobal_aero_analysis_finalize.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python3 +# exglobal_aero_analysis_finalize.py +# This script creates an AerosolAnalysis class +# and runs the finalize method +# which perform post-processing and clean up activities +# for a global aerosol variational analysis +import os + +from pygw.logger import Logger +from pygw.configuration import cast_strdict_as_dtypedict +from pygfs.task.aero_analysis import AerosolAnalysis + + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the aerosol analysis task + AeroAnl = AerosolAnalysis(config) + AeroAnl.finalize() diff --git a/scripts/exglobal_aero_analysis_initialize.py b/scripts/exglobal_aero_analysis_initialize.py new file mode 100755 index 0000000000..6c4135fc2d --- /dev/null +++ b/scripts/exglobal_aero_analysis_initialize.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python3 +# exglobal_aero_analysis_initialize.py +# This script creates an AerosolAnalysis class +# and runs the initialize method +# which create and stage the runtime directory +# and create the YAML configuration +# for a global aerosol variational analysis +import os + +from pygw.logger import Logger +from pygw.configuration import cast_strdict_as_dtypedict +from pygfs.task.aero_analysis import AerosolAnalysis + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the aerosol analysis task + AeroAnl = AerosolAnalysis(config) + AeroAnl.initialize() diff --git a/scripts/exglobal_aero_analysis_run.py b/scripts/exglobal_aero_analysis_run.py new file mode 100755 index 0000000000..887700f476 --- /dev/null +++ b/scripts/exglobal_aero_analysis_run.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python3 +# exglobal_aero_analysis_run.py +# This script creates an AerosolAnalysis object +# and runs the execute method +# which executes the global aerosol variational analysis +import os + +from pygw.logger import Logger +from pygw.configuration import cast_strdict_as_dtypedict +from pygfs.task.aero_analysis import AerosolAnalysis + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the aerosol analysis task + AeroAnl = AerosolAnalysis(config) + AeroAnl.execute() diff --git a/scripts/exglobal_archive.sh b/scripts/exglobal_archive.sh new file mode 100755 index 0000000000..730563e256 --- /dev/null +++ b/scripts/exglobal_archive.sh @@ -0,0 +1,475 @@ +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" + +############################################## +# Begin JOB SPECIFIC work +############################################## + +# ICS are restarts and always lag INC by $assim_freq hours +ARCHINC_CYC=${ARCH_CYC} +ARCHICS_CYC=$((ARCH_CYC-assim_freq)) +if [ "${ARCHICS_CYC}" -lt 0 ]; then + ARCHICS_CYC=$((ARCHICS_CYC+24)) +fi + +# CURRENT CYCLE +APREFIX="${RUN}.t${cyc}z." + +# Realtime parallels run GFS MOS on 1 day delay +# If realtime parallel, back up CDATE_MOS one day +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +CDATE_MOS=${PDY}${cyc} +if [ "${REALTIME}" = "YES" ]; then + CDATE_MOS=$(${NDATE} -24 "${PDY}${cyc}") +fi +PDY_MOS="${CDATE_MOS:0:8}" + +############################################################### +# Archive online for verification and diagnostics +############################################################### +source "${HOMEgfs}/ush/file_utils.sh" + +[[ ! -d ${ARCDIR} ]] && mkdir -p "${ARCDIR}" +nb_copy "${COM_ATMOS_ANALYSIS}/${APREFIX}gsistat" "${ARCDIR}/gsistat.${RUN}.${PDY}${cyc}" +nb_copy "${COM_ATMOS_GRIB_1p00}/${APREFIX}pgrb2.1p00.anl" "${ARCDIR}/pgbanl.${RUN}.${PDY}${cyc}.grib2" + +# Archive 1 degree forecast GRIB2 files for verification +if [[ "${RUN}" == "gfs" ]]; then + fhmax=${FHMAX_GFS} + fhr=0 + while [ "${fhr}" -le "${fhmax}" ]; do + fhr2=$(printf %02i "${fhr}") + fhr3=$(printf %03i "${fhr}") + nb_copy "${COM_ATMOS_GRIB_1p00}/${APREFIX}pgrb2.1p00.f${fhr3}" "${ARCDIR}/pgbf${fhr2}.${RUN}.${PDY}${cyc}.grib2" + fhr=$((10#${fhr} + 10#${FHOUT_GFS} )) + done +fi +if [[ "${RUN}" == "gdas" ]]; then + flist="000 003 006 009" + for fhr in ${flist}; do + fname="${COM_ATMOS_GRIB_1p00}/${APREFIX}pgrb2.1p00.f${fhr}" + # TODO Shouldn't the archived files also use three-digit tags? + fhr2=$(printf %02i $((10#${fhr}))) + nb_copy "${fname}" "${ARCDIR}/pgbf${fhr2}.${RUN}.${PDY}${cyc}.grib2" + done +fi + +if [[ -s "${COM_ATMOS_TRACK}/avno.t${cyc}z.cyclone.trackatcfunix" ]]; then + # shellcheck disable=2153 + PSLOT4=${PSLOT:0:4} + # shellcheck disable= + PSLOT4=${PSLOT4^^} + sed "s:AVNO:${PSLOT4}:g" < "${COM_ATMOS_TRACK}/avno.t${cyc}z.cyclone.trackatcfunix" \ + > "${ARCDIR}/atcfunix.${RUN}.${PDY}${cyc}" + sed "s:AVNO:${PSLOT4}:g" < "${COM_ATMOS_TRACK}/avnop.t${cyc}z.cyclone.trackatcfunix" \ + > "${ARCDIR}/atcfunixp.${RUN}.${PDY}${cyc}" +fi + +if [[ "${RUN}" == "gdas" ]] && [[ -s "${COM_ATMOS_TRACK}/gdas.t${cyc}z.cyclone.trackatcfunix" ]]; then + # shellcheck disable=2153 + PSLOT4=${PSLOT:0:4} + # shellcheck disable= + PSLOT4=${PSLOT4^^} + sed "s:AVNO:${PSLOT4}:g" < "${COM_ATMOS_TRACK}/gdas.t${cyc}z.cyclone.trackatcfunix" \ + > "${ARCDIR}/atcfunix.${RUN}.${PDY}${cyc}" + sed "s:AVNO:${PSLOT4}:g" < "${COM_ATMOS_TRACK}/gdasp.t${cyc}z.cyclone.trackatcfunix" \ + > "${ARCDIR}/atcfunixp.${RUN}.${PDY}${cyc}" +fi + +if [ "${RUN}" = "gfs" ]; then + nb_copy "${COM_ATMOS_GENESIS}/storms.gfso.atcf_gen.${PDY}${cyc}" "${ARCDIR}/." + nb_copy "${COM_ATMOS_GENESIS}/storms.gfso.atcf_gen.altg.${PDY}${cyc}" "${ARCDIR}/." + nb_copy "${COM_ATMOS_TRACK}/trak.gfso.atcfunix.${PDY}${cyc}" "${ARCDIR}/." + nb_copy "${COM_ATMOS_TRACK}/trak.gfso.atcfunix.altg.${PDY}${cyc}" "${ARCDIR}/." 
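+  # Archive per-basin genesis tracker output when present (epac = East Pacific, natl = North Atlantic)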
+ + mkdir -p "${ARCDIR}/tracker.${PDY}${cyc}/${RUN}" + blist="epac natl" + for basin in ${blist}; do + if [[ -f ${basin} ]]; then + cp -rp "${COM_ATMOS_TRACK}/${basin}" "${ARCDIR}/tracker.${PDY}${cyc}/${RUN}" + fi + done +fi + +# Archive required gaussian gfs forecast files for Fit2Obs +if [[ "${RUN}" == "gfs" ]] && [[ "${FITSARC}" = "YES" ]]; then + VFYARC=${VFYARC:-${ROTDIR}/vrfyarch} + [[ ! -d ${VFYARC} ]] && mkdir -p "${VFYARC}" + mkdir -p "${VFYARC}/${RUN}.${PDY}/${cyc}" + prefix="${RUN}.t${cyc}z" + fhmax=${FHMAX_FITS:-${FHMAX_GFS}} + fhr=0 + while [[ ${fhr} -le ${fhmax} ]]; do + fhr3=$(printf %03i "${fhr}") + sfcfile="${COM_ATMOS_MASTER}/${prefix}.sfcf${fhr3}.nc" + sigfile="${COM_ATMOS_MASTER}/${prefix}.atmf${fhr3}.nc" + nb_copy "${sfcfile}" "${VFYARC}/${RUN}.${PDY}/${cyc}/" + nb_copy "${sigfile}" "${VFYARC}/${RUN}.${PDY}/${cyc}/" + (( fhr = 10#${fhr} + 6 )) + done +fi + + +############################################################### +# Archive data either to HPSS or locally +if [[ ${HPSSARCH} = "YES" || ${LOCALARCH} = "YES" ]]; then +############################################################### + + # --set the archiving command and create local directories, if necessary + TARCMD="htar" + HSICMD="hsi" + if [[ ${LOCALARCH} = "YES" ]]; then + TARCMD="tar" + HSICMD='' + [[ ! -d "${ATARDIR}/${PDY}${cyc}" ]] && mkdir -p "${ATARDIR}/${PDY}${cyc}" + [[ ! -d "${ATARDIR}/${CDATE_MOS}" ]] && [[ -d "${ROTDIR}/gfsmos.${PDY_MOS}" ]] && [[ "${cyc}" -eq 18 ]] && mkdir -p "${ATARDIR}/${CDATE_MOS}" + fi + + #--determine when to save ICs for warm start and forecast-only runs + SAVEWARMICA="NO" + SAVEWARMICB="NO" + SAVEFCSTIC="NO" + firstday=$(${NDATE} +24 "${SDATE}") + mm="${PDY:2:2}" + dd="${PDY:4:2}" + # TODO: This math yields multiple dates sharing the same nday + nday=$(( (10#${mm}-1)*30+10#${dd} )) + mod=$((nday % ARCH_WARMICFREQ)) + if [[ "${PDY}${cyc}" -eq "${firstday}" ]] && [[ "${cyc}" -eq "${ARCHINC_CYC}" ]]; then SAVEWARMICA="YES" ; fi + if [[ "${PDY}${cyc}" -eq "${firstday}" ]] && [[ "${cyc}" -eq "${ARCHICS_CYC}" ]]; then SAVEWARMICB="YES" ; fi + if [[ "${mod}" -eq 0 ]] && [[ "${cyc}" -eq "${ARCHINC_CYC}" ]]; then SAVEWARMICA="YES" ; fi + if [[ "${mod}" -eq 0 ]] && [[ "${cyc}" -eq "${ARCHICS_CYC}" ]]; then SAVEWARMICB="YES" ; fi + + if [[ "${ARCHICS_CYC}" -eq 18 ]]; then + nday1=$((nday+1)) + mod1=$((nday1 % ARCH_WARMICFREQ)) + if [[ "${mod1}" -eq 0 ]] && [[ "${cyc}" -eq "${ARCHICS_CYC}" ]] ; then SAVEWARMICB="YES" ; fi + if [[ "${mod1}" -ne 0 ]] && [[ "${cyc}" -eq "${ARCHICS_CYC}" ]] ; then SAVEWARMICB="NO" ; fi + if [[ "${PDY}${cyc}" -eq "${SDATE}" ]] && [[ "${cyc}" -eq "${ARCHICS_CYC}" ]] ; then SAVEWARMICB="YES" ; fi + fi + + mod=$((nday % ARCH_FCSTICFREQ)) + if [[ "${mod}" -eq 0 ]] || [[ "${PDY}${cyc}" -eq "${firstday}" ]]; then SAVEFCSTIC="YES" ; fi + + + ARCH_LIST="${DATA}/archlist" + [[ -d ${ARCH_LIST} ]] && rm -rf "${ARCH_LIST}" + mkdir -p "${ARCH_LIST}" + cd "${ARCH_LIST}" || exit 2 + + "${HOMEgfs}/ush/hpssarch_gen.sh" "${RUN}" + status=$? + if [ "${status}" -ne 0 ]; then + echo "${HOMEgfs}/ush/hpssarch_gen.sh ${RUN} failed, ABORT!" 
+ exit "${status}" + fi + + cd "${ROTDIR}" || exit 2 + + if [[ "${RUN}" = "gfs" ]]; then + + targrp_list="gfsa gfsb" + + if [ "${ARCH_GAUSSIAN:-"NO"}" = "YES" ]; then + targrp_list="${targrp_list} gfs_flux gfs_netcdfb gfs_pgrb2b" + if [ "${MODE}" = "cycled" ]; then + targrp_list="${targrp_list} gfs_netcdfa" + fi + fi + + if [ "${DO_WAVE}" = "YES" ]; then + targrp_list="${targrp_list} gfswave" + fi + + if [ "${DO_OCN}" = "YES" ]; then + targrp_list="${targrp_list} ocn_ice_grib2_0p5 ocn_ice_grib2_0p25 ocn_2D ocn_3D ocn_xsect ocn_daily gfs_flux_1p00" + fi + + if [ "${DO_ICE}" = "YES" ]; then + targrp_list="${targrp_list} ice" + fi + + # Aerosols + if [ "${DO_AERO}" = "YES" ]; then + for targrp in chem; do + # TODO: Why is this tar being done here instead of being added to the list? + ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${targrp}.tar" $(cat "${ARCH_LIST}/${targrp}.txt") + status=$? + if [[ "${status}" -ne 0 ]] && [[ "${PDY}${cyc}" -ge "${firstday}" ]]; then + echo "HTAR ${PDY}${cyc} ${targrp}.tar failed" + exit "${status}" + fi + done + fi + + #for restarts + if [ "${SAVEFCSTIC}" = "YES" ]; then + targrp_list="${targrp_list} gfs_restarta" + fi + + #for downstream products + if [ "${DO_BUFRSND}" = "YES" ] || [ "${WAFSF}" = "YES" ]; then + targrp_list="${targrp_list} gfs_downstream" + fi + + #--save mdl gfsmos output from all cycles in the 18Z archive directory + if [[ -d "gfsmos.${PDY_MOS}" ]] && [[ "${cyc}" -eq 18 ]]; then + set +e + # TODO: Why is this tar being done here instead of being added to the list? + ${TARCMD} -P -cvf "${ATARDIR}/${CDATE_MOS}/gfsmos.tar" "./gfsmos.${PDY_MOS}" + status=$? + if [[ "${status}" -ne 0 ]] && [[ "${PDY}${cyc}" -ge "${firstday}" ]]; then + echo "${TARCMD^^} ${PDY}${cyc} gfsmos.tar failed" + exit "${status}" + fi + set_strict + fi + elif [[ "${RUN}" = "gdas" ]]; then + + targrp_list="gdas" + + #gdaswave + if [ "${DO_WAVE}" = "YES" ]; then + targrp_list="${targrp_list} gdaswave" + fi + + #gdasocean + if [ "${DO_OCN}" = "YES" ]; then + targrp_list="${targrp_list} gdasocean" + fi + + #gdasice + if [ "${DO_ICE}" = "YES" ]; then + targrp_list="${targrp_list} gdasice" + fi + + if [ "${SAVEWARMICA}" = "YES" ] || [ "${SAVEFCSTIC}" = "YES" ]; then + targrp_list="${targrp_list} gdas_restarta" + if [ "${DO_WAVE}" = "YES" ]; then targrp_list="${targrp_list} gdaswave_restart"; fi + if [ "${DO_OCN}" = "YES" ]; then targrp_list="${targrp_list} gdasocean_restart"; fi + if [ "${DO_ICE}" = "YES" ]; then targrp_list="${targrp_list} gdasice_restart"; fi + fi + + if [ "${SAVEWARMICB}" = "YES" ] || [ "${SAVEFCSTIC}" = "YES" ]; then + targrp_list="${targrp_list} gdas_restartb" + fi + fi + + # Turn on extended globbing options + shopt -s extglob + for targrp in ${targrp_list}; do + set +e + ${TARCMD} -P -cvf "${ATARDIR}/${PDY}${cyc}/${targrp}.tar" $(cat "${ARCH_LIST}/${targrp}.txt") + status=$? 
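+    # gdas and gdas_restarta tarballs may contain restricted (rstprod) observation data,
+    # so tighten their group ownership and permissions on the archive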
+ case ${targrp} in + 'gdas'|'gdas_restarta') + ${HSICMD} chgrp rstprod "${ATARDIR}/${CDATE}/${targrp}.tar" + ${HSICMD} chmod 640 "${ATARDIR}/${CDATE}/${targrp}.tar" + ;; + *) ;; + esac + if [ "${status}" -ne 0 ] && [ "${PDY}${cyc}" -ge "${firstday}" ]; then + echo "FATAL ERROR: ${TARCMD} ${PDY}${cyc} ${targrp}.tar failed" + exit "${status}" + fi + set_strict + done + # Turn extended globbing back off + shopt -u extglob + +############################################################### +fi ##end of HPSS archive +############################################################### + + + +############################################################### +# Clean up previous cycles; various depths +# PRIOR CYCLE: Leave the prior cycle alone +GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}") + +# PREVIOUS to the PRIOR CYCLE +GDATE=$(${NDATE} -"${assim_freq}" "${GDATE}") +gPDY="${GDATE:0:8}" +gcyc="${GDATE:8:2}" + +# Remove the TMPDIR directory +# TODO Only prepbufr is currently using this directory, and all jobs should be +# cleaning up after themselves anyway +COMIN="${DATAROOT}/${GDATE}" +[[ -d ${COMIN} ]] && rm -rf "${COMIN}" + +if [[ "${DELETE_COM_IN_ARCHIVE_JOB:-YES}" == NO ]] ; then + exit 0 +fi + +# Step back every assim_freq hours and remove old rotating directories +# for successful cycles (defaults from 24h to 120h). +# Retain files needed by Fit2Obs +# TODO: This whole section needs to be revamped to remove marine component +# directories and not look at the rocoto log. +GDATEEND=$(${NDATE} -"${RMOLDEND:-24}" "${PDY}${cyc}") +GDATE=$(${NDATE} -"${RMOLDSTD:-120}" "${PDY}${cyc}") +RTOFS_DATE=$(${NDATE} -48 "${PDY}${cyc}") +function remove_files() { + # TODO: move this to a new location + local directory=$1 + shift + if [[ ! -d ${directory} ]]; then + echo "No directory ${directory} to remove files from, skiping" + return + fi + local exclude_list="" + if (($# > 0)); then + exclude_list=$* + fi + local file_list + declare -a file_list + readarray -t file_list < <(find -L "${directory}" -type f) + if (( ${#file_list[@]} == 0 )); then return; fi + # echo "Number of files to remove before exclusions: ${#file_list[@]}" + for exclude in ${exclude_list}; do + echo "Excluding ${exclude}" + declare -a file_list_old=("${file_list[@]}") + readarray file_list < <(printf -- '%s\n' "${file_list_old[@]}" | grep -v "${exclude}") + # echo "Number of files to remove after exclusion: ${#file_list[@]}" + if (( ${#file_list[@]} == 0 )); then return; fi + done + # echo "Number of files to remove after exclusions: ${#file_list[@]}" + + for file in "${file_list[@]}"; do + rm -f "${file}" + done + # Remove directory if empty + rmdir "${directory}" || true +} + +while [ "${GDATE}" -le "${GDATEEND}" ]; do + gPDY="${GDATE:0:8}" + gcyc="${GDATE:8:2}" + COMINrtofs="${ROTDIR}/rtofs.${gPDY}" + if [ -d "${COM_TOP}" ]; then + rocotolog="${EXPDIR}/logs/${GDATE}.log" + if [ -f "${rocotolog}" ]; then + set +e + testend=$(tail -n 1 "${rocotolog}" | grep "This cycle is complete: Success") + rc=$? 
+ set_strict + + if [ "${rc}" -eq 0 ]; then + # Obs + exclude_list="prepbufr" + templates="COM_OBS" + for template in ${templates}; do + YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" "${exclude_list[@]}" + done + + # Atmos + exclude_list="cnvstat atmanl.nc" + templates=$(compgen -A variable | grep 'COM_ATMOS_.*_TMPL') + for template in ${templates}; do + YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" "${exclude_list[@]}" + done + + # Wave + exclude_list="" + templates=$(compgen -A variable | grep 'COM_WAVE_.*_TMPL') + for template in ${templates}; do + YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" "${exclude_list[@]}" + done + + # Ocean + exclude_list="" + templates=$(compgen -A variable | grep 'COM_OCEAN_.*_TMPL') + for template in ${templates}; do + YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" "${exclude_list[@]}" + done + + # Ice + exclude_list="" + templates=$(compgen -A variable | grep 'COM_ICE_.*_TMPL') + for template in ${templates}; do + YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" "${exclude_list[@]}" + done + + # Aerosols (GOCART) + exclude_list="" + templates=$(compgen -A variable | grep 'COM_CHEM_.*_TMPL') + for template in ${templates}; do + YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" "${exclude_list[@]}" + done + + # Mediator + exclude_list="" + templates=$(compgen -A variable | grep 'COM_MED_.*_TMPL') + for template in ${templates}; do + YMD="${gPDY}" HH="${gcyc}" generate_com "directory:${template}" + remove_files "${directory}" "${exclude_list[@]}" + done + + if [ -d "${COMINrtofs}" ] && [ "${GDATE}" -lt "${RTOFS_DATE}" ]; then rm -rf "${COMINrtofs}" ; fi + fi + fi + fi + + # Remove mdl gfsmos directory + if [ "${RUN}" = "gfs" ]; then + COMIN="${ROTDIR}/gfsmos.${gPDY}" + if [ -d "${COMIN}" ] && [ "${GDATE}" -lt "${CDATE_MOS}" ]; then rm -rf "${COMIN}" ; fi + fi + + # Remove any empty directories + target_dir="${ROTDIR:?}/${RUN}.${gPDY}/${gcyc}/" + if [[ -d ${target_dir} ]]; then + find "${target_dir}" -empty -type d -delete + fi + + GDATE=$(${NDATE} +"${assim_freq}" "${GDATE}") +done + +# Remove archived gaussian files used for Fit2Obs in $VFYARC that are +# $FHMAX_FITS plus a delta before $CDATE. Touch existing archived +# gaussian files to prevent the files from being removed by automatic +# scrubber present on some machines. 
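+# For example, with FHMAX_FITS=84 (illustrative value only), the ${RUN}.<PDY> directory
+# dated 84+36=120 hours before the current cycle is removed, while directories within
+# the last 84 hours are touched every 6 hours so automatic scrubbers leave them alone.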
+ +if [ "${RUN}" = "gfs" ]; then + fhmax=$((FHMAX_FITS+36)) + RDATE=$(${NDATE} -"${fhmax}" "${PDY}${cyc}") + rPDY=$(echo "${RDATE}" | cut -c1-8) + COMIN="${VFYARC}/${RUN}.${rPDY}" + [[ -d ${COMIN} ]] && rm -rf "${COMIN}" + + TDATE=$(${NDATE} -"${FHMAX_FITS}" "${PDY}${cyc}") + while [ "${TDATE}" -lt "${PDY}${cyc}" ]; do + tPDY=$(echo "${TDATE}" | cut -c1-8) + tcyc=$(echo "${TDATE}" | cut -c9-10) + TDIR=${VFYARC}/${RUN}.${tPDY}/${tcyc} + [[ -d ${TDIR} ]] && touch "${TDIR}"/* + TDATE=$(${NDATE} +6 "${TDATE}") + done +fi + +# Remove $RUN.$rPDY for the older of GDATE or RDATE +GDATE=$(${NDATE} -"${RMOLDSTD:-120}" "${PDY}${cyc}") +fhmax=${FHMAX_GFS} +RDATE=$(${NDATE} -"${fhmax}" "${PDY}${cyc}") +if [ "${GDATE}" -lt "${RDATE}" ]; then + RDATE=${GDATE} +fi +rPDY=$(echo "${RDATE}" | cut -c1-8) +COMIN="${ROTDIR}/${RUN}.${rPDY}" +[[ -d ${COMIN} ]] && rm -rf "${COMIN}" + + +############################################################### + + +exit 0 diff --git a/scripts/exglobal_atm_analysis_finalize.py b/scripts/exglobal_atm_analysis_finalize.py new file mode 100755 index 0000000000..e51bf082b5 --- /dev/null +++ b/scripts/exglobal_atm_analysis_finalize.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python3 +# exglobal_atm_analysis_finalize.py +# This script creates an AtmAnalysis class +# and runs the finalize method +# which perform post-processing and clean up activities +# for a global atm variational analysis +import os + +from pygw.logger import Logger +from pygw.configuration import cast_strdict_as_dtypedict +from pygfs.task.atm_analysis import AtmAnalysis + + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the atm analysis task + AtmAnl = AtmAnalysis(config) + AtmAnl.finalize() diff --git a/scripts/exglobal_atm_analysis_initialize.py b/scripts/exglobal_atm_analysis_initialize.py new file mode 100755 index 0000000000..e0077f3323 --- /dev/null +++ b/scripts/exglobal_atm_analysis_initialize.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python3 +# exglobal_atm_analysis_initialize.py +# This script creates an AtmAnalysis class +# and runs the initialize method +# which create and stage the runtime directory +# and create the YAML configuration +# for a global atm variational analysis +import os + +from pygw.logger import Logger +from pygw.configuration import cast_strdict_as_dtypedict +from pygfs.task.atm_analysis import AtmAnalysis + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the atm analysis task + AtmAnl = AtmAnalysis(config) + AtmAnl.initialize() diff --git a/scripts/exglobal_atm_analysis_run.py b/scripts/exglobal_atm_analysis_run.py new file mode 100755 index 0000000000..6b29a56976 --- /dev/null +++ b/scripts/exglobal_atm_analysis_run.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python3 +# exglobal_atm_analysis_run.py +# This script creates an AtmAnalysis object +# and runs the execute method +# which executes the global atm variational analysis +import os + +from pygw.logger import Logger +from pygw.configuration import cast_strdict_as_dtypedict +from pygfs.task.atm_analysis import AtmAnalysis + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take 
configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the atm analysis task + AtmAnl = AtmAnalysis(config) + AtmAnl.execute() diff --git a/scripts/exglobal_atmens_analysis_finalize.py b/scripts/exglobal_atmens_analysis_finalize.py new file mode 100755 index 0000000000..7bac671aee --- /dev/null +++ b/scripts/exglobal_atmens_analysis_finalize.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python3 +# exglobal_atmens_analysis_finalize.py +# This script creates an AtmEnsAnalysis class +# and runs the finalize method +# which perform post-processing and clean up activities +# for a global atm local ensemble analysis +import os + +from pygw.logger import Logger +from pygw.configuration import cast_strdict_as_dtypedict +from pygfs.task.atmens_analysis import AtmEnsAnalysis + + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the atmens analysis task + AtmEnsAnl = AtmEnsAnalysis(config) + AtmEnsAnl.finalize() diff --git a/scripts/exglobal_atmens_analysis_initialize.py b/scripts/exglobal_atmens_analysis_initialize.py new file mode 100755 index 0000000000..1461e0b441 --- /dev/null +++ b/scripts/exglobal_atmens_analysis_initialize.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python3 +# exglobal_atmens_analysis_initialize.py +# This script creates an AtmEnsAnalysis class +# and runs the initialize method +# which create and stage the runtime directory +# and create the YAML configuration +# for a global atm local ensemble analysis +import os + +from pygw.logger import Logger +from pygw.configuration import cast_strdict_as_dtypedict +from pygfs.task.atmens_analysis import AtmEnsAnalysis + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the atmens analysis task + AtmEnsAnl = AtmEnsAnalysis(config) + AtmEnsAnl.initialize() diff --git a/scripts/exglobal_atmens_analysis_run.py b/scripts/exglobal_atmens_analysis_run.py new file mode 100755 index 0000000000..dda4f7a11d --- /dev/null +++ b/scripts/exglobal_atmens_analysis_run.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python3 +# exglobal_atmens_analysis_run.py +# This script creates an AtmEnsAnalysis object +# and runs the execute method +# which executes the global atm local ensemble analysis +import os + +from pygw.logger import Logger +from pygw.configuration import cast_strdict_as_dtypedict +from pygfs.task.atmens_analysis import AtmEnsAnalysis + +# Initialize root logger +logger = Logger(level='DEBUG', colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the atmens analysis task + AtmEnsAnl = AtmEnsAnalysis(config) + AtmEnsAnl.execute() diff --git a/scripts/exglobal_atmos_analysis.sh b/scripts/exglobal_atmos_analysis.sh new file mode 100755 index 0000000000..f81f7f0a33 --- /dev/null +++ b/scripts/exglobal_atmos_analysis.sh @@ -0,0 +1,985 @@ +#! /usr/bin/env bash + +################################################################################ +#### UNIX Script Documentation Block +# . . 
+# Script name: exglobal_atmos_analysis.sh +# Script description: Makes a global model upper air analysis with GSI +# +# Author: Rahul Mahajan Org: NCEP/EMC Date: 2017-03-02 +# +# Abstract: This script makes a global model analysis using the GSI +# +# $Id$ +# +# Attributes: +# Language: POSIX shell +# +################################################################################# + +# Set environment. + +source "${HOMEgfs}/ush/preamble.sh" + +# Directories. +pwd=$(pwd) + +# Base variables +CDATE=${CDATE:-"2001010100"} +CDUMP=${CDUMP:-"gdas"} +GDUMP=${GDUMP:-"gdas"} + +# Derived base variables +GDATE=$(${NDATE} -${assim_freq} ${CDATE}) +BDATE=$(${NDATE} -3 ${CDATE}) +PDY=$(echo ${CDATE} | cut -c1-8) +cyc=$(echo ${CDATE} | cut -c9-10) +bPDY=$(echo ${BDATE} | cut -c1-8) +bcyc=$(echo ${BDATE} | cut -c9-10) + +# Utilities +export NCP=${NCP:-"/bin/cp"} +export NMV=${NMV:-"/bin/mv"} +export NLN=${NLN:-"/bin/ln -sf"} +export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"} +export NCLEN=${NCLEN:-${HOMEgfs}/ush/getncdimlen} +COMPRESS=${COMPRESS:-gzip} +UNCOMPRESS=${UNCOMPRESS:-gunzip} +APRUNCFP=${APRUNCFP:-""} +APRUN_GSI=${APRUN_GSI:-${APRUN:-""}} +NTHREADS_GSI=${NTHREADS_GSI:-${NTHREADS:-1}} + +# Microphysics in the model; 99:ZC, 11:GFDLMP +export imp_physics=${imp_physics:-99} +lupp=${lupp:-".true."} +cnvw_option=${cnvw_option:-".false."} + +# Observation usage options +cao_check=${cao_check:-".true."} +ta2tb=${ta2tb:-".true."} + +# Diagnostic files options +lobsdiag_forenkf=${lobsdiag_forenkf:-".false."} +netcdf_diag=${netcdf_diag:-".true."} +binary_diag=${binary_diag:-".false."} + +# IAU +DOIAU=${DOIAU:-"NO"} +export IAUFHRS=${IAUFHRS:-"6"} + +# Dependent Scripts and Executables +GSIEXEC=${GSIEXEC:-${HOMEgfs}/exec/gsi.x} +export NTHREADS_CALCINC=${NTHREADS_CALCINC:-1} +export APRUN_CALCINC=${APRUN_CALCINC:-${APRUN:-""}} +export APRUN_CALCANL=${APRUN_CALCANL:-${APRUN:-""}} +export APRUN_CHGRES=${APRUN_CALCANL:-${APRUN:-""}} +export CALCINCEXEC=${CALCINCEXEC:-${HOMEgfs}/exec/calc_increment_ens.x} +export CALCINCNCEXEC=${CALCINCNCEXEC:-${HOMEgfs}/exec/calc_increment_ens_ncio.x} +export CALCANLEXEC=${CALCANLEXEC:-${HOMEgfs}/exec/calc_analysis.x} +export CHGRESNCEXEC=${CHGRESNCEXEC:-${HOMEgfs}/exec/enkf_chgres_recenter_nc.x} +export CHGRESINCEXEC=${CHGRESINCEXEC:-${HOMEgfs}/exec/interp_inc.x} +CHGRESEXEC=${CHGRESEXEC:-${HOMEgfs}/exec/enkf_chgres_recenter.x} +export NTHREADS_CHGRES=${NTHREADS_CHGRES:-24} +CALCINCPY=${CALCINCPY:-${HOMEgfs}/ush/calcinc_gfs.py} + +# OPS flags +RUN=${RUN:-""} +SENDECF=${SENDECF:-"NO"} +SENDDBN=${SENDDBN:-"NO"} +RUN_GETGES=${RUN_GETGES:-"NO"} +GETGESSH=${GETGESSH:-"getges.sh"} +export gesenvir=${gesenvir:-${envir}} + +# Observations +OPREFIX=${OPREFIX:-""} +OSUFFIX=${OSUFFIX:-""} +PREPQC=${PREPQC:-${COM_OBS}/${OPREFIX}prepbufr${OSUFFIX}} +PREPQCPF=${PREPQCPF:-${COM_OBS}/${OPREFIX}prepbufr.acft_profiles${OSUFFIX}} +NSSTBF=${NSSTBF:-${COM_OBS}/${OPREFIX}nsstbufr${OSUFFIX}} +SATWND=${SATWND:-${COM_OBS}/${OPREFIX}satwnd.tm00.bufr_d${OSUFFIX}} +OSCATBF=${OSCATBF:-${COM_OBS}/${OPREFIX}oscatw.tm00.bufr_d${OSUFFIX}} +RAPIDSCATBF=${RAPIDSCATBF:-${COM_OBS}/${OPREFIX}rapidscatw.tm00.bufr_d${OSUFFIX}} +GSNDBF=${GSNDBF:-${COM_OBS}/${OPREFIX}goesnd.tm00.bufr_d${OSUFFIX}} +GSNDBF1=${GSNDBF1:-${COM_OBS}/${OPREFIX}goesfv.tm00.bufr_d${OSUFFIX}} +B1HRS2=${B1HRS2:-${COM_OBS}/${OPREFIX}1bhrs2.tm00.bufr_d${OSUFFIX}} +B1MSU=${B1MSU:-${COM_OBS}/${OPREFIX}1bmsu.tm00.bufr_d${OSUFFIX}} +B1HRS3=${B1HRS3:-${COM_OBS}/${OPREFIX}1bhrs3.tm00.bufr_d${OSUFFIX}} 
+B1HRS4=${B1HRS4:-${COM_OBS}/${OPREFIX}1bhrs4.tm00.bufr_d${OSUFFIX}} +B1AMUA=${B1AMUA:-${COM_OBS}/${OPREFIX}1bamua.tm00.bufr_d${OSUFFIX}} +B1AMUB=${B1AMUB:-${COM_OBS}/${OPREFIX}1bamub.tm00.bufr_d${OSUFFIX}} +B1MHS=${B1MHS:-${COM_OBS}/${OPREFIX}1bmhs.tm00.bufr_d${OSUFFIX}} +ESHRS3=${ESHRS3:-${COM_OBS}/${OPREFIX}eshrs3.tm00.bufr_d${OSUFFIX}} +ESAMUA=${ESAMUA:-${COM_OBS}/${OPREFIX}esamua.tm00.bufr_d${OSUFFIX}} +ESAMUB=${ESAMUB:-${COM_OBS}/${OPREFIX}esamub.tm00.bufr_d${OSUFFIX}} +ESMHS=${ESMHS:-${COM_OBS}/${OPREFIX}esmhs.tm00.bufr_d${OSUFFIX}} +HRS3DB=${HRS3DB:-${COM_OBS}/${OPREFIX}hrs3db.tm00.bufr_d${OSUFFIX}} +AMUADB=${AMUADB:-${COM_OBS}/${OPREFIX}amuadb.tm00.bufr_d${OSUFFIX}} +AMUBDB=${AMUBDB:-${COM_OBS}/${OPREFIX}amubdb.tm00.bufr_d${OSUFFIX}} +MHSDB=${MHSDB:-${COM_OBS}/${OPREFIX}mhsdb.tm00.bufr_d${OSUFFIX}} +AIRSBF=${AIRSBF:-${COM_OBS}/${OPREFIX}airsev.tm00.bufr_d${OSUFFIX}} +IASIBF=${IASIBF:-${COM_OBS}/${OPREFIX}mtiasi.tm00.bufr_d${OSUFFIX}} +ESIASI=${ESIASI:-${COM_OBS}/${OPREFIX}esiasi.tm00.bufr_d${OSUFFIX}} +IASIDB=${IASIDB:-${COM_OBS}/${OPREFIX}iasidb.tm00.bufr_d${OSUFFIX}} +AMSREBF=${AMSREBF:-${COM_OBS}/${OPREFIX}amsre.tm00.bufr_d${OSUFFIX}} +AMSR2BF=${AMSR2BF:-${COM_OBS}/${OPREFIX}amsr2.tm00.bufr_d${OSUFFIX}} +GMI1CRBF=${GMI1CRBF:-${COM_OBS}/${OPREFIX}gmi1cr.tm00.bufr_d${OSUFFIX}} # GMI temporarily disabled due to array overflow. +SAPHIRBF=${SAPHIRBF:-${COM_OBS}/${OPREFIX}saphir.tm00.bufr_d${OSUFFIX}} +SEVIRIBF=${SEVIRIBF:-${COM_OBS}/${OPREFIX}sevcsr.tm00.bufr_d${OSUFFIX}} +AHIBF=${AHIBF:-${COM_OBS}/${OPREFIX}ahicsr.tm00.bufr_d${OSUFFIX}} +SSTVIIRS=${SSTVIIRS:-${COM_OBS}/${OPREFIX}sstvcw.tm00.bufr_d${OSUFFIX}} +ABIBF=${ABIBF:-${COM_OBS}/${OPREFIX}gsrcsr.tm00.bufr_d${OSUFFIX}} +CRISBF=${CRISBF:-${COM_OBS}/${OPREFIX}cris.tm00.bufr_d${OSUFFIX}} +ESCRIS=${ESCRIS:-${COM_OBS}/${OPREFIX}escris.tm00.bufr_d${OSUFFIX}} +CRISDB=${CRISDB:-${COM_OBS}/${OPREFIX}crisdb.tm00.bufr_d${OSUFFIX}} +CRISFSBF=${CRISFSBF:-${COM_OBS}/${OPREFIX}crisf4.tm00.bufr_d${OSUFFIX}} +ESCRISFS=${ESCRISFS:-${COM_OBS}/${OPREFIX}escrsf.tm00.bufr_d${OSUFFIX}} +CRISFSDB=${CRISFSDB:-${COM_OBS}/${OPREFIX}crsfdb.tm00.bufr_d${OSUFFIX}} +ATMSBF=${ATMSBF:-${COM_OBS}/${OPREFIX}atms.tm00.bufr_d${OSUFFIX}} +ESATMS=${ESATMS:-${COM_OBS}/${OPREFIX}esatms.tm00.bufr_d${OSUFFIX}} +ATMSDB=${ATMSDB:-${COM_OBS}/${OPREFIX}atmsdb.tm00.bufr_d${OSUFFIX}} +SSMITBF=${SSMITBF:-${COM_OBS}/${OPREFIX}ssmit.tm00.bufr_d${OSUFFIX}} +SSMISBF=${SSMISBF:-${COM_OBS}/${OPREFIX}ssmisu.tm00.bufr_d${OSUFFIX}} +SBUVBF=${SBUVBF:-${COM_OBS}/${OPREFIX}osbuv8.tm00.bufr_d${OSUFFIX}} +OMPSNPBF=${OMPSNPBF:-${COM_OBS}/${OPREFIX}ompsn8.tm00.bufr_d${OSUFFIX}} +OMPSTCBF=${OMPSTCBF:-${COM_OBS}/${OPREFIX}ompst8.tm00.bufr_d${OSUFFIX}} +OMPSLPBF=${OMPSLPBF:-${COM_OBS}/${OPREFIX}ompslp.tm00.bufr_d${OSUFFIX}} +GOMEBF=${GOMEBF:-${COM_OBS}/${OPREFIX}gome.tm00.bufr_d${OSUFFIX}} +OMIBF=${OMIBF:-${COM_OBS}/${OPREFIX}omi.tm00.bufr_d${OSUFFIX}} +MLSBF=${MLSBF:-${COM_OBS}/${OPREFIX}mls.tm00.bufr_d${OSUFFIX}} +SMIPCP=${SMIPCP:-${COM_OBS}/${OPREFIX}spssmi.tm00.bufr_d${OSUFFIX}} +TMIPCP=${TMIPCP:-${COM_OBS}/${OPREFIX}sptrmm.tm00.bufr_d${OSUFFIX}} +GPSROBF=${GPSROBF:-${COM_OBS}/${OPREFIX}gpsro.tm00.bufr_d${OSUFFIX}} +TCVITL=${TCVITL:-${COM_OBS}/${OPREFIX}syndata.tcvitals.tm00} +B1AVHAM=${B1AVHAM:-${COM_OBS}/${OPREFIX}avcsam.tm00.bufr_d${OSUFFIX}} +B1AVHPM=${B1AVHPM:-${COM_OBS}/${OPREFIX}avcspm.tm00.bufr_d${OSUFFIX}} +HDOB=${HDOB:-${COM_OBS}/${OPREFIX}hdob.tm00.bufr_d${OSUFFIX}} + +# Guess files +GPREFIX=${GPREFIX:-""} +GSUFFIX=${GSUFFIX:-".nc"} 
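+# First-guess surface (sfcf*) and atmosphere (atmf*) forecasts from the previous cycle;
+# the 3/6/9-hour files are required and the 4/5/7/8-hour files are linked when available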
+SFCG03=${SFCG03:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}sfcf003${GSUFFIX}} +SFCG04=${SFCG04:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}sfcf004${GSUFFIX}} +SFCG05=${SFCG05:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}sfcf005${GSUFFIX}} +SFCGES=${SFCGES:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}sfcf006${GSUFFIX}} +SFCG07=${SFCG07:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}sfcf007${GSUFFIX}} +SFCG08=${SFCG08:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}sfcf008${GSUFFIX}} +SFCG09=${SFCG09:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}sfcf009${GSUFFIX}} +ATMG03=${ATMG03:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf003${GSUFFIX}} +ATMG04=${ATMG04:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf004${GSUFFIX}} +ATMG05=${ATMG05:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf005${GSUFFIX}} +ATMGES=${ATMGES:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf006${GSUFFIX}} +ATMG07=${ATMG07:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf007${GSUFFIX}} +ATMG08=${ATMG08:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf008${GSUFFIX}} +ATMG09=${ATMG09:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf009${GSUFFIX}} +GBIAS=${GBIAS:-${COM_ATMOS_ANALYSIS_PREV}/${GPREFIX}abias} +GBIASPC=${GBIASPC:-${COM_ATMOS_ANALYSIS_PREV}/${GPREFIX}abias_pc} +GBIASAIR=${GBIASAIR:-${COM_ATMOS_ANALYSIS_PREV}/${GPREFIX}abias_air} +GRADSTAT=${GRADSTAT:-${COM_ATMOS_ANALYSIS_PREV}/${GPREFIX}radstat} + +# Analysis files +export APREFIX=${APREFIX:-""} +SFCANL=${SFCANL:-${COM_ATMOS_ANALYSIS}/${APREFIX}sfcanl.nc} +DTFANL=${DTFANL:-${COM_ATMOS_ANALYSIS}/${APREFIX}dtfanl.nc} +ATMANL=${ATMANL:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmanl.nc} +ABIAS=${ABIAS:-${COM_ATMOS_ANALYSIS}/${APREFIX}abias} +ABIASPC=${ABIASPC:-${COM_ATMOS_ANALYSIS}/${APREFIX}abias_pc} +ABIASAIR=${ABIASAIR:-${COM_ATMOS_ANALYSIS}/${APREFIX}abias_air} +ABIASe=${ABIASe:-${COM_ATMOS_ANALYSIS}/${APREFIX}abias_int} +RADSTAT=${RADSTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}radstat} +GSISTAT=${GSISTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}gsistat} +PCPSTAT=${PCPSTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}pcpstat} +CNVSTAT=${CNVSTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}cnvstat} +OZNSTAT=${OZNSTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}oznstat} + +# Increment files +ATMINC=${ATMINC:-${COM_ATMOS_ANALYSIS}/${APREFIX}atminc.nc} + +# Obs diag +RUN_SELECT=${RUN_SELECT:-"NO"} +USE_SELECT=${USE_SELECT:-"NO"} +USE_RADSTAT=${USE_RADSTAT:-"YES"} +SELECT_OBS=${SELECT_OBS:-${COM_ATMOS_ANALYSIS}/${APREFIX}obsinput} +GENDIAG=${GENDIAG:-"YES"} +DIAG_SUFFIX=${DIAG_SUFFIX:-""} +if [ ${netcdf_diag} = ".true." 
] ; then + DIAG_SUFFIX="${DIAG_SUFFIX}.nc4" +fi +DIAG_COMPRESS=${DIAG_COMPRESS:-"YES"} +DIAG_TARBALL=${DIAG_TARBALL:-"YES"} +USE_CFP=${USE_CFP:-"NO"} +CFP_MP=${CFP_MP:-"NO"} +nm="" +if [ ${CFP_MP} = "YES" ]; then + nm=0 +fi +DIAG_DIR=${DIAG_DIR:-${COM_ATMOS_ANALYSIS}/gsidiags} + +# Set script / GSI control parameters +DOHYBVAR=${DOHYBVAR:-"NO"} +NMEM_ENS=${NMEM_ENS:-0} +export DONST=${DONST:-"NO"} +NST_GSI=${NST_GSI:-0} +NSTINFO=${NSTINFO:-0} +ZSEA1=${ZSEA1:-0} +ZSEA2=${ZSEA2:-0} +FAC_DTL=${FAC_DTL:-1} +FAC_TSL=${FAC_TSL:-1} +TZR_QC=${TZR_QC:-1} +USE_READIN_ANL_SFCMASK=${USE_READIN_ANL_SFCMASK:-.false.} +SMOOTH_ENKF=${SMOOTH_ENKF:-"YES"} +export DOIAU=${DOIAU:-"NO"} +DO_CALC_INCREMENT=${DO_CALC_INCREMENT:-"NO"} +DO_CALC_ANALYSIS=${DO_CALC_ANALYSIS:-"NO"} +export INCREMENTS_TO_ZERO=${INCREMENTS_TO_ZERO:-"'NONE'"} +USE_CORRELATED_OBERRS=${USE_CORRELATED_OBERRS:-"YES"} + +# Get header information from Guess files +LONB=${LONB:-$(${NCLEN} ${ATMGES} grid_xt)} # get LONB +LATB=${LATB:-$(${NCLEN} ${ATMGES} grid_yt)} # get LATB +LEVS=${LEVS:-$(${NCLEN} ${ATMGES} pfull)} # get LEVS +JCAP=${JCAP:--9999} # there is no jcap in these files +[ ${JCAP} -eq -9999 -a ${LATB} -ne -9999 ] && JCAP=$((LATB-2)) +[ ${LONB} -eq -9999 -o ${LATB} -eq -9999 -o ${LEVS} -eq -9999 -o ${JCAP} -eq -9999 ] && exit -9999 + +# Get header information from Ensemble Guess files +if [ ${DOHYBVAR} = "YES" ]; then + SFCGES_ENSMEAN=${SFCGES_ENSMEAN:-${COM_ATMOS_HISTORY_ENS_PREV}/${GPREFIX_ENS}sfcf006.ensmean.nc} + export ATMGES_ENSMEAN=${ATMGES_ENSMEAN:-${COM_ATMOS_HISTORY_ENS_PREV}/${GPREFIX_ENS}atmf006.ensmean.nc} + LONB_ENKF=${LONB_ENKF:-$(${NCLEN} ${ATMGES_ENSMEAN} grid_xt)} # get LONB_ENKF + LATB_ENKF=${LATB_ENKF:-$(${NCLEN} ${ATMGES_ENSMEAN} grid_yt)} # get LATB_ENFK + LEVS_ENKF=${LEVS_ENKF:-$(${NCLEN} ${ATMGES_ENSMEAN} pfull)} # get LATB_ENFK + JCAP_ENKF=${JCAP_ENKF:--9999} # again, no jcap in the netcdf files + NLON_ENKF=${NLON_ENKF:-${LONB_ENKF}} + NLAT_ENKF=${NLAT_ENKF:-$((${LATB_ENKF}+2))} + [ ${JCAP_ENKF} -eq -9999 -a ${LATB_ENKF} -ne -9999 ] && JCAP_ENKF=$((LATB_ENKF-2)) + [ ${LONB_ENKF} -eq -9999 -o ${LATB_ENKF} -eq -9999 -o ${LEVS_ENKF} -eq -9999 -o ${JCAP_ENKF} -eq -9999 ] && exit -9999 +else + LONB_ENKF=0 # just for if statement later +fi + +# Get dimension information based on CASE +res=$(echo ${CASE} | cut -c2-) +JCAP_CASE=$((res*2-2)) +LATB_CASE=$((res*2)) +LONB_CASE=$((res*4)) + +# Set analysis resolution information +if [ ${DOHYBVAR} = "YES" ]; then + JCAP_A=${JCAP_A:-${JCAP_ENKF:-${JCAP}}} + LONA=${LONA:-${LONB_ENKF:-${LONB}}} + LATA=${LATA:-${LATB_ENKF:-${LATB}}} +else + JCAP_A=${JCAP_A:-${JCAP}} + LONA=${LONA:-${LONB}} + LATA=${LATA:-${LATB}} +fi +NLON_A=${NLON_A:-${LONA}} +NLAT_A=${NLAT_A:-$((${LATA}+2))} + +DELTIM=${DELTIM:-$((3600/(${JCAP_A}/20)))} + +# determine if writing or calculating increment +if [ ${DO_CALC_INCREMENT} = "YES" ]; then + write_fv3_increment=".false." +else + write_fv3_increment=".true." 
+ WRITE_INCR_ZERO="incvars_to_zero= ${INCREMENTS_TO_ZERO}," + WRITE_ZERO_STRAT="incvars_zero_strat= ${INCVARS_ZERO_STRAT}," + WRITE_STRAT_EFOLD="incvars_efold= ${INCVARS_EFOLD}," +fi + +# GSI Fix files +RTMFIX=${CRTM_FIX} +BERROR=${BERROR:-${FIXgsi}/Big_Endian/global_berror.l${LEVS}y${NLAT_A}.f77} +SATANGL=${SATANGL:-${FIXgsi}/global_satangbias.txt} +SATINFO=${SATINFO:-${FIXgsi}/global_satinfo.txt} +RADCLOUDINFO=${RADCLOUDINFO:-${FIXgsi}/cloudy_radiance_info.txt} +ATMSFILTER=${ATMSFILTER:-${FIXgsi}/atms_beamwidth.txt} +ANAVINFO=${ANAVINFO:-${FIXgsi}/global_anavinfo.l${LEVS}.txt} +CONVINFO=${CONVINFO:-${FIXgsi}/global_convinfo.txt} +vqcdat=${vqcdat:-${FIXgsi}/vqctp001.dat} +INSITUINFO=${INSITUINFO:-${FIXgsi}/global_insituinfo.txt} +OZINFO=${OZINFO:-${FIXgsi}/global_ozinfo.txt} +PCPINFO=${PCPINFO:-${FIXgsi}/global_pcpinfo.txt} +AEROINFO=${AEROINFO:-${FIXgsi}/global_aeroinfo.txt} +SCANINFO=${SCANINFO:-${FIXgsi}/global_scaninfo.txt} +HYBENSINFO=${HYBENSINFO:-${FIXgsi}/global_hybens_info.l${LEVS}.txt} +OBERROR=${OBERROR:-${FIXgsi}/prepobs_errtable.global} + +# GSI namelist +SETUP=${SETUP:-""} +GRIDOPTS=${GRIDOPTS:-""} +BKGVERR=${BKGVERR:-""} +ANBKGERR=${ANBKGERR:-""} +JCOPTS=${JCOPTS:-""} +STRONGOPTS=${STRONGOPTS:-""} +OBSQC=${OBSQC:-""} +OBSINPUT=${OBSINPUT:-""} +SUPERRAD=${SUPERRAD:-""} +SINGLEOB=${SINGLEOB:-""} +LAGDATA=${LAGDATA:-""} +HYBRID_ENSEMBLE=${HYBRID_ENSEMBLE:-""} +RAPIDREFRESH_CLDSURF=${RAPIDREFRESH_CLDSURF:-""} +CHEM=${CHEM:-""} +NST=${NST:-""} + +#uGSI Namelist parameters +lrun_subdirs=${lrun_subdirs:-".true."} +if [ ${DOHYBVAR} = "YES" ]; then + l_hyb_ens=.true. + export l4densvar=${l4densvar:-".false."} + export lwrite4danl=${lwrite4danl:-".false."} +else + l_hyb_ens=.false. + export l4densvar=.false. + export lwrite4danl=.false. +fi + +# Set 4D-EnVar specific variables +if [ ${DOHYBVAR} = "YES" -a ${l4densvar} = ".true." -a ${lwrite4danl} = ".true." ]; then + ATMA03=${ATMA03:-${COM_ATMOS_ANALYSIS}/${APREFIX}atma003.nc} + ATMI03=${ATMI03:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmi003.nc} + ATMA04=${ATMA04:-${COM_ATMOS_ANALYSIS}/${APREFIX}atma004.nc} + ATMI04=${ATMI04:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmi004.nc} + ATMA05=${ATMA05:-${COM_ATMOS_ANALYSIS}/${APREFIX}atma005.nc} + ATMI05=${ATMI05:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmi005.nc} + ATMA07=${ATMA07:-${COM_ATMOS_ANALYSIS}/${APREFIX}atma007.nc} + ATMI07=${ATMI07:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmi007.nc} + ATMA08=${ATMA08:-${COM_ATMOS_ANALYSIS}/${APREFIX}atma008.nc} + ATMI08=${ATMI08:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmi008.nc} + ATMA09=${ATMA09:-${COM_ATMOS_ANALYSIS}/${APREFIX}atma009.nc} + ATMI09=${ATMI09:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmi009.nc} +fi + +################################################################################ +# Preprocessing +mkdata=NO +if [ ! 
-d ${DATA} ]; then + mkdata=YES + mkdir -p ${DATA} +fi + +cd ${DATA} || exit 99 + +############################################################## +# Fixed files +${NLN} ${BERROR} berror_stats +${NLN} ${SATANGL} satbias_angle +${NLN} ${SATINFO} satinfo +${NLN} ${RADCLOUDINFO} cloudy_radiance_info.txt +${NLN} ${ATMSFILTER} atms_beamwidth.txt +${NLN} ${ANAVINFO} anavinfo +${NLN} ${CONVINFO} convinfo +${NLN} ${vqcdat} vqctp001.dat +${NLN} ${INSITUINFO} insituinfo +${NLN} ${OZINFO} ozinfo +${NLN} ${PCPINFO} pcpinfo +${NLN} ${AEROINFO} aeroinfo +${NLN} ${SCANINFO} scaninfo +${NLN} ${HYBENSINFO} hybens_info +${NLN} ${OBERROR} errtable + +#If using correlated error, link to the covariance files +if [ ${USE_CORRELATED_OBERRS} == "YES" ]; then + if grep -q "Rcov" ${ANAVINFO} ; then + if ls ${FIXgsi}/Rcov* 1> /dev/null 2>&1; then + ${NLN} ${FIXgsi}/Rcov* ${DATA} + echo "using correlated obs error" + else + echo "FATAL ERROR: Satellite error covariance files (Rcov) are missing." + echo "Check for the required Rcov files in " ${ANAVINFO} + exit 1 + fi + else + echo "FATAL ERROR: Satellite error covariance info missing in " ${ANAVINFO} + exit 1 + fi + +# Correlated error utlizes mkl lapack. Found it necesary to fix the +# number of mkl threads to ensure reproducible results independent +# of the job configuration. + export MKL_NUM_THREADS=1 + +else + echo "not using correlated obs error" +fi + +############################################################## +# CRTM Spectral and Transmittance coefficients +mkdir -p crtm_coeffs +for file in $(awk '{if($1!~"!"){print $1}}' satinfo | sort | uniq); do + ${NLN} ${RTMFIX}/${file}.SpcCoeff.bin ./crtm_coeffs/${file}.SpcCoeff.bin + ${NLN} ${RTMFIX}/${file}.TauCoeff.bin ./crtm_coeffs/${file}.TauCoeff.bin +done +${NLN} ${RTMFIX}/amsua_metop-a_v2.SpcCoeff.bin ./crtm_coeffs/amsua_metop-a_v2.SpcCoeff.bin + +${NLN} ${RTMFIX}/Nalli.IRwater.EmisCoeff.bin ./crtm_coeffs/Nalli.IRwater.EmisCoeff.bin +${NLN} ${RTMFIX}/NPOESS.IRice.EmisCoeff.bin ./crtm_coeffs/NPOESS.IRice.EmisCoeff.bin +${NLN} ${RTMFIX}/NPOESS.IRland.EmisCoeff.bin ./crtm_coeffs/NPOESS.IRland.EmisCoeff.bin +${NLN} ${RTMFIX}/NPOESS.IRsnow.EmisCoeff.bin ./crtm_coeffs/NPOESS.IRsnow.EmisCoeff.bin +${NLN} ${RTMFIX}/NPOESS.VISice.EmisCoeff.bin ./crtm_coeffs/NPOESS.VISice.EmisCoeff.bin +${NLN} ${RTMFIX}/NPOESS.VISland.EmisCoeff.bin ./crtm_coeffs/NPOESS.VISland.EmisCoeff.bin +${NLN} ${RTMFIX}/NPOESS.VISsnow.EmisCoeff.bin ./crtm_coeffs/NPOESS.VISsnow.EmisCoeff.bin +${NLN} ${RTMFIX}/NPOESS.VISwater.EmisCoeff.bin ./crtm_coeffs/NPOESS.VISwater.EmisCoeff.bin +${NLN} ${RTMFIX}/FASTEM6.MWwater.EmisCoeff.bin ./crtm_coeffs/FASTEM6.MWwater.EmisCoeff.bin +${NLN} ${RTMFIX}/AerosolCoeff.bin ./crtm_coeffs/AerosolCoeff.bin +${NLN} ${RTMFIX}/CloudCoeff.GFDLFV3.-109z-1.bin ./crtm_coeffs/CloudCoeff.bin + +############################################################## +# Observational data +${NLN} ${PREPQC} prepbufr +${NLN} ${PREPQCPF} prepbufr_profl +${NLN} ${SATWND} satwndbufr +${NLN} ${OSCATBF} oscatbufr +${NLN} ${RAPIDSCATBF} rapidscatbufr +${NLN} ${GSNDBF} gsndrbufr +${NLN} ${GSNDBF1} gsnd1bufr +${NLN} ${B1HRS2} hirs2bufr +${NLN} ${B1MSU} msubufr +${NLN} ${B1HRS3} hirs3bufr +${NLN} ${B1HRS4} hirs4bufr +${NLN} ${B1AMUA} amsuabufr +${NLN} ${B1AMUB} amsubbufr +${NLN} ${B1MHS} mhsbufr +${NLN} ${ESHRS3} hirs3bufrears +${NLN} ${ESAMUA} amsuabufrears +${NLN} ${ESAMUB} amsubbufrears +#$NLN $ESMHS mhsbufrears +${NLN} ${HRS3DB} hirs3bufr_db +${NLN} ${AMUADB} amsuabufr_db +${NLN} ${AMUBDB} amsubbufr_db +#$NLN $MHSDB mhsbufr_db +${NLN} ${SBUVBF} 
sbuvbufr +${NLN} ${OMPSNPBF} ompsnpbufr +${NLN} ${OMPSLPBF} ompslpbufr +${NLN} ${OMPSTCBF} ompstcbufr +${NLN} ${GOMEBF} gomebufr +${NLN} ${OMIBF} omibufr +${NLN} ${MLSBF} mlsbufr +${NLN} ${SMIPCP} ssmirrbufr +${NLN} ${TMIPCP} tmirrbufr +${NLN} ${AIRSBF} airsbufr +${NLN} ${IASIBF} iasibufr +${NLN} ${ESIASI} iasibufrears +${NLN} ${IASIDB} iasibufr_db +${NLN} ${AMSREBF} amsrebufr +${NLN} ${AMSR2BF} amsr2bufr +#${NLN} ${GMI1CRBF} gmibufr # GMI temporarily disabled due to array overflow. +${NLN} ${SAPHIRBF} saphirbufr +${NLN} ${SEVIRIBF} seviribufr +${NLN} ${CRISBF} crisbufr +${NLN} ${ESCRIS} crisbufrears +${NLN} ${CRISDB} crisbufr_db +${NLN} ${CRISFSBF} crisfsbufr +${NLN} ${ESCRISFS} crisfsbufrears +${NLN} ${CRISFSDB} crisfsbufr_db +${NLN} ${ATMSBF} atmsbufr +${NLN} ${ESATMS} atmsbufrears +${NLN} ${ATMSDB} atmsbufr_db +${NLN} ${SSMITBF} ssmitbufr +${NLN} ${SSMISBF} ssmisbufr +${NLN} ${GPSROBF} gpsrobufr +${NLN} ${TCVITL} tcvitl +${NLN} ${B1AVHAM} avhambufr +${NLN} ${B1AVHPM} avhpmbufr +${NLN} ${AHIBF} ahibufr +${NLN} ${ABIBF} abibufr +${NLN} ${HDOB} hdobbufr +${NLN} ${SSTVIIRS} sstviirs + +[[ ${DONST} = "YES" ]] && ${NLN} ${NSSTBF} nsstbufr + +############################################################## +# Required bias guess files +${NLN} ${GBIAS} satbias_in +${NLN} ${GBIASPC} satbias_pc +${NLN} ${GBIASAIR} aircftbias_in +${NLN} ${GRADSTAT} radstat.gdas + +############################################################## +# Required model guess files +${NLN} ${ATMG03} sigf03 +${NLN} ${ATMGES} sigf06 +${NLN} ${ATMG09} sigf09 + +${NLN} ${SFCG03} sfcf03 +${NLN} ${SFCGES} sfcf06 +${NLN} ${SFCG09} sfcf09 + +[[ -f ${ATMG04} ]] && ${NLN} ${ATMG04} sigf04 +[[ -f ${ATMG05} ]] && ${NLN} ${ATMG05} sigf05 +[[ -f ${ATMG07} ]] && ${NLN} ${ATMG07} sigf07 +[[ -f ${ATMG08} ]] && ${NLN} ${ATMG08} sigf08 + +[[ -f ${SFCG04} ]] && ${NLN} ${SFCG04} sfcf04 +[[ -f ${SFCG05} ]] && ${NLN} ${SFCG05} sfcf05 +[[ -f ${SFCG07} ]] && ${NLN} ${SFCG07} sfcf07 +[[ -f ${SFCG08} ]] && ${NLN} ${SFCG08} sfcf08 + +if [ ${DOHYBVAR} = "YES" ]; then + + # Link ensemble members + mkdir -p ensemble_data + + ENKF_SUFFIX="s" + [[ ${SMOOTH_ENKF} = "NO" ]] && ENKF_SUFFIX="" + + fhrs="06" + if [ ${l4densvar} = ".true." ]; then + fhrs="03 04 05 06 07 08 09" + nhr_obsbin=1 + fi + + for imem in $(seq 1 ${NMEM_ENS}); do + memchar="mem$(printf %03i "${imem}")" + MEMDIR=${memchar} RUN=${GDUMP_ENS} YMD=${gPDY} HH=${gcyc} generate_com COM_ATMOS_HISTORY + + for fhr in ${fhrs}; do + ${NLN} ${COM_ATMOS_HISTORY}/${GPREFIX_ENS}atmf0${fhr}${ENKF_SUFFIX}.nc ./ensemble_data/sigf${fhr}_ens_${memchar} + if [ ${cnvw_option} = ".true." ]; then + ${NLN} ${COM_ATMOS_HISTORY}/${GPREFIX_ENS}sfcf0${fhr}.nc ./ensemble_data/sfcf${fhr}_ens_${memchar} + fi + done + done + +fi + +############################################################## +# Handle inconsistent surface mask between background, ensemble and analysis grids +# This needs re-visiting in the context of NSST; especially references to JCAP* +if [ ${JCAP} -ne ${JCAP_A} ]; then + if [ ${DOHYBVAR} = "YES" -a ${JCAP_A} = ${JCAP_ENKF} ]; then + if [ -e ${SFCGES_ENSMEAN} ]; then + USE_READIN_ANL_SFCMASK=.true. 
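+      # Provide the ensemble-mean surface guess (already on the analysis grid) so GSI
+      # reads a consistent surface mask instead of interpolating it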
+ ${NLN} ${SFCGES_ENSMEAN} sfcf06_anlgrid + else + echo "Warning: Inconsistent sfc mask between analysis and ensemble grids, GSI will interpolate" + fi + else + echo "Warning: Inconsistent sfc mask between analysis and background grids, GSI will interpolate" + fi +fi + +############################################################## +# Diagnostic files +# if requested, link GSI diagnostic file directories for use later +if [ ${GENDIAG} = "YES" ] ; then + if [ ${lrun_subdirs} = ".true." ] ; then + if [ -d ${DIAG_DIR} ]; then + rm -rf ${DIAG_DIR} + fi + npe_m1="$((${npe_gsi}-1))" + for pe in $(seq 0 ${npe_m1}); do + pedir="dir."$(printf %04i ${pe}) + mkdir -p ${DIAG_DIR}/${pedir} + ${NLN} ${DIAG_DIR}/${pedir} ${pedir} + done + else + err_exit "FATAL ERROR: lrun_subdirs must be true. lrun_subdirs=${lrun_subdirs}" + fi +fi + +############################################################## +# Output files +${NLN} ${ATMANL} siganl +${NLN} ${ATMINC} siginc.nc +if [ ${DOHYBVAR} = "YES" -a ${l4densvar} = ".true." -a ${lwrite4danl} = ".true." ]; then + ${NLN} ${ATMA03} siga03 + ${NLN} ${ATMI03} sigi03.nc + ${NLN} ${ATMA04} siga04 + ${NLN} ${ATMI04} sigi04.nc + ${NLN} ${ATMA05} siga05 + ${NLN} ${ATMI05} sigi05.nc + ${NLN} ${ATMA07} siga07 + ${NLN} ${ATMI07} sigi07.nc + ${NLN} ${ATMA08} siga08 + ${NLN} ${ATMI08} sigi08.nc + ${NLN} ${ATMA09} siga09 + ${NLN} ${ATMI09} sigi09.nc +fi +${NLN} ${ABIAS} satbias_out +${NLN} ${ABIASPC} satbias_pc.out +${NLN} ${ABIASAIR} aircftbias_out + +if [ ${DONST} = "YES" ]; then + ${NLN} ${DTFANL} dtfanl +fi + +# If requested, link (and if tarred, de-tar obsinput.tar) into obs_input.* files +if [ ${USE_SELECT} = "YES" ]; then + rm obs_input.* + nl=$(file ${SELECT_OBS} | cut -d: -f2 | grep tar | wc -l) + if [ ${nl} -eq 1 ]; then + rm obsinput.tar + ${NLN} ${SELECT_OBS} obsinput.tar + tar -xvf obsinput.tar + rm obsinput.tar + else + for filetop in $(ls ${SELECT_OBS}/obs_input.*); do + fileloc=$(basename ${filetop}) + ${NLN} ${filetop} ${fileloc} + done + fi +fi + +############################################################## +# If requested, copy and de-tar guess radstat file +if [ ${USE_RADSTAT} = "YES" ]; then + if [ ${USE_CFP} = "YES" ]; then + [[ -f ${DATA}/unzip.sh ]] && rm ${DATA}/unzip.sh + [[ -f ${DATA}/mp_unzip.sh ]] && rm ${DATA}/mp_unzip.sh + cat > ${DATA}/unzip.sh << EOFunzip +#!/bin/sh + diag_file=\$1 + diag_suffix=\$2 + fname=\$(echo \$diag_file | cut -d'.' -f1) + fdate=\$(echo \$diag_file | cut -d'.' -f2) + ${UNCOMPRESS} \$diag_file + fnameges=\$(echo \$fname | sed 's/_ges//g') + ${NMV} \$fname.\$fdate\$diag_suffix \$fnameges +EOFunzip + chmod 755 ${DATA}/unzip.sh + fi + + listdiag=$(tar xvf radstat.gdas | cut -d' ' -f2 | grep _ges) + for type in ${listdiag}; do + diag_file=$(echo ${type} | cut -d',' -f1) + if [ ${USE_CFP} = "YES" ] ; then + echo "${nm} ${DATA}/unzip.sh ${diag_file} ${DIAG_SUFFIX}" | tee -a ${DATA}/mp_unzip.sh + if [ ${CFP_MP:-"NO"} = "YES" ]; then + nm=$((nm+1)) + fi + else + fname=$(echo ${diag_file} | cut -d'.' -f1) + date=$(echo ${diag_file} | cut -d'.' -f2) + ${UNCOMPRESS} ${diag_file} + fnameges=$(echo ${fname}|sed 's/_ges//g') + ${NMV} ${fname}.${date}${DIAG_SUFFIX} ${fnameges} + fi + done + + if [ ${USE_CFP} = "YES" ] ; then + chmod 755 ${DATA}/mp_unzip.sh + ncmd=$(cat ${DATA}/mp_unzip.sh | wc -l) + if [ ${ncmd} -gt 0 ]; then + ncmd_max=$((ncmd < npe_node_max ? 
ncmd : npe_node_max)) + APRUNCFP_UNZIP=$(eval echo ${APRUNCFP}) + ${APRUNCFP_UNZIP} ${DATA}/mp_unzip.sh + export err=$?; err_chk + fi + fi +fi # if [ $USE_RADSTAT = "YES" ] + +############################################################## +# GSI Namelist options +if [ ${DOHYBVAR} = "YES" ]; then + HYBRID_ENSEMBLE="n_ens=${NMEM_ENS},jcap_ens=${JCAP_ENKF},nlat_ens=${NLAT_ENKF},nlon_ens=${NLON_ENKF},jcap_ens_test=${JCAP_ENKF},${HYBRID_ENSEMBLE}" + if [ ${l4densvar} = ".true." ]; then + SETUP="niter(1)=50,niter(2)=150,niter_no_qc(1)=25,niter_no_qc(2)=0,thin4d=.true.,ens_nstarthr=3,l4densvar=${l4densvar},lwrite4danl=${lwrite4danl},${SETUP}" + JCOPTS="ljc4tlevs=.true.,${JCOPTS}" + STRONGOPTS="tlnmc_option=3,${STRONGOPTS}" + OBSQC="c_varqc=0.04,${OBSQC}" + fi +fi + +if [ ${DONST} = "YES" ]; then + NST="nstinfo=${NSTINFO},fac_dtl=${FAC_DTL},fac_tsl=${FAC_TSL},zsea1=${ZSEA1},zsea2=${ZSEA2},${NST}" +fi + +############################################################## +# Create global_gsi namelist +cat > gsiparm.anl << EOF +&SETUP + miter=2, + niter(1)=100,niter(2)=100, + niter_no_qc(1)=50,niter_no_qc(2)=0, + write_diag(1)=.true.,write_diag(2)=.false.,write_diag(3)=.true., + qoption=2, + gencode=${IGEN:-0},deltim=${DELTIM}, + factqmin=0.5,factqmax=0.0002, + iguess=-1, + tzr_qc=${TZR_QC}, + oneobtest=.false.,retrieval=.false.,l_foto=.false., + use_pbl=.false.,use_compress=.true.,nsig_ext=45,gpstop=50.,commgpstop=45.,commgpserrinf=1.0, + use_gfs_nemsio=.false.,use_gfs_ncio=.true.,sfcnst_comb=.true., + use_readin_anl_sfcmask=${USE_READIN_ANL_SFCMASK}, + lrun_subdirs=${lrun_subdirs}, + crtm_coeffs_path='./crtm_coeffs/', + newpc4pred=.true.,adp_anglebc=.true.,angord=4,passive_bc=.true.,use_edges=.false., + diag_precon=.true.,step_start=1.e-3,emiss_bc=.true.,nhr_obsbin=${nhr_obsbin:-3}, + cwoption=3,imp_physics=${imp_physics},lupp=${lupp},cnvw_option=${cnvw_option},cao_check=${cao_check}, + netcdf_diag=${netcdf_diag},binary_diag=${binary_diag}, + lobsdiag_forenkf=${lobsdiag_forenkf}, + write_fv3_incr=${write_fv3_increment}, + nhr_anal=${IAUFHRS}, + ta2tb=${ta2tb}, + ${WRITE_INCR_ZERO} + ${WRITE_ZERO_STRAT} + ${WRITE_STRAT_EFOLD} + ${SETUP} +/ +&GRIDOPTS + JCAP_B=${JCAP},JCAP=${JCAP_A},NLAT=${NLAT_A},NLON=${NLON_A},nsig=${LEVS}, + regional=.false.,nlayers(63)=3,nlayers(64)=6, + ${GRIDOPTS} +/ +&BKGERR + vs=0.7, + hzscl=1.7,0.8,0.5, + hswgt=0.45,0.3,0.25, + bw=0.0,norsp=4, + bkgv_flowdep=.true.,bkgv_rewgtfct=1.5, + bkgv_write=.false., + cwcoveqqcov=.false., + ${BKGVERR} +/ +&ANBKGERR + anisotropic=.false., + ${ANBKGERR} +/ +&JCOPTS + ljcdfi=.false.,alphajc=0.0,ljcpdry=.true.,bamp_jcpdry=5.0e7, + ${JCOPTS} +/ +&STRONGOPTS + tlnmc_option=2,nstrong=1,nvmodes_keep=8,period_max=6.,period_width=1.5, + ${STRONGOPTS} +/ +&OBSQC + dfact=0.75,dfact1=3.0,noiqc=.true.,oberrflg=.false.,c_varqc=0.02, + use_poq7=.true.,qc_noirjaco3_pole=.true.,vqc=.false.,nvqc=.true., + aircraft_t_bc=.true.,biaspredt=1.0e5,upd_aircraft=.true.,cleanup_tail=.true., + tcp_width=70.0,tcp_ermax=7.35, + ${OBSQC} +/ +&OBS_INPUT + dmesh(1)=145.0,dmesh(2)=150.0,dmesh(3)=100.0,dmesh(4)=50.0,time_window_max=3.0, + ${OBSINPUT} +/ +OBS_INPUT:: +! 
dfile dtype dplat dsis dval dthin dsfcalc + prepbufr ps null ps 0.0 0 0 + prepbufr t null t 0.0 0 0 + prepbufr_profl t null t 0.0 0 0 + hdobbufr t null t 0.0 0 0 + prepbufr q null q 0.0 0 0 + prepbufr_profl q null q 0.0 0 0 + hdobbufr q null q 0.0 0 0 + prepbufr pw null pw 0.0 0 0 + prepbufr uv null uv 0.0 0 0 + prepbufr_profl uv null uv 0.0 0 0 + satwndbufr uv null uv 0.0 0 0 + hdobbufr uv null uv 0.0 0 0 + prepbufr spd null spd 0.0 0 0 + hdobbufr spd null spd 0.0 0 0 + prepbufr dw null dw 0.0 0 0 + radarbufr rw null rw 0.0 0 0 + nsstbufr sst nsst sst 0.0 0 0 + gpsrobufr gps_bnd null gps 0.0 0 0 + ssmirrbufr pcp_ssmi dmsp pcp_ssmi 0.0 -1 0 + tmirrbufr pcp_tmi trmm pcp_tmi 0.0 -1 0 + sbuvbufr sbuv2 n16 sbuv8_n16 0.0 0 0 + sbuvbufr sbuv2 n17 sbuv8_n17 0.0 0 0 + sbuvbufr sbuv2 n18 sbuv8_n18 0.0 0 0 + hirs3bufr hirs3 n17 hirs3_n17 0.0 1 0 + hirs4bufr hirs4 metop-a hirs4_metop-a 0.0 1 1 + gimgrbufr goes_img g11 imgr_g11 0.0 1 0 + gimgrbufr goes_img g12 imgr_g12 0.0 1 0 + airsbufr airs aqua airs_aqua 0.0 1 1 + amsuabufr amsua n15 amsua_n15 0.0 1 1 + amsuabufr amsua n18 amsua_n18 0.0 1 1 + amsuabufr amsua metop-a amsua_metop-a 0.0 1 1 + airsbufr amsua aqua amsua_aqua 0.0 1 1 + amsubbufr amsub n17 amsub_n17 0.0 1 1 + mhsbufr mhs n18 mhs_n18 0.0 1 1 + mhsbufr mhs metop-a mhs_metop-a 0.0 1 1 + ssmitbufr ssmi f15 ssmi_f15 0.0 1 0 + amsrebufr amsre_low aqua amsre_aqua 0.0 1 0 + amsrebufr amsre_mid aqua amsre_aqua 0.0 1 0 + amsrebufr amsre_hig aqua amsre_aqua 0.0 1 0 + ssmisbufr ssmis f16 ssmis_f16 0.0 1 0 + ssmisbufr ssmis f17 ssmis_f17 0.0 1 0 + ssmisbufr ssmis f18 ssmis_f18 0.0 1 0 + gsnd1bufr sndrd1 g12 sndrD1_g12 0.0 1 0 + gsnd1bufr sndrd2 g12 sndrD2_g12 0.0 1 0 + gsnd1bufr sndrd3 g12 sndrD3_g12 0.0 1 0 + gsnd1bufr sndrd4 g12 sndrD4_g12 0.0 1 0 + gsnd1bufr sndrd1 g11 sndrD1_g11 0.0 1 0 + gsnd1bufr sndrd2 g11 sndrD2_g11 0.0 1 0 + gsnd1bufr sndrd3 g11 sndrD3_g11 0.0 1 0 + gsnd1bufr sndrd4 g11 sndrD4_g11 0.0 1 0 + gsnd1bufr sndrd1 g13 sndrD1_g13 0.0 1 0 + gsnd1bufr sndrd2 g13 sndrD2_g13 0.0 1 0 + gsnd1bufr sndrd3 g13 sndrD3_g13 0.0 1 0 + gsnd1bufr sndrd4 g13 sndrD4_g13 0.0 1 0 + iasibufr iasi metop-a iasi_metop-a 0.0 1 1 + gomebufr gome metop-a gome_metop-a 0.0 2 0 + omibufr omi aura omi_aura 0.0 2 0 + sbuvbufr sbuv2 n19 sbuv8_n19 0.0 0 0 + hirs4bufr hirs4 n19 hirs4_n19 0.0 1 1 + amsuabufr amsua n19 amsua_n19 0.0 1 1 + mhsbufr mhs n19 mhs_n19 0.0 1 1 + tcvitl tcp null tcp 0.0 0 0 + seviribufr seviri m08 seviri_m08 0.0 1 0 + seviribufr seviri m09 seviri_m09 0.0 1 0 + seviribufr seviri m10 seviri_m10 0.0 1 0 + seviribufr seviri m11 seviri_m11 0.0 1 0 + hirs4bufr hirs4 metop-b hirs4_metop-b 0.0 1 1 + amsuabufr amsua metop-b amsua_metop-b 0.0 1 1 + mhsbufr mhs metop-b mhs_metop-b 0.0 1 1 + iasibufr iasi metop-b iasi_metop-b 0.0 1 1 + gomebufr gome metop-b gome_metop-b 0.0 2 0 + atmsbufr atms npp atms_npp 0.0 1 1 + atmsbufr atms n20 atms_n20 0.0 1 1 + crisbufr cris npp cris_npp 0.0 1 0 + crisfsbufr cris-fsr npp cris-fsr_npp 0.0 1 0 + crisfsbufr cris-fsr n20 cris-fsr_n20 0.0 1 0 + gsnd1bufr sndrd1 g14 sndrD1_g14 0.0 1 0 + gsnd1bufr sndrd2 g14 sndrD2_g14 0.0 1 0 + gsnd1bufr sndrd3 g14 sndrD3_g14 0.0 1 0 + gsnd1bufr sndrd4 g14 sndrD4_g14 0.0 1 0 + gsnd1bufr sndrd1 g15 sndrD1_g15 0.0 1 0 + gsnd1bufr sndrd2 g15 sndrD2_g15 0.0 1 0 + gsnd1bufr sndrd3 g15 sndrD3_g15 0.0 1 0 + gsnd1bufr sndrd4 g15 sndrD4_g15 0.0 1 0 + oscatbufr uv null uv 0.0 0 0 + mlsbufr mls30 aura mls30_aura 0.0 0 0 + avhambufr avhrr metop-a avhrr3_metop-a 0.0 4 0 + avhpmbufr avhrr n18 avhrr3_n18 0.0 4 0 + avhambufr avhrr metop-b 
avhrr3_metop-b 0.0 4 0 + avhambufr avhrr metop-c avhrr3_metop-c 0.0 4 0 + avhpmbufr avhrr n19 avhrr3_n19 0.0 4 0 + amsr2bufr amsr2 gcom-w1 amsr2_gcom-w1 0.0 3 0 + gmibufr gmi gpm gmi_gpm 0.0 1 0 + saphirbufr saphir meghat saphir_meghat 0.0 3 0 + ahibufr ahi himawari8 ahi_himawari8 0.0 1 0 + abibufr abi g16 abi_g16 0.0 1 0 + abibufr abi g17 abi_g17 0.0 1 0 + rapidscatbufr uv null uv 0.0 0 0 + ompsnpbufr ompsnp npp ompsnp_npp 0.0 0 0 + ompslpbufr ompslp npp ompslp_npp 0.0 0 0 + ompstcbufr ompstc8 npp ompstc8_npp 0.0 2 0 + ompsnpbufr ompsnp n20 ompsnp_n20 0.0 0 0 + ompstcbufr ompstc8 n20 ompstc8_n20 0.0 2 0 + amsuabufr amsua metop-c amsua_metop-c 0.0 1 1 + mhsbufr mhs metop-c mhs_metop-c 0.0 1 1 + iasibufr iasi metop-c iasi_metop-c 0.0 1 1 + sstviirs viirs-m npp viirs-m_npp 0.0 4 0 + sstviirs viirs-m j1 viirs-m_j1 0.0 4 0 + ahibufr ahi himawari9 ahi_himawari9 0.0 1 0 + atmsbufr atms n21 atms_n21 0.0 1 1 + crisfsbufr cris-fsr n21 cris-fsr_n21 0.0 1 0 + sstviirs viirs-m j2 viirs-m_j2 0.0 4 0 + ompsnpbufr ompsnp n21 ompsnp_n21 0.0 0 0 + ompstcbufr ompstc8 n21 ompstc8_n21 0.0 2 0 + gomebufr gome metop-c gome_metop-c 0.0 2 0 +:: +&SUPEROB_RADAR + ${SUPERRAD} +/ +&LAG_DATA + ${LAGDATA} +/ +&HYBRID_ENSEMBLE + l_hyb_ens=${l_hyb_ens}, + generate_ens=.false., + beta_s0=0.125,readin_beta=.false., + s_ens_h=800.,s_ens_v=-0.8,readin_localization=.true., + aniso_a_en=.false.,oz_univ_static=.false.,uv_hyb_ens=.true., + ensemble_path='./ensemble_data/', + ens_fast_read=.true., + ${HYBRID_ENSEMBLE} +/ +&RAPIDREFRESH_CLDSURF + dfi_radar_latent_heat_time_period=30.0, + ${RAPIDREFRESH_CLDSURF} +/ +&CHEM + ${CHEM} +/ +&SINGLEOB_TEST + maginnov=0.1,magoberr=0.1,oneob_type='t', + oblat=45.,oblon=180.,obpres=1000.,obdattim=${CDATE}, + obhourset=0., + ${SINGLEOB} +/ +&NST + nst_gsi=${NST_GSI}, + ${NST} +/ +EOF +cat gsiparm.anl + +############################################################## +# Run gsi analysis + +export OMP_NUM_THREADS=${NTHREADS_GSI} +export pgm=${GSIEXEC} +. prep_step + +${NCP} ${GSIEXEC} ${DATA} +${APRUN_GSI} ${DATA}/$(basename ${GSIEXEC}) 1>&1 2>&2 +export err=$?; err_chk + + +############################################################## +# If full analysis field written, calculate analysis increment +# here before releasing FV3 forecast +if [ ${DO_CALC_INCREMENT} = "YES" ]; then + ${CALCINCPY} + export err=$?; err_chk +fi + + +############################################################## +# For eupd +if [ -s satbias_out.int ]; then + ${NCP} satbias_out.int ${ABIASe} +else + ${NCP} satbias_in ${ABIASe} +fi + +# Cat runtime output files. 
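As an illustration (not part of this patch): the gsiparm.anl heredoc above seeds every namelist group with defaults and then expands a caller-supplied override string (${SETUP}, ${GRIDOPTS}, ${OBSQC}, and so on) as the final entries of the group, the intent being that the later, experiment-supplied assignments take precedence when GSI reads the namelist. A minimal standalone sketch of that pattern, with an invented override value, looks like this:

#! /usr/bin/env bash
# Sketch of the namelist-override pattern used above (illustrative only).
# Defaults are written first; the caller's string is expanded last inside
# the same group so its assignments are read after the defaults.
set -eu

SETUP=${SETUP:-"miter=1,niter(1)=25,"}   # hypothetical override from an experiment config

cat > example.nml << EOF
&SETUP
  miter=2,
  niter(1)=100,niter(2)=100,
  ${SETUP}
/
EOF

cat example.nml

Exporting a different SETUP string before running the sketch changes only the trailing entries of the group, which is how workflow configuration can steer the real script without editing it.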
+cat fort.2* > ${GSISTAT} + +# If requested, create obsinput tarball from obs_input.* files +if [ ${RUN_SELECT} = "YES" ]; then + echo $(date) START tar obs_input >&2 + [[ -s obsinput.tar ]] && rm obsinput.tar + ${NLN} ${SELECT_OBS} obsinput.tar + ${CHGRP_CMD} obs_input.* + tar -cvf obsinput.tar obs_input.* + chmod 750 ${SELECT_OBS} + ${CHGRP_CMD} ${SELECT_OBS} + rm obsinput.tar + echo $(date) END tar obs_input >&2 +fi + +################################################################################ +# Send alerts +if [ ${SENDDBN} = "YES" ]; then + if [ ${RUN} = "gfs" ]; then + ${DBNROOT}/bin/dbn_alert MODEL GFS_abias ${job} ${ABIAS} + fi +fi + +################################################################################ +# Postprocessing +cd ${pwd} +[[ ${mkdata} = "YES" ]] && rm -rf ${DATA} + +############################################################## +# Add this statement to release the forecast job once the +# atmospheric analysis and updated surface RESTARTS are +# available. Do not release forecast when RUN=enkf +############################################################## +if [ ${SENDECF} = "YES" -a "${RUN}" != "enkf" ]; then + ecflow_client --event release_fcst +fi +echo "${CDUMP} ${CDATE} atminc done at $(date)" > ${COM_ATMOS_ANALYSIS}/${APREFIX}loginc.txt + +################################################################################ + +exit ${err} + +################################################################################ diff --git a/scripts/exglobal_atmos_analysis_calc.sh b/scripts/exglobal_atmos_analysis_calc.sh new file mode 100755 index 0000000000..b353d3c52b --- /dev/null +++ b/scripts/exglobal_atmos_analysis_calc.sh @@ -0,0 +1,197 @@ +#! /usr/bin/env bash + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exglobal_atmos_analysis_calc.sh +# Script description: Runs non-diagnostic file tasks after GSI analysis is performed +# +# Author: Cory Martin Org: NCEP/EMC Date: 2020-03-03 +# +# Abstract: This script wraps up analysis-related tasks after GSI exits successfully +# +# $Id$ +# +# Attributes: +# Language: POSIX shell +# +################################################################################ + +# Set environment. + +source "$HOMEgfs/ush/preamble.sh" + +# Directories.
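One convention worth noting as an aside (the sketch below is illustrative, not part of the patch): like the analysis script that just ended, exglobal_atmos_analysis_calc.sh sources preamble.sh and then pulls every utility, path, and flag from the environment with a parameter-expansion fallback, so the calling J-job can override commands while the script still runs with bare defaults. A small self-contained sketch of that convention, using made-up names where needed:

#! /usr/bin/env bash
# Sketch of the environment-with-fallback convention used by these ex-scripts
# (illustrative only; the DATA default below is invented for the example).
set -eu

export NCP=${NCP:-"/bin/cp"}       # copy command; a J-job may export a different one
export NLN=${NLN:-"/bin/ln -sf"}   # link command used to stage fix and obs files
DATA=${DATA:-"${TMPDIR:-/tmp}/ex_sketch.$$"}   # hypothetical working directory

mkdir -p "${DATA}"
echo "working in ${DATA} with NCP='${NCP}' and NLN='${NLN}'"

Exporting NCP, NLN, or DATA before running the sketch changes what it reports, which is exactly how the J-jobs steer the real scripts.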
+pwd=$(pwd) +export FIXgsm=${FIXgsm:-$HOMEgfs/fix/am} + +# Base variables +CDUMP=${CDUMP:-"gdas"} +GDUMP=${GDUMP:-"gdas"} + +# Utilities +export NCP=${NCP:-"/bin/cp"} +export NMV=${NMV:-"/bin/mv"} +export NLN=${NLN:-"/bin/ln -sf"} +export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"} +export NCLEN=${NCLEN:-$HOMEgfs/ush/getncdimlen} +COMPRESS=${COMPRESS:-gzip} +UNCOMPRESS=${UNCOMPRESS:-gunzip} +APRUNCFP=${APRUNCFP:-""} + +# Diagnostic files options +netcdf_diag=${netcdf_diag:-".true."} +binary_diag=${binary_diag:-".false."} + +# IAU +DOIAU=${DOIAU:-"NO"} +export IAUFHRS=${IAUFHRS:-"6"} + +# Dependent Scripts and Executables +export NTHREADS_CALCINC=${NTHREADS_CALCINC:-1} +export APRUN_CALCINC=${APRUN_CALCINC:-${APRUN:-""}} +export APRUN_CALCANL=${APRUN_CALCANL:-${APRUN:-""}} +export APRUN_CHGRES=${APRUN_CALCANL:-${APRUN:-""}} + +export CALCANLEXEC=${CALCANLEXEC:-$HOMEgfs/exec/calc_analysis.x} +export CHGRESNCEXEC=${CHGRESNCEXEC:-$HOMEgfs/exec/enkf_chgres_recenter_nc.x} +export CHGRESINCEXEC=${CHGRESINCEXEC:-$HOMEgfs/exec/interp_inc.x} +export NTHREADS_CHGRES=${NTHREADS_CHGRES:-1} +CALCINCPY=${CALCINCPY:-$HOMEgfs/ush/calcinc_gfs.py} +CALCANLPY=${CALCANLPY:-$HOMEgfs/ush/calcanl_gfs.py} + +DOGAUSFCANL=${DOGAUSFCANL-"NO"} +GAUSFCANLSH=${GAUSFCANLSH:-$HOMEgfs/ush/gaussian_sfcanl.sh} +export GAUSFCANLEXE=${GAUSFCANLEXE:-$HOMEgfs/exec/gaussian_sfcanl.x} +NTHREADS_GAUSFCANL=${NTHREADS_GAUSFCANL:-1} +APRUN_GAUSFCANL=${APRUN_GAUSFCANL:-${APRUN:-""}} + +# OPS flags +RUN=${RUN:-""} +SENDECF=${SENDECF:-"NO"} +SENDDBN=${SENDDBN:-"NO"} + +# Guess files +GPREFIX=${GPREFIX:-""} +ATMG03=${ATMG03:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf003.nc} +ATMG04=${ATMG04:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf004.nc} +ATMG05=${ATMG05:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf005.nc} +ATMGES=${ATMGES:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf006.nc} +ATMG07=${ATMG07:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf007.nc} +ATMG08=${ATMG08:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf008.nc} +ATMG09=${ATMG09:-${COM_ATMOS_HISTORY_PREV}/${GPREFIX}atmf009.nc} + +# Analysis files +export APREFIX=${APREFIX:-""} +SFCANL=${SFCANL:-${COM_ATMOS_ANALYSIS}/${APREFIX}sfcanl.nc} +DTFANL=${DTFANL:-${COM_ATMOS_ANALYSIS}/${APREFIX}dtfanl.nc} +ATMANL=${ATMANL:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmanl.nc} + +# Increment files +ATMINC=${ATMINC:-${COM_ATMOS_ANALYSIS}/${APREFIX}atminc.nc} + +# Set script / GSI control parameters +DOHYBVAR=${DOHYBVAR:-"NO"} +lrun_subdirs=${lrun_subdirs:-".true."} +if [ $DOHYBVAR = "YES" ]; then + l_hyb_ens=.true. + export l4densvar=${l4densvar:-".false."} + export lwrite4danl=${lwrite4danl:-".false."} +else + l_hyb_ens=.false. + export l4densvar=.false. + export lwrite4danl=.false. +fi + +# Set 4D-EnVar specific variables +if [ $DOHYBVAR = "YES" -a $l4densvar = ".true." -a $lwrite4danl = ".true." 
]; then + ATMA03=${ATMA03:-${COM_ATMOS_ANALYSIS}/${APREFIX}atma003.nc} + ATMI03=${ATMI03:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmi003.nc} + ATMA04=${ATMA04:-${COM_ATMOS_ANALYSIS}/${APREFIX}atma004.nc} + ATMI04=${ATMI04:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmi004.nc} + ATMA05=${ATMA05:-${COM_ATMOS_ANALYSIS}/${APREFIX}atma005.nc} + ATMI05=${ATMI05:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmi005.nc} + ATMA07=${ATMA07:-${COM_ATMOS_ANALYSIS}/${APREFIX}atma007.nc} + ATMI07=${ATMI07:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmi007.nc} + ATMA08=${ATMA08:-${COM_ATMOS_ANALYSIS}/${APREFIX}atma008.nc} + ATMI08=${ATMI08:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmi008.nc} + ATMA09=${ATMA09:-${COM_ATMOS_ANALYSIS}/${APREFIX}atma009.nc} + ATMI09=${ATMI09:-${COM_ATMOS_ANALYSIS}/${APREFIX}atmi009.nc} +fi + +################################################################################ +################################################################################ +# Preprocessing +mkdata=NO +if [ ! -d $DATA ]; then + mkdata=YES + mkdir -p $DATA +fi + +cd $DATA || exit 99 + +################################################################################ +# Clean the run-directory +rm -rf dir.* + +############################################################## +# If analysis increment is written by GSI, produce an analysis file here +if [ $DO_CALC_ANALYSIS == "YES" ]; then + # link analysis and increment files + $NLN $ATMANL siganl + $NLN $ATMINC siginc.nc + if [ $DOHYBVAR = "YES" -a $l4densvar = ".true." -a $lwrite4danl = ".true." ]; then + $NLN $ATMA03 siga03 + $NLN $ATMI03 sigi03.nc + $NLN $ATMA04 siga04 + $NLN $ATMI04 sigi04.nc + $NLN $ATMA05 siga05 + $NLN $ATMI05 sigi05.nc + $NLN $ATMA07 siga07 + $NLN $ATMI07 sigi07.nc + $NLN $ATMA08 siga08 + $NLN $ATMI08 sigi08.nc + $NLN $ATMA09 siga09 + $NLN $ATMI09 sigi09.nc + fi + # link guess files + $NLN $ATMG03 sigf03 + $NLN $ATMGES sigf06 + $NLN $ATMG09 sigf09 + + [[ -f $ATMG04 ]] && $NLN $ATMG04 sigf04 + [[ -f $ATMG05 ]] && $NLN $ATMG05 sigf05 + [[ -f $ATMG07 ]] && $NLN $ATMG07 sigf07 + [[ -f $ATMG08 ]] && $NLN $ATMG08 sigf08 + + # Link hourly backgrounds (if present) + if [ -f $ATMG04 -a -f $ATMG05 -a -f $ATMG07 -a -f $ATMG08 ]; then + nhr_obsbin=1 + fi + + $CALCANLPY + export err=$?; err_chk +else + echo "Neither increment nor analysis are generated by external utils" +fi + +############################################################## +# Create gaussian grid surface analysis file at middle of window +if [ $DOGAUSFCANL = "YES" ]; then + export APRUNSFC=$APRUN_GAUSFCANL + export OMP_NUM_THREADS_SFC=$NTHREADS_GAUSFCANL + + $GAUSFCANLSH + export err=$?; err_chk +fi + +echo "${CDUMP} ${PDY}${cyc} atmanl and sfcanl done at $(date)" > "${COM_ATMOS_ANALYSIS}/${APREFIX}loganl.txt" + +################################################################################ +# Postprocessing +cd $pwd +[[ $mkdata = "YES" ]] && rm -rf $DATA + + +exit $err + diff --git a/scripts/exglobal_atmos_pmgr.sh b/scripts/exglobal_atmos_pmgr.sh index 1a00eda1a6..6e4c2ed3f4 100755 --- a/scripts/exglobal_atmos_pmgr.sh +++ b/scripts/exglobal_atmos_pmgr.sh @@ -1,13 +1,14 @@ -#! /bin/ksh +#! 
/usr/bin/env bash + # # Script name: exgfs_pmgr.sh.sms # # This script monitors the progress of the gfs_fcst job # -set -x + +source "$HOMEgfs/ush/preamble.sh" hour=00 -typeset -Z2 hour case $RUN in gfs) @@ -25,11 +26,9 @@ if [ -e posthours ]; then fi while [ $hour -lt $TCP ]; do + hour=$(printf "%02d" $hour) echo $hour >>posthours - if [ $hour -lt 120 ]; then - if [ $hour -eq 99 ]; then - typeset -Z3 hour - fi + if [ 10#$hour -lt 120 ]; then let "hour=hour+1" else let "hour=hour+3" @@ -69,6 +68,5 @@ while [ $icnt -lt 1000 ]; do fi done -echo Exiting $0 exit diff --git a/scripts/exglobal_atmos_sfcanl.sh b/scripts/exglobal_atmos_sfcanl.sh new file mode 100755 index 0000000000..f173886a07 --- /dev/null +++ b/scripts/exglobal_atmos_sfcanl.sh @@ -0,0 +1,224 @@ +#! /usr/bin/env bash + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exglobal_atmos_sfcanl.sh +# Script description: Makes global model surface analysis files +# +# Author: Russ Treadon Org: NCEP/EMC Date: 2021-12-13 +# +# Abstract: This script makes global model surface analysis files +# +# $Id$ +# +# Attributes: +# Language: POSIX shell +# +################################################################################ + +# Set environment. + +source "${HOMEgfs}/ush/preamble.sh" + +# Directories. +pwd=$(pwd) + +# Derived base variables +# Ignore possible spelling error (nothing is misspelled) +# shellcheck disable=SC2153 +GDATE=$(${NDATE} -"${assim_freq}" "${PDY}${cyc}") +BDATE=$(${NDATE} -3 "${PDY}${cyc}") +bPDY=${BDATE:0:8} +bcyc=${BDATE:8:2} + +# Utilities +export NCP=${NCP:-"/bin/cp"} +export NMV=${NMV:-"/bin/mv"} +export NLN=${NLN:-"/bin/ln -sf"} +export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"} +export NCLEN=${NCLEN:-$HOMEgfs/ush/getncdimlen} +COMPRESS=${COMPRESS:-gzip} +UNCOMPRESS=${UNCOMPRESS:-gunzip} +APRUNCFP=${APRUNCFP:-""} + +# IAU +DOIAU=${DOIAU:-"NO"} +export IAUFHRS=${IAUFHRS:-"6"} + +# Surface cycle related parameters +CYCLESH=${CYCLESH:-${HOMEgfs}/ush/global_cycle.sh} +export CYCLEXEC=${CYCLEXEC:-${HOMEgfs}/exec/global_cycle} +NTHREADS_CYCLE=${NTHREADS_CYCLE:-24} +APRUN_CYCLE=${APRUN_CYCLE:-${APRUN:-""}} +export SNOW_NUDGE_COEFF=${SNOW_NUDGE_COEFF:-'-2.'} +export CYCLVARS=${CYCLVARS:-""} +export FHOUR=${FHOUR:-0} +export DELTSFC=${DELTSFC:-6} +export FIXgsm=${FIXgsm:-${HOMEgfs}/fix/am} +export FIXfv3=${FIXfv3:-${HOMEgfs}/fix/orog} + +# FV3 specific info (required for global_cycle) +export CASE=${CASE:-"C384"} +ntiles=${ntiles:-6} + +# IAU +DOIAU=${DOIAU:-"NO"} +export IAUFHRS=${IAUFHRS:-"6"} + +# Dependent Scripts and Executables +export NTHREADS_CALCINC=${NTHREADS_CALCINC:-1} +export APRUN_CALCINC=${APRUN_CALCINC:-${APRUN:-""}} +export APRUN_CALCANL=${APRUN_CALCANL:-${APRUN:-""}} +export APRUN_CHGRES=${APRUN_CALCANL:-${APRUN:-""}} + +export CALCANLEXEC=${CALCANLEXEC:-${HOMEgfs}/exec/calc_analysis.x} +export CHGRESNCEXEC=${CHGRESNCEXEC:-${HOMEgfs}/exec/enkf_chgres_recenter_nc.x} +export CHGRESINCEXEC=${CHGRESINCEXEC:-${HOMEgfs}/exec/interp_inc.x} +export NTHREADS_CHGRES=${NTHREADS_CHGRES:-1} +CALCINCPY=${CALCINCPY:-${HOMEgfs}/ush/calcinc_gfs.py} +CALCANLPY=${CALCANLPY:-${HOMEgfs}/ush/calcanl_gfs.py} + +export APRUN_CHGRES=${APRUN_CALCANL:-${APRUN:-""}} +CHGRESEXEC=${CHGRESEXEC:-${HOMEgfs}/exec/enkf_chgres_recenter.x} + +# OPS flags +RUN=${RUN:-""} +SENDECF=${SENDECF:-"NO"} +SENDDBN=${SENDDBN:-"NO"} +RUN_GETGES=${RUN_GETGES:-"NO"} +GETGESSH=${GETGESSH:-"getges.sh"} +export gesenvir=${gesenvir:-${envir}} + +# 
Observations +OPREFIX=${OPREFIX:-""} +OSUFFIX=${OSUFFIX:-""} + +# Guess files +GPREFIX=${GPREFIX:-""} + +# Analysis files +export APREFIX=${APREFIX:-""} +DTFANL=${DTFANL:-${COM_ATMOS_ANALYSIS}/${APREFIX}dtfanl.nc} + +# Get dimension information based on CASE +res=$(echo ${CASE} | cut -c2-) +JCAP_CASE=$((res*2-2)) +LATB_CASE=$((res*2)) +LONB_CASE=$((res*4)) + +################################################################################ +# Preprocessing +mkdata=NO +if [[ ! -d ${DATA} ]]; then + mkdata=YES + mkdir -p ${DATA} +fi + +cd ${DATA} || exit 99 + +if [[ ${DONST} = "YES" ]]; then + export NSSTBF="${COM_OBS}/${OPREFIX}nsstbufr" + ${NLN} ${NSSTBF} nsstbufr +fi + + +############################################################## +# Required model guess files + + +############################################################## +# Output files +if [[ ${DONST} = "YES" ]]; then + ${NLN} ${DTFANL} dtfanl +fi + + +############################################################## +# Update surface fields in the FV3 restart's using global_cycle +mkdir -p "${COM_ATMOS_RESTART}" + +# Global cycle requires these files +export FNTSFA=${FNTSFA:-${COM_OBS}/${OPREFIX}rtgssthr.grb} +export FNACNA=${FNACNA:-${COM_OBS}/${OPREFIX}seaice.5min.blend.grb} +export FNSNOA=${FNSNOA:-${COM_OBS}/${OPREFIX}snogrb_t${JCAP_CASE}.${LONB_CASE}.${LATB_CASE}} +[[ ! -f ${FNSNOA} ]] && export FNSNOA="${COM_OBS}/${OPREFIX}snogrb_t1534.3072.1536" +FNSNOG=${FNSNOG:-${COM_OBS_PREV}/${GPREFIX}snogrb_t${JCAP_CASE}.${LONB_CASE}.${LATB_CASE}} +[[ ! -f ${FNSNOG} ]] && FNSNOG="${COM_OBS_PREV}/${GPREFIX}snogrb_t1534.3072.1536" + +# Set CYCLVARS by checking grib date of current snogrb vs that of prev cycle +if [[ ${RUN_GETGES} = "YES" ]]; then + snoprv=$(${GETGESSH} -q -t snogrb_${JCAP_CASE} -e ${gesenvir} -n ${GDUMP} -v ${GDATE}) +else + snoprv=${snoprv:-${FNSNOG}} +fi + +if [[ $(${WGRIB} -4yr ${FNSNOA} 2>/dev/null | grep -i snowc | awk -F: '{print $3}' | awk -F= '{print $2}') -le \ + $(${WGRIB} -4yr ${snoprv} 2>/dev/null | grep -i snowc | awk -F: '{print $3}' | awk -F= '{print $2}') ]] ; then + export FNSNOA=" " + export CYCLVARS="FSNOL=99999.,FSNOS=99999.," +else + export SNOW_NUDGE_COEFF=${SNOW_NUDGE_COEFF:-0.} + export CYCLVARS="FSNOL=${SNOW_NUDGE_COEFF},${CYCLVARS}" +fi + +if [[ ${DONST} = "YES" ]]; then + export NST_FILE=${GSI_FILE:-${COM_ATMOS_ANALYSIS}/${APREFIX}dtfanl.nc} +else + export NST_FILE="NULL" +fi + +if [[ ${DOIAU} = "YES" ]]; then + # update surface restarts at the beginning of the window, if IAU + # For now assume/hold dtfanl.nc valid at beginning of window + for n in $(seq 1 ${ntiles}); do + ${NCP} "${COM_ATMOS_RESTART_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" \ + "${COM_ATMOS_RESTART}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" + ${NLN} "${COM_ATMOS_RESTART_PREV}/${bPDY}.${bcyc}0000.sfc_data.tile${n}.nc" "${DATA}/fnbgsi.00${n}" + ${NLN} "${COM_ATMOS_RESTART}/${bPDY}.${bcyc}0000.sfcanl_data.tile${n}.nc" "${DATA}/fnbgso.00${n}" + ${NLN} "${FIXfv3}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.00${n}" + ${NLN} "${FIXfv3}/${CASE}/${CASE}_oro_data.tile${n}.nc" "${DATA}/fnorog.00${n}" + done + + export APRUNCY=${APRUN_CYCLE} + export OMP_NUM_THREADS_CY=${NTHREADS_CYCLE} + export MAX_TASKS_CY=${ntiles} + + CDATE="${PDY}${cyc}" ${CYCLESH} + export err=$?; err_chk +fi + +# Update surface restarts at middle of window +for n in $(seq 1 ${ntiles}); do + if [[ ${DO_JEDILANDDA:-"NO"} = "YES" ]]; then + ${NCP} "${COM_LAND_ANALYSIS}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \ + 
"${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" + else + ${NCP} "${COM_ATMOS_RESTART_PREV}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" \ + "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" + fi + ${NLN} "${COM_ATMOS_RESTART_PREV}/${PDY}.${cyc}0000.sfc_data.tile${n}.nc" "${DATA}/fnbgsi.00${n}" + ${NLN} "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile${n}.nc" "${DATA}/fnbgso.00${n}" + ${NLN} "${FIXfv3}/${CASE}/${CASE}_grid.tile${n}.nc" "${DATA}/fngrid.00${n}" + ${NLN} "${FIXfv3}/${CASE}/${CASE}_oro_data.tile${n}.nc" "${DATA}/fnorog.00${n}" +done + +export APRUNCY=${APRUN_CYCLE} +export OMP_NUM_THREADS_CY=${NTHREADS_CYCLE} +export MAX_TASKS_CY=${ntiles} + +CDATE="${PDY}${cyc}" ${CYCLESH} +export err=$?; err_chk + + +################################################################################ +# Postprocessing +cd ${pwd} +[[ ${mkdata} = "YES" ]] && rm -rf ${DATA} + + +################################################################################ + +exit ${err} + +################################################################################ diff --git a/scripts/exglobal_atmos_tropcy_qc_reloc.sh b/scripts/exglobal_atmos_tropcy_qc_reloc.sh index f158b10ff9..380441a6c9 100755 --- a/scripts/exglobal_atmos_tropcy_qc_reloc.sh +++ b/scripts/exglobal_atmos_tropcy_qc_reloc.sh @@ -1,52 +1,42 @@ +#! /usr/bin/env bash + ############################################################################ -echo "---------------------------------------------------------------------" -echo "exglobal_atmos_tropcy_qc_reloc.sh - Tropical Cyclone QC/Relocation Prcocessing" -echo "---------------------------------------------------------------------" -echo "History: Jun 13 2006 - Original script." -echo " March 2013 - No changes needed for WCOSS transition" -echo " MP_LABELIO default added" -echo " Oct 2013 - Use main USH vars as part of minor pkg cleanup" +# echo "---------------------------------------------------------------------" +# echo "exglobal_atmos_tropcy_qc_reloc.sh - Tropical Cyclone QC/Relocation Prcocessing" +# echo "---------------------------------------------------------------------" +# echo "History: Jun 13 2006 - Original script." +# echo " March 2013 - No changes needed for WCOSS transition" +# echo " MP_LABELIO default added" +# echo " Oct 2013 - Use main USH vars as part of minor pkg cleanup" ############################################################################ -set -x +source "$HOMEgfs/ush/preamble.sh" # Make sure we are in the $DATA directory cd $DATA -msg="HAS BEGUN on $(hostname)" -postmsg "$jlogfile" "$msg" - cat break > $pgmout -export COMSP=$COMOUT/${RUN}.${cycle}. 
- tmhr=$(echo $tmmark|cut -c3-4) cdate10=$( ${NDATE:?} -$tmhr $PDY$cyc) NET_uc=$(echo $RUN | tr [a-z] [A-Z]) tmmark_uc=$(echo $tmmark | tr [a-z] [A-Z]) -msg="$NET_uc ANALYSIS TIME IS $PDY$cyc" -postmsg "$jlogfile" "$msg" - iflag=0 if [ $RUN = ndas ]; then if [ $DO_RELOCATE = NO ]; then - msg="CENTER PROCESSING TIME FOR NDAS TROPICAL CYCLONE QC IS $cdate10" - postmsg "$jlogfile" "$msg" - msg="Output tcvitals files will be copied forward in time to proper \ + echo "CENTER PROCESSING TIME FOR NDAS TROPICAL CYCLONE QC IS $cdate10" + echo "Output tcvitals files will be copied forward in time to proper \ output file directory path locations" - postmsg "$jlogfile" "$msg" iflag=1 else - msg="CENTER PROCESSING TIME FOR $tmmark_uc NDAS TROPICAL CYCLONE \ + echo "CENTER PROCESSING TIME FOR $tmmark_uc NDAS TROPICAL CYCLONE \ RELOCATION IS $cdate10" - postmsg "$jlogfile" "$msg" fi else - msg="CENTER PROCESSING TIME FOR $tmmark_uc $NET_uc TROPICAL CYCLONE QC/\ + echo "CENTER PROCESSING TIME FOR $tmmark_uc $NET_uc TROPICAL CYCLONE QC/\ RELOCATION IS $cdate10" - postmsg "$jlogfile" "$msg" fi @@ -63,17 +53,14 @@ if [ "$PROCESS_TROPCY" = 'YES' ]; then ${USHSYND:-$HOMEgfs/ush}/syndat_qctropcy.sh $cdate10 errsc=$? if [ "$errsc" -ne '0' ]; then - msg="syndat_qctropcy.sh failed. exit" - postmsg "$jlogfile" "$msg" + echo "syndat_qctropcy.sh failed. exit" exit $errsc fi - cd $COMOUT + cd "${COM_OBS}" || exit 1 pwd - set +x ls -ltr *syndata* - set -x cd $ARCHSYND pwd;ls -ltr cat syndat_dateck @@ -90,10 +77,10 @@ else # don't want to wipe out these files) # - [ ! -s ${COMSP}syndata.tcvitals.$tmmark ] && \ - cp /dev/null ${COMSP}syndata.tcvitals.$tmmark - [ ! -s ${COMSP}jtwc-fnoc.tcvitals.$tmmark ] && \ - cp /dev/null ${COMSP}jtwc-fnoc.tcvitals.$tmmark + [ ! -s "${COM_OBS}/${RUN}.t${cyc}z.syndata.tcvitals.${tmmark}" ] && \ + cp "/dev/null" "${COM_OBS}/${RUN}.t${cyc}z.syndata.tcvitals.${tmmark}" + [ ! 
-s "${COM_OBS}/${RUN}.t${cyc}z.jtwc-fnoc.tcvitals.${tmmark}" ] && \ + cp "/dev/null" "${COM_OBS}/${RUN}.t${cyc}z.jtwc-fnoc.tcvitals.${tmmark}" # endif loop $PROCESS_TROPCY fi @@ -121,25 +108,25 @@ if [ "$DO_RELOCATE" = 'YES' ]; then [ $RUN = gfs -o $RUN = gdas -o $NET = cfs ] && qual_last="" if [ $BKGFREQ -eq 1 ]; then - [ -s sgm3prep ] && cp sgm3prep ${COMSP}sgm3prep${qual_last} - [ -s sgm2prep ] && cp sgm2prep ${COMSP}sgm2prep${qual_last} - [ -s sgm1prep ] && cp sgm1prep ${COMSP}sgm1prep${qual_last} - [ -s sgesprep ] && cp sgesprep ${COMSP}sgesprep${qual_last} - [ -s sgp1prep ] && cp sgp1prep ${COMSP}sgp1prep${qual_last} - [ -s sgp2prep ] && cp sgp2prep ${COMSP}sgp2prep${qual_last} - [ -s sgp3prep ] && cp sgp3prep ${COMSP}sgp3prep${qual_last} + if [[ -s sgm3prep ]]; then cp "sgm3prep" "${COM_OBS}/${RUN}.t${cyc}z.sgm3prep${qual_last}"; fi + if [[ -s sgm2prep ]]; then cp "sgm2prep" "${COM_OBS}/${RUN}.t${cyc}z.sgm2prep${qual_last}"; fi + if [[ -s sgm1prep ]]; then cp "sgm1prep" "${COM_OBS}/${RUN}.t${cyc}z.sgm1prep${qual_last}"; fi + if [[ -s sgesprep ]]; then cp "sgesprep" "${COM_OBS}/${RUN}.t${cyc}z.sgesprep${qual_last}"; fi + if [[ -s sgp1prep ]]; then cp "sgp1prep" "${COM_OBS}/${RUN}.t${cyc}z.sgp1prep${qual_last}"; fi + if [[ -s sgp2prep ]]; then cp "sgp2prep" "${COM_OBS}/${RUN}.t${cyc}z.sgp2prep${qual_last}"; fi + if [[ -s sgp3prep ]]; then cp "sgp3prep" "${COM_OBS}/${RUN}.t${cyc}z.sgp3prep${qual_last}"; fi elif [ $BKGFREQ -eq 3 ]; then - [ -s sgm3prep ] && cp sgm3prep ${COMSP}sgm3prep${qual_last} - [ -s sgesprep ] && cp sgesprep ${COMSP}sgesprep${qual_last} - [ -s sgp3prep ] && cp sgp3prep ${COMSP}sgp3prep${qual_last} + if [[ -s sgm3prep ]]; then cp "sgm3prep" "${COM_OBS}/${RUN}.t${cyc}z.sgm3prep${qual_last}"; fi + if [[ -s sgesprep ]]; then cp "sgesprep" "${COM_OBS}/${RUN}.t${cyc}z.sgesprep${qual_last}"; fi + if [[ -s sgp3prep ]]; then cp "sgp3prep" "${COM_OBS}/${RUN}.t${cyc}z.sgp3prep${qual_last}"; fi fi -# The existence of ${COMSP}tropcy_relocation_status.$tmmark file will tell the +# The existence of ${COM_OBS}/${RUN}.t${cyc}z.tropcy_relocation_status.$tmmark file will tell the # subsequent PREP processing that RELOCATION processing occurred, if this file # does not already exist at this point, echo "RECORDS PROCESSED" into it to # further tell PREP processing that records were processed by relocation and # the global sigma guess was modified by tropical cyclone relocation -# Note: If ${COMSP}tropcy_relocation_status.$tmmark already exists at this +# Note: If ${COM_OBS}/${RUN}.t${cyc}z.tropcy_relocation_status.$tmmark already exists at this # point it means that it contains the string "NO RECORDS to process" # and was created by the child script tropcy_relocate.sh because records # were not processed by relocation and the global sigma guess was NOT @@ -147,8 +134,9 @@ if [ "$DO_RELOCATE" = 'YES' ]; then # were found in the relocation step) # ---------------------------------------------------------------------------- - [ ! -s ${COMSP}tropcy_relocation_status.$tmmark ] && \ - echo "RECORDS PROCESSED" > ${COMSP}tropcy_relocation_status.$tmmark + if [[ ! 
-s "${COM_OBS}/${RUN}.t${cyc}z.tropcy_relocation_status.${tmmark}" ]]; then + echo "RECORDS PROCESSED" > "${COM_OBS}/${RUN}.t${cyc}z.tropcy_relocation_status.${tmmark}" + fi # endif loop $DO_RELOCATE fi @@ -156,27 +144,8 @@ fi ######################################################## -# GOOD RUN -set +x -echo " " -echo " ****** PROCESSING COMPLETED NORMALLY" -echo " ****** PROCESSING COMPLETED NORMALLY" -echo " ****** PROCESSING COMPLETED NORMALLY" -echo " ****** PROCESSING COMPLETED NORMALLY" -echo " " -set -x - - # save standard output -cat break $pgmout break > allout -cat allout -# rm allout +cat break $pgmout break -sleep 10 - -if [ $iflag -eq 0 ]; then - msg='ENDED NORMALLY.' - postmsg "$jlogfile" "$msg" -fi ################## END OF SCRIPT ####################### diff --git a/scripts/exglobal_diag.sh b/scripts/exglobal_diag.sh new file mode 100755 index 0000000000..3aa1093fad --- /dev/null +++ b/scripts/exglobal_diag.sh @@ -0,0 +1,282 @@ +#! /usr/bin/env bash + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: exglobal_diag.sh +# Script description: Creates diagnostic files after GSI analysis is performed +# +# Author: Cory Martin Org: NCEP/EMC Date: 2020-03-03 +# +# Abstract: This script creates GSI diagnostic files after GSI exits successfully +# +# $Id$ +# +# Attributes: +# Language: POSIX shell +# +################################################################################ + +# Set environment. + +source "$HOMEgfs/ush/preamble.sh" + +# Directories. +pwd=$(pwd) + +# Base variables +CDATE="${PDY}${cyc}" +CDUMP=${CDUMP:-"gdas"} +GDUMP=${GDUMP:-"gdas"} + +# Utilities +export NCP=${NCP:-"/bin/cp"} +export NMV=${NMV:-"/bin/mv"} +export NLN=${NLN:-"/bin/ln -sf"} +export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"} +export NCLEN=${NCLEN:-$HOMEgfs/ush/getncdimlen} +export CATEXEC=${CATEXEC:-$ncdiag_ROOT/bin/ncdiag_cat_serial.x} +COMPRESS=${COMPRESS:-gzip} +UNCOMPRESS=${UNCOMPRESS:-gunzip} +APRUNCFP=${APRUNCFP:-""} + +# Diagnostic files options +netcdf_diag=${netcdf_diag:-".true."} +binary_diag=${binary_diag:-".false."} + +# OPS flags +RUN=${RUN:-""} +SENDECF=${SENDECF:-"NO"} +SENDDBN=${SENDDBN:-"NO"} + +# Guess files + +# Analysis files +export APREFIX=${APREFIX:-""} +RADSTAT=${RADSTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}radstat} +PCPSTAT=${PCPSTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}pcpstat} +CNVSTAT=${CNVSTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}cnvstat} +OZNSTAT=${OZNSTAT:-${COM_ATMOS_ANALYSIS}/${APREFIX}oznstat} + +# Remove stat file if file already exists +[[ -s $RADSTAT ]] && rm -f $RADSTAT +[[ -s $PCPSTAT ]] && rm -f $PCPSTAT +[[ -s $CNVSTAT ]] && rm -f $CNVSTAT +[[ -s $OZNSTAT ]] && rm -f $OZNSTAT + +# Obs diag +GENDIAG=${GENDIAG:-"YES"} +DIAG_SUFFIX=${DIAG_SUFFIX:-""} +if [ $netcdf_diag = ".true." ] ; then + DIAG_SUFFIX="${DIAG_SUFFIX}.nc4" +fi +DIAG_COMPRESS=${DIAG_COMPRESS:-"YES"} +DIAG_TARBALL=${DIAG_TARBALL:-"YES"} +USE_CFP=${USE_CFP:-"NO"} +CFP_MP=${CFP_MP:-"NO"} +nm="" +if [ $CFP_MP = "YES" ]; then + nm=0 +fi +DIAG_DIR=${DIAG_DIR:-${COM_ATMOS_ANALYSIS}/gsidiags} +REMOVE_DIAG_DIR=${REMOVE_DIAG_DIR:-"NO"} + +# Set script / GSI control parameters +lrun_subdirs=${lrun_subdirs:-".true."} + + +################################################################################ +# If requested, generate diagnostic files +if [ $GENDIAG = "YES" ] ; then + if [ $lrun_subdirs = ".true." 
] ; then + for pe in $DIAG_DIR/dir.*; do + pedir="$(basename -- $pe)" + $NLN $pe $DATA/$pedir + done + else + err_exit "***FATAL ERROR*** lrun_subdirs must be true. Abort job" + fi + + # Set up lists and variables for various types of diagnostic files. + ntype=3 + + diagtype[0]="conv conv_gps conv_ps conv_pw conv_q conv_sst conv_t conv_tcp conv_uv conv_spd" + diagtype[1]="pcp_ssmi_dmsp pcp_tmi_trmm" + diagtype[2]="sbuv2_n16 sbuv2_n17 sbuv2_n18 sbuv2_n19 gome_metop-a gome_metop-b omi_aura mls30_aura ompsnp_npp ompstc8_npp ompstc8_n20 ompsnp_n20 ompstc8_n21 ompsnp_n21 ompslp_npp gome_metop-c" + diagtype[3]="hirs2_n14 msu_n14 sndr_g08 sndr_g11 sndr_g12 sndr_g13 sndr_g08_prep sndr_g11_prep sndr_g12_prep sndr_g13_prep sndrd1_g11 sndrd2_g11 sndrd3_g11 sndrd4_g11 sndrd1_g12 sndrd2_g12 sndrd3_g12 sndrd4_g12 sndrd1_g13 sndrd2_g13 sndrd3_g13 sndrd4_g13 sndrd1_g14 sndrd2_g14 sndrd3_g14 sndrd4_g14 sndrd1_g15 sndrd2_g15 sndrd3_g15 sndrd4_g15 hirs3_n15 hirs3_n16 hirs3_n17 amsua_n15 amsua_n16 amsua_n17 amsub_n15 amsub_n16 amsub_n17 hsb_aqua airs_aqua amsua_aqua imgr_g08 imgr_g11 imgr_g12 imgr_g14 imgr_g15 ssmi_f13 ssmi_f15 hirs4_n18 hirs4_metop-a amsua_n18 amsua_metop-a mhs_n18 mhs_metop-a amsre_low_aqua amsre_mid_aqua amsre_hig_aqua ssmis_f16 ssmis_f17 ssmis_f18 ssmis_f19 ssmis_f20 iasi_metop-a hirs4_n19 amsua_n19 mhs_n19 seviri_m08 seviri_m09 seviri_m10 seviri_m11 cris_npp cris-fsr_npp cris-fsr_n20 atms_npp atms_n20 hirs4_metop-b amsua_metop-b mhs_metop-b iasi_metop-b avhrr_metop-b avhrr_n18 avhrr_n19 avhrr_metop-a amsr2_gcom-w1 gmi_gpm saphir_meghat ahi_himawari8 abi_g16 abi_g17 amsua_metop-c mhs_metop-c iasi_metop-c avhrr_metop-c viirs-m_npp viirs-m_j1 abi_g18 ahi_himawari9 viirs-m_j2 cris-fsr_n21 atms_n21" + + diaglist[0]=listcnv + diaglist[1]=listpcp + diaglist[2]=listozn + diaglist[3]=listrad + + diagfile[0]=$CNVSTAT + diagfile[1]=$PCPSTAT + diagfile[2]=$OZNSTAT + diagfile[3]=$RADSTAT + + numfile[0]=0 + numfile[1]=0 + numfile[2]=0 + numfile[3]=0 + + # Set diagnostic file prefix based on lrun_subdirs variable + if [ $lrun_subdirs = ".true." ]; then + prefix=" dir.*/" + else + prefix="pe*" + fi + + if [ $USE_CFP = "YES" ]; then + [[ -f $DATA/diag.sh ]] && rm $DATA/diag.sh + [[ -f $DATA/mp_diag.sh ]] && rm $DATA/mp_diag.sh + cat > $DATA/diag.sh << EOFdiag +#!/bin/sh +lrun_subdirs=\$1 +binary_diag=\$2 +type=\$3 +loop=\$4 +string=\$5 +CDATE=\$6 +DIAG_COMPRESS=\$7 +DIAG_SUFFIX=\$8 +if [ \$lrun_subdirs = ".true." ]; then + prefix=" dir.*/" +else + prefix="pe*" +fi +file=diag_\${type}_\${string}.\${CDATE}\${DIAG_SUFFIX} +if [ \$binary_diag = ".true." ]; then + cat \${prefix}\${type}_\${loop}* > \$file +else + $CATEXEC -o \$file \${prefix}\${type}_\${loop}* +fi +if [ \$DIAG_COMPRESS = "YES" ]; then + $COMPRESS \$file +fi +EOFdiag + chmod 755 $DATA/diag.sh + fi + + # Collect diagnostic files as a function of loop and type. + # Loop over first and last outer loops to generate innovation + # diagnostic files for indicated observation types (groups) + # + # NOTE: Since we set miter=2 in GSI namelist SETUP, outer + # loop 03 will contain innovations with respect to + # the analysis. Creation of o-a innovation files + # is triggered by write_diag(3)=.true. The setting + # write_diag(1)=.true. turns on creation of o-g + # innovation files. 
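The NOTE above is the key to the loop that follows: with miter=2, outer loop 01 carries the observation-minus-guess innovations (enabled by write_diag(1)=.true.) and outer loop 03 the observation-minus-analysis innovations (write_diag(3)=.true.), and those loop indices become the "ges" and "anl" suffixes of the assembled diag files. A tiny worked example of that name mapping, with an invented cycle date (not part of the patch):

#! /usr/bin/env bash
# Worked example of the loop-index to diag-file-name mapping described above
# (CDATE and the observation type are invented for illustration).
set -eu

CDATE=2021122018      # hypothetical analysis cycle
DIAG_SUFFIX=".nc4"    # netCDF diagnostics assumed
type="conv_t"

for loop in 01 03; do
  case ${loop} in
    01) string=ges;;  # o-minus-guess pieces from the first outer loop
    03) string=anl;;  # o-minus-analysis pieces after the final outer loop
  esac
  echo "loop ${loop} -> diag_${type}_${string}.${CDATE}${DIAG_SUFFIX}"
done

With those values the sketch prints diag_conv_t_ges.2021122018.nc4 and diag_conv_t_anl.2021122018.nc4, matching the names that the concatenation and tar steps below operate on.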
+ + loops="01 03" + for loop in $loops; do + case $loop in + 01) string=ges;; + 03) string=anl;; + *) string=$loop;; + esac + echo $(date) START loop $string >&2 + n=-1 + while [ $((n+=1)) -le $ntype ] ;do + for type in $(echo ${diagtype[n]}); do + count=$(ls ${prefix}${type}_${loop}* 2>/dev/null | wc -l) + if [ $count -gt 1 ]; then + if [ $USE_CFP = "YES" ]; then + echo "$nm $DATA/diag.sh $lrun_subdirs $binary_diag $type $loop $string $CDATE $DIAG_COMPRESS $DIAG_SUFFIX" | tee -a $DATA/mp_diag.sh + if [ ${CFP_MP:-"NO"} = "YES" ]; then + nm=$((nm+1)) + fi + else + if [ $binary_diag = ".true." ]; then + cat ${prefix}${type}_${loop}* > diag_${type}_${string}.${CDATE}${DIAG_SUFFIX} + else + $CATEXEC -o diag_${type}_${string}.${CDATE}${DIAG_SUFFIX} ${prefix}${type}_${loop}* + fi + fi + echo "diag_${type}_${string}.${CDATE}*" >> ${diaglist[n]} + numfile[n]=$(expr ${numfile[n]} + 1) + elif [ $count -eq 1 ]; then + cat ${prefix}${type}_${loop}* > diag_${type}_${string}.${CDATE}${DIAG_SUFFIX} + if [ $DIAG_COMPRESS = "YES" ]; then + $COMPRESS diag_${type}_${string}.${CDATE}${DIAG_SUFFIX} + fi + echo "diag_${type}_${string}.${CDATE}*" >> ${diaglist[n]} + numfile[n]=$(expr ${numfile[n]} + 1) + fi + done + done + echo $(date) END loop $string >&2 + done + + # We should already be in $DATA, but extra cd to be sure. + cd $DATA + + # If requested, compress diagnostic files + if [ $DIAG_COMPRESS = "YES" -a $USE_CFP = "NO" ]; then + echo $(date) START $COMPRESS diagnostic files >&2 + for file in $(ls diag_*${CDATE}${DIAG_SUFFIX}); do + $COMPRESS $file + done + echo $(date) END $COMPRESS diagnostic files >&2 + fi + + if [ $USE_CFP = "YES" ] ; then + chmod 755 $DATA/mp_diag.sh + ncmd=$(cat $DATA/mp_diag.sh | wc -l) + if [ $ncmd -gt 0 ]; then + ncmd_max=$((ncmd < npe_node_max ? ncmd : npe_node_max)) + APRUNCFP_DIAG=$(eval echo $APRUNCFP) + $APRUNCFP_DIAG $DATA/mp_diag.sh + export err=$?; err_chk + fi + fi + + # Restrict diagnostic files containing rstprod data + rlist="conv_gps conv_ps conv_pw conv_q conv_sst conv_t conv_uv saphir" + for rtype in $rlist; do + set +e + ${CHGRP_CMD} *${rtype}* + ${STRICT_ON:-set -e} + done + + # If requested, create diagnostic file tarballs + if [ $DIAG_TARBALL = "YES" ]; then + echo $(date) START tar diagnostic files >&2 + n=-1 + while [ $((n+=1)) -le $ntype ] ;do + TAROPTS="-uvf" + if [ ! 
-s ${diagfile[n]} ]; then + TAROPTS="-cvf" + fi + if [ ${numfile[n]} -gt 0 ]; then + tar $TAROPTS ${diagfile[n]} $(cat ${diaglist[n]}) + export err=$?; err_chk + fi + done + + # Restrict CNVSTAT + chmod 750 $CNVSTAT + ${CHGRP_CMD} $CNVSTAT + + # Restrict RADSTAT + chmod 750 $RADSTAT + ${CHGRP_CMD} $RADSTAT + + echo $(date) END tar diagnostic files >&2 + fi +fi # End diagnostic file generation block - if [ $GENDIAG = "YES" ] + +################################################################################ +# Postprocessing +# If no processing error, remove $DIAG_DIR +if [[ "$REMOVE_DIAG_DIR" = "YES" && "$err" = "0" ]]; then + rm -rf $DIAG_DIR +fi + +cd $pwd +[[ "${mkdata:-YES}" = "YES" ]] && rm -rf $DATA + + +exit $err + diff --git a/scripts/exglobal_forecast.py b/scripts/exglobal_forecast.py new file mode 100755 index 0000000000..2b21934bfa --- /dev/null +++ b/scripts/exglobal_forecast.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python3 + +import os + +from pygw.logger import Logger, logit +from pygw.yaml_file import save_as_yaml +from pygw.configuration import cast_strdict_as_dtypedict +from pygfs.task.gfs_forecast import GFSForecast + +# initialize root logger +logger = Logger(level=os.environ.get("LOGGING_LEVEL"), colored_log=True) + + +@logit(logger) +def main(): + + # instantiate the forecast + config = cast_strdict_as_dtypedict(os.environ) + save_as_yaml(config, f'{config.EXPDIR}/fcst.yaml') # Temporarily save the input to the Forecast + + fcst = GFSForecast(config) + fcst.initialize() + fcst.configure() + + +if __name__ == '__main__': + main() diff --git a/scripts/exglobal_forecast.sh b/scripts/exglobal_forecast.sh index 4c398e5055..d86691d5ec 100755 --- a/scripts/exglobal_forecast.sh +++ b/scripts/exglobal_forecast.sh @@ -1,4 +1,5 @@ -#!/bin/bash +#! /usr/bin/env bash + ################################################################################ ## UNIX Script Documentation Block ## Script name: exglobal_fcst_nemsfv3gfs.sh @@ -76,13 +77,9 @@ # Main body starts here ####################### -VERBOSE=${VERBOSE:-"YES"} -if [ $VERBOSE = "YES" ] ; then - echo $(date) EXECUTING $0 $* >&2 - set -x -fi +source "${HOMEgfs}/ush/preamble.sh" -SCRIPTDIR=$(dirname $(readlink -f "$0") )/../ush +SCRIPTDIR="${HOMEgfs}/ush" echo "MAIN: environment loaded for $machine platform,Current Script locates in $SCRIPTDIR." # include all subroutines. Executions later. @@ -126,18 +123,14 @@ common_predet echo $RUN case $RUN in 'data') DATM_predet;; - 'gfs') FV3_GFS_predet;; - 'gdas') FV3_GFS_predet;; - 'gefs') FV3_GEFS_predet;; + *gfs | *gdas | 'gefs') FV3_GFS_predet;; esac [[ $cplflx = .true. ]] && MOM6_predet -#[[ $cplwav = .true. ]] && WW3_predet #no WW3_predet at this time +[[ $cplwav = .true. ]] && WW3_predet [[ $cplice = .true. ]] && CICE_predet case $RUN in - 'gfs') FV3_GFS_det;; - 'gdas') FV3_GFS_det;; - 'gefs') FV3_GEFS_det;; + *gfs | *gdas | 'gefs') FV3_GFS_det;; esac #no run type determination for data atmosphere [[ $cplflx = .true. ]] && MOM6_det [[ $cplwav = .true. ]] && WW3_det @@ -149,9 +142,7 @@ echo "MAIN: Post-determination set up of run type" echo $RUN case $RUN in 'data') DATM_postdet;; - 'gfs') FV3_GFS_postdet;; - 'gdas') FV3_GFS_postdet;; - 'gefs') FV3_GEFS_postdet;; + *gfs | *gdas | 'gefs') FV3_GFS_postdet;; esac #no post determination set up for data atmosphere [[ $cplflx = .true. ]] && MOM6_postdet [[ $cplwav = .true. 
]] && WW3_postdet @@ -162,10 +153,8 @@ echo "MAIN: Post-determination set up of run type finished" echo "MAIN: Writing name lists and model configuration" case $RUN in 'data') DATM_nml;; - 'gfs') FV3_GFS_nml;; - 'gdas') FV3_GFS_nml;; - 'gefs') FV3_GEFS_nml;; -esac #no namelist for data atmosphere + *gfs | *gdas | 'gefs') FV3_GFS_nml;; +esac [[ $cplflx = .true. ]] && MOM6_nml [[ $cplwav = .true. ]] && WW3_nml [[ $cplice = .true. ]] && CICE_nml @@ -173,9 +162,7 @@ esac #no namelist for data atmosphere case $RUN in 'data') DATM_model_configure;; - 'gfs') FV3_model_configure;; - 'gdas') FV3_model_configure;; - 'gefs') FV3_model_configure;; + *gfs | *gdas | 'gefs') FV3_model_configure;; esac echo "MAIN: Name lists and model configuration written" @@ -191,42 +178,22 @@ if [ $esmf_profile ]; then export ESMF_RUNTIME_PROFILE_OUTPUT=SUMMARY fi -if [ $machine != 'sandbox' ]; then - $NCP $FCSTEXECDIR/$FCSTEXEC $DATA/. - export OMP_NUM_THREADS=$NTHREADS_FV3 - $APRUN_FV3 $DATA/$FCSTEXEC 1>&1 2>&2 - export ERR=$? - export err=$ERR - $ERRSCRIPT || exit $err -else - echo "MAIN: mpirun launch here" -fi +$NCP $FCSTEXECDIR/$FCSTEXEC $DATA/. +$APRUN_UFS $DATA/$FCSTEXEC 1>&1 2>&2 +export ERR=$? +export err=$ERR +$ERRSCRIPT || exit $err -if [ $machine != 'sandbox' ]; then - case $RUN in - 'data') data_out_Data_ATM;; - 'gfs') data_out_GFS;; - 'gdas') data_out_GFS;; - 'gefs') data_out_GEFS;; - esac - [[ $cplflx = .true. ]] && MOM6_out - [[ $cplwav = .true. ]] && WW3_out - [[ $cplice = .true. ]] && CICE_out - [[ $esmf_profile = .true. ]] && CPL_out -else - echo "MAIN: Running on sandbox mode, no output linking" -fi +case $RUN in + 'data') data_out_Data_ATM;; + *gfs | *gdas | 'gefs') data_out_GFS;; +esac +[[ $cplflx = .true. ]] && MOM6_out +[[ $cplwav = .true. ]] && WW3_out +[[ $cplice = .true. ]] && CICE_out +[[ $esmf_profile = .true. 
]] && CPL_out echo "MAIN: Output copied to COMROT" #------------------------------------------------------------------ -if [ $VERBOSE = "YES" ] ; then - echo $(date) EXITING $0 with return code $err >&2 -fi -if [ $err != 0 ]; then - echo "MAIN: $confignamevarfornems Forecast failed" - exit $err -else - echo "MAIN: $confignamevarfornems Forecast completed at normal status" - exit 0 -fi +exit $err diff --git a/scripts/exglobal_prep_land_obs.py b/scripts/exglobal_prep_land_obs.py new file mode 100755 index 0000000000..69d15b95c8 --- /dev/null +++ b/scripts/exglobal_prep_land_obs.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python3 +# exglobal_land_analysis_prepare.py +# This script creates a LandAnalysis object +# and runs the prepare_IMS method +# which perform the pre-processing for IMS data +import os + +from pygw.logger import Logger +from pygw.configuration import cast_strdict_as_dtypedict +from pygfs.task.land_analysis import LandAnalysis + + +# Initialize root logger +logger = Logger(level=os.environ.get("LOGGING_LEVEL", "DEBUG"), colored_log=True) + + +if __name__ == '__main__': + + # Take configuration from environment and cast it as python dictionary + config = cast_strdict_as_dtypedict(os.environ) + + # Instantiate the land prepare task + LandAnl = LandAnalysis(config) + LandAnl.prepare_IMS() diff --git a/scripts/run_gfsmos_master.sh.cray b/scripts/run_gfsmos_master.sh.cray deleted file mode 100755 index e013f0b34c..0000000000 --- a/scripts/run_gfsmos_master.sh.cray +++ /dev/null @@ -1,330 +0,0 @@ -#!/bin/sh -####################################################################### -# run_gfsmos.sh -# -# History: -# 03/29/13 Scallion Created (Adapted from Eric Engle's -# "master" scripts) -# 12/03/13 Scallion Added ptype/wxgrid -# 04/18/18 J Wagner Set up to run GFSMOS after GFS FV3 retros -# -# Purpose: -# To run the GFS-MOS operational suite -# -####################################################################### -set -x - -if (( $# > 1 )); then - echo "Incorrect number of arguments." - echo "Syntax: $0 [PDYCYC (optional)]" - echo "Exiting..." - exit 1 -fi - -####################################################################### -# Source bash_profile to run with proper modules on cron -####################################################################### -#. ~/.profile - -####################################################################### -# Set global variables neede in the run script and/or each individual -# job script. -####################################################################### - - -. 
$MODULESHOME/init/sh 2>/dev/null -module load prod_envir/1.1.0 -module load cfp-intel-sandybridge -module load craype-hugepages2M -export FORT_BUFFERED=TRUE -export KMP_AFFINITY=disabled -export envir=prod -#export QUEUE=dev - -#------------------ -export SENDCOM_SAVE=$SENDCOM -export SENDCOM=YES -#------------------ - -dateIn=$1 -export PDY=$(echo $dateIn | cut -c 1-8) -export cyc=$(echo $dateIn | cut -c 9-10) -export prevday=$($NDATE -24 ${PDY}00 | cut -c1-8) - -# -# VARIABLES TO SET -# PTMPROOT and STMPROOT should be set to the user's directories -# COMDATEROOT defined by module prod_util -#export PTMPROOT=/gpfs/hps3/ptmp/$USER -export PTMPROOT=$ROTDIR/gfsmos.$PDY -#export STMPROOT=/gpfs/hps3/stmp/$USER -export STMPROOT=$RUNDIR/gfsmos.$PDY -export DATAROOT=$STMPROOT -export MODELROOT=$NWROOThps -export MODELDIR=$MODELROOT/gfsmos.v5.0.6 -export CODEDIR=$MODELROOT/mos_shared.v2.6.1 - -if [[ $SITE == "SURGE" ]]; then - export siteprefix=g -elif [[ $SITE == "LUNA" ]]; then - export siteprefix=t -else - echo "SITE $SITE not recognized" - exit -fi - -export range=${range:-"both"} -export skipmodel=n -export skipprep=n -export stnonly=Y -export cycle="t${cyc}z" -export pid="gfs.$$" -export dailylog=$PTMPROOT/dailylog/log.$PDY -export jlogfile=$dailylog/jlogfile_gfsmos -mkdir -p $dailylog - -export SENDDBN=NO -export GET_IOPROFILE=NO - -# Specify Execution Areas -export HOMEmdl=$MODELDIR -export HOMEcode=$CODEDIR - -if [ ! -d $HOMEmdl ]; then - echo "$HOMEmdl does not exist" - exit 1 -fi - -if [ ! -d $HOMEcode ]; then - echo "$HOMEcode does not exist" - exit 1 -fi - -# Load modules -module load prod_util - -module unload grib_util/1.0.3 -module use /usrx/local/nceplibs/modulefiles -module load grib_util/1.1.0 - -# VARIABLES TO SET -# GFSDIR should be set to the directory containing the input GFS FV3 data -# COMOUT should be set to the directory where the GFSMOS output files will be saved -#export GFSDIR=/gpfs/hps3/ptmp/emc.glopara/fv3fy18retro2 -export GFSDIR=$ROTDIR -export COMINgfs=$GFSDIR/gfs.${PDY}/${cyc} -export COMOUT=$ROTDIR/gfsmos.$PDY - -if [[ ! -d $PTMPROOT/qprod ]]; then - mkdir -p $PTMPROOT/qprod -fi - -if [[ ! -d $COMOUT ]]; then - mkdir -p $COMOUT -fi - -export PCOM=$COMOUT - -if [[ ! -d $PCOM ]]; then - mkdir -p $PCOM -fi - - -# NOTE: On WCOSS_C the directory from which bsub -# is executed must exist when the submitted job -# begins. Otherwise, the submitted job fails -# with TERM_CWD_NOTEXIST error. - -mkdir -p $DATAROOT -cd $DATAROOT - - -######################################################################## -# JGFSMOS_PREP47 -######################################################################## -export job=gfsmos_prep_${cyc}_${pid} -export COMIN=$GFSDIR -jobCard=$HOMEmdl/jobs/JGFSMOS_PREP -# Define DATA and create directory -export DATA=$DATAROOT/gfsmos_prep_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog - -export PTILE=7 -export NTASK=7 -export OMP_NUM_THREADS=1 - -bsub -J $job \ - -o $logfile \ - -q $QUEUE \ - -W 2:00 \ - -M 2500 \ - -P $ACCOUNT \ - -extsched 'CRAYLINUX[]' \ - -R '1*{select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24]}' \ - $jobCard - -######################################################################## -# JGFSMOS_FORECAST -######################################################################## -if [ $prevday -lt $(date -u +%Y%m%d -d "48 hours ago") ]; then - export COMINhourly=$PTMPROOT/hourly.$PDY - if [[ ! 
-d $COMINhourly ]]; then - mkdir -p $COMINhourly - fi - \cp /gpfs/${siteprefix}d1/mdl/mdlstat/save/rotate/hry/${PDY}03 $COMINhourly/sfctbl.03 - \cp /gpfs/${siteprefix}d1/mdl/mdlstat/save/rotate/hry/${PDY}09 $COMINhourly/sfctbl.09 - \cp /gpfs/${siteprefix}d1/mdl/mdlstat/save/rotate/hry/${PDY}15 $COMINhourly/sfctbl.15 - \cp /gpfs/${siteprefix}d1/mdl/mdlstat/save/rotate/hry/${PDY}21 $COMINhourly/sfctbl.21 -fi - -# -# Change COMIN to get files from user's PTMP "qprod" area -export COMIN=$COMOUT -# -export job=gfsmos_fcst_${cyc}_${pid} -jobCard=$HOMEmdl/jobs/JGFSMOS_FORECAST -export DATA=$DATAROOT/gfsmos_fcst_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog -# Set dependencies -deps="done(gfsmos_prep_${cyc}_${pid})" -complist="metar cooprfcmeso tstms" -complist2="" -export PTILE=3 -export NTASK=3 -export OMP_NUM_THREADS=1 - -bsub -J ${job} -oo $logfile -q ${QUEUE} -P $ACCOUNT \ - -W 2:30 -M 2000 \ - -extsched 'CRAYLINUX[]' \ - -R '1*{select[craylinux && !vnode]} + 72*{select[craylinux && vnode]span[ptile=24]}' \ - -w "$deps" \ - $jobCard -# -######################################################################## -# JGFSMOS_PRDGEN -######################################################################## -jobCard=$HOMEmdl/jobs/JGFSMOS_PRDGEN -export job=gfsmos_prdgen_${cyc}_${pid} -# Change COMIN back to COMOUT -export COMIN=$COMOUT -# Define DATA and create directory -export DATA=$DATAROOT/gfsmos_prdgen_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog -# Set dependencies -deps="done(gfsmos_fcst_${cyc}_${pid})" -nodes='1*{select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24]}' -export PTILE=1 -export NTASK=1 -export OMP_NUM_THREADS=1 -# -bsub -J ${job} -oo $logfile -q ${QUEUE} -P $ACCOUNT \ - -W 1:00 -M 2000 \ - -extsched 'CRAYLINUX[]' \ - -R "$nodes" \ - -w "$deps" \ - $jobCard -# -######################################################################## -# EXTENDED-RANGE JOBS -######################################################################## - -######################################################################## -# JGFSMOS_EXT_PREP47 -######################################################################## -export job=gfsmos_extprep_${cyc}_${pid} -export COMIN=$GFSDIR -jobCard=$HOMEmdl/jobs/JGFSMOS_EXT_PREP -# Define DATA and create directory -export DATA=$DATAROOT/gfsmos_extprep_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog - -export PTILE=5 -export NTASK=10 -export OMP_NUM_THREADS=1 - -bsub -J $job \ - -o $logfile \ - -q $QUEUE \ - -W 2:00 \ - -M 2500 \ - -P $ACCOUNT \ - -extsched 'CRAYLINUX[]' \ - -R '1*{select[craylinux && !vnode]} + 48*{select[craylinux && vnode]span[ptile=24]}' \ - $jobCard - - -# Skip EXT_FORECAST for 06/18 -if [ $cyc -eq 00 -o $cyc -eq 12 ]; then -######################################################################## -# JGFSMOS_EXT_FORECAST -######################################################################## -# -# Change COMIN to get files from user's PTMP "qprod" area -export COMIN=$COMOUT -# -export job=gfsmos_extfcst_${cyc}_${pid} -jobCard=$HOMEmdl/jobs/JGFSMOS_EXT_FORECAST -export DATA=$DATAROOT/gfsmos_extfcst_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog -# Set dependencies -if [[ "$skipprep" != "y" && "$range" == "both" ]]; then - deps="done(gfsmos_extprep_${cyc}_${pid}) && done(gfsmos_prdgen_${cyc}_${pid})" -elif [[ "$skipprep" != "y" && "$range" == "ext" ]]; then - deps="done(gfsmos_extprep_${cyc}_${pid})" -elif [[ "$skipprep" == "y" && 
"$range" == "ext" ]]; then - deps="" -else - deps="done(gfsmos_prdgen_${cyc}_${pid})" -fi -# -export PTILE=4 -export NTASK=10 -export OMP_NUM_THREADS=1 -# -bsub -J ${job} -oo $logfile -q $QUEUE -P $ACCOUNT \ - -W 4:00 -M 2000 \ - -extsched 'CRAYLINUX[]' \ - -R '1*{select[craylinux && !vnode]} + 72*{select[craylinux && vnode]span[ptile=24]}' \ - -w "$deps" \ - $jobCard -# -fi #endif for skipping 06/18 ext_fcst -######################################################################### -## JGFSMOS_EXT_PRDGEN -######################################################################### -jobCard=$HOMEmdl/jobs/JGFSMOS_EXT_PRDGEN -export job=gfsmos_extprdgen_${cyc}_${pid} -# Change COMIN back to COMOUT -export COMIN=$COMOUT -# Define DATA and create directory -export DATA=$DATAROOT/gfsmos_extprdgen_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog -# Set dependencies -if [[ "$cyc" == "06" || "$cyc" == "18" ]]; then - deps="done(gfsmos_prdgen_${cyc}_${pid})" -elif [[ "$range" == "both" ]]; then - deps="done(gfsmos_extfcst_${cyc}_${pid}) && done(gfsmos_prdgen_${cyc}_${pid})" -else - deps="done(gfsmos_extfcst_${cyc}_${pid})" -fi -nodes='1*{select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24]}' -export PTILE=1 -export NTASK=1 -export OMP_NUM_THREADS=1 - -bsub -J ${job} -oo $logfile -q ${QUEUE} -P $ACCOUNT \ - -W 2:00 -M 2000 \ - -extsched 'CRAYLINUX[]' \ - -R "$nodes" \ - -w "$deps" \ - $jobCard - - -#--------------------- -export SENDCOM=$SENDCOM_SAVE diff --git a/scripts/run_gfsmos_master.sh.dell b/scripts/run_gfsmos_master.sh.dell deleted file mode 100755 index eb255e11f5..0000000000 --- a/scripts/run_gfsmos_master.sh.dell +++ /dev/null @@ -1,765 +0,0 @@ -#!/bin/sh -####################################################################### -# run_gfsmos.sh -# -# History: -# 03/29/13 Scallion Created (Adapted from Eric Engle's -# "master" scripts) -# 12/03/13 Scallion Added ptype/wxgrid -# -# Purpose: -# To run the GFS-MOS operational suite -# -####################################################################### -set -x - -if (( $# > 1 )); then - echo "Incorrect number of arguments." - echo "Syntax: $0 [PDYCYC (optional)]" - echo "Exiting..." - exit 1 -fi - -##export PATH=./:$PATH - -####################################################################### -# Only run on the dev machine -####################################################################### -#/u/Scott.Scallion/bin/run-on-dev.sh -#[[ $? -eq 1 ]] && exit 1 - -####################################################################### -# Source bash_profile to run with proper modules on cron -####################################################################### -#elim . ~/.bash_profile 1> /dev/null 2>&1 -#. ~/.bash_profile - -####################################################################### -# Check the host to determine whether tide or gyre is prod -####################################################################### -#chkhost=$(hostname | cut -c1) -if [[ $SITE == "GYRE" ]] || [[ $SITE == "SURGE" ]] || [[ $SITE == "VENUS" ]]; then - gort="g" -elif [[ $SITE == "TIDE" ]] || [[ $SITE == "LUNA" ]] || [[ $SITE == "MARS" ]]; then - gort="t" -fi - -####################################################################### -# Set global variables neede in the run script and/or each individual -# job script. -####################################################################### -. 
$MODULESHOME/init/bash -#module purge 2>/dev/null -module load EnvVars/1.0.2 2>/dev/null -module load ips/18.0.1.163 2>/dev/null -module load impi/18.0.1 2>/dev/null -module load lsf/10.1 2>/dev/null -module load prod_envir/1.0.3 2>/dev/null -module load prod_util/1.1.4 2>/dev/null -module load CFP/2.0.1 2>/dev/null - -module use -a /gpfs/dell1/nco/ops/nwpara/modulefiles/compiler_prod/ips/18.0.1 -module load grib_util/1.1.1 2>/dev/null - -export FORT_BUFFERED=TRUE -export KMP_AFFINITY=disabled -export envir=prod -export RUN_ENVIR=${RUN_ENVIR:-""} -#export QUEUE=dev - -#------------------ -export SENDCOM=YES -export SENDCOM_SAVE=$SENDCOM -#------------------ - -#-------------------------------- -# COMDATEROOT defined by module prod_util -##export PTMPROOT=/gpfs/dell2/ptmp/$USER -##export STMPROOT=/gpfs/dell2/stmp/$USER -##export MODELROOT=/gpfs/dell2/mdl/mdlstat/noscrub/usr/$USER/nwprod -##export MODELDIR=$MODELROOT/gfsmos.v5.0.6 -##export CODEDIR=$MODELROOT/mos_shared.v2.6.1 - -export PTMPROOT=$ROTDIR/gfsmos.$PDY -export STMPROOT=$RUNDIR/gfsmos.$PDY -export DATAROOT=$STMPROOT -##export MODELROOT=/gpfs/dell2/emc/modeling/noscrub/emc.glopara/git/gfsmos -export MODELROOT=/gpfs/dell2/mdl/mdlstat/noscrub/usr/Scott.Scallion/gfsv16 -#export MODELDIR=$MODELROOT/gfsmos.v5.2.0.1 -export MODELDIR=$MODELROOT/gfsmos.v5.2.0.1-restructure -export CODEDIR=/gpfs/dell1/nco/ops/nwpara/mos_shared.v2.6.5 -#-------------------------------- - -#-------------------------------- -##export PDY=20180419 -##export PDY=$(date -u +%Y%m%d) -##export prevday=$(date -u --date="${PDY} 0000 UTC 24 hours ago" +%Y%m%d) - -dateIn=$1 -#if [ $REALTIME = "YES" ]; then -# GDATE=$($NDATE -24 $dateIn) -# dateIn=$GDATE -#fi -export PDY=$(echo $dateIn | cut -c 1-8) -export cyc=$(echo $dateIn | cut -c 9-10) -export prevday=$($NDATE -24 ${PDY}00 | cut -c1-8) -#-------------------------------- - - -#-------------------------------- -##let hour=$(date -u +"%-H") -##if [[ $hour -ge 4 && $hour -lt 10 ]]; then -## export cyc=00 -## export range=both -##elif [[ $hour -ge 10 && $hour -lt 16 ]]; then -## export cyc=06 -## export range=both -##elif [[ $hour -ge 16 && $hour -lt 22 ]]; then -## export cyc=12 -## export range=both -##elif [[ $hour -ge 22 && $hour -le 23 ]]; then -## export cyc=18 -## export range=both -##else -## export cyc=18 -## export PDY=$prevday -## export range=both -##fi -## -##cyc_list="00 06 12 18" -##if [[ $# == 1 ]] && [[ $cyc_list =~ $1 ]]; then -## export cyc=$1 -## if [ "$cyc" == "00" -o "$cyc" == "12" ]; then -## export range=both -## else -## export range=both -## fi -##elif [[ $# == 1 ]]; then -## echo "$1 is not a valid cycle (choose 00 or 12)" -## exit 1 -##fi - -# ERIC ENGLE 05/30/2018 - PATCH FOR FV3GFS WINTER 17/18 PARALLEL -export range=${range:-"both"} - -#if [ $cyc -eq 00 -o $cyc -eq 12 ]; then -# if [ $cyc -eq 00 ]; then -# export range=both -# else -# export range=short -# fi -# ERIC ENGLE 05/30/2018 - PATCH FOR FV3GFS WINTER 17/18 PARALLEL -export stnonly='Y' -export skipmodel=n -export skipprep=n -export cycle="t${cyc}z" -export pid="gfs_qprod.$$" -export dailylog=$PTMPROOT/dailylog/log.$PDY -export jlogfile=$dailylog/jlogfile_gfsmos -mkdir -p $dailylog - -export SENDDBN=NO -export SENDDBN_NTC=NO -export GET_IOPROFILE=NO - -# Specify Execution Areas -export HOMEmdl=$MODELDIR -export HOMEcode=$CODEDIR -#export utilscript=/gpfs/hps/mdl/mdlstat/noscrub/usr/Scott.Scallion/ush - -if [ ! -d $HOMEmdl ]; then - echo "$HOMEmdl does not exist" - exit 1 -fi - -if [ ! 
-d $HOMEcode ]; then - echo "$HOMEcode does not exist" - exit 1 -fi - - -#------------------------------------- -# Define COMOUT (COMIN will vary by job) -#export GFSDIR=$COMROOThps/gfs/prod/gfs.$PDY -##export GFSDIR=$COMROOThps/gfs/prod/gfs.$PDY -##export COMINgfs=$GFSDIR -##export COMOUT=$PTMPROOT/qprod/gfsmos.$PDY - -export GFSDIR=$COMROOT/gfs/prod/gfs.${PDY} -if [[ "$RUN_ENVIR" = "emc" ]] ; then - export GFSDIR=$ROTDIR/gfs.${PDY} -fi -export COMINgfs=$GFSDIR -export COMOUT=$ROTDIR/gfsmos.$PDY - -#export COMINm1=$PTMPROOT/gfsmos.$prevday -#export COMINm1=$PTMPROOT/qprod/gfsmos_gmos_pre-nbmv2.$prevday -#export COMINm1=$PTMPROOT/qprod/gfsmos.$prevday - -if [[ ! -d $PTMPROOT/qprod ]]; then - mkdir -p $PTMPROOT/qprod -fi - -if [[ ! -d $COMOUT ]]; then - mkdir -p $COMOUT -fi - -export COMOUTwmo=$PTMPROOT/wmo - -if [[ ! -d $COMOUTwmo ]]; then - mkdir -p $COMOUTwmo -fi - - -# NOTE: On WCOSS_DELL_P3 the directory from which bsub -# is executed must exist when the submitted job -# begins. Otherwise, the submitted job fails -# with TERM_CWD_NOTEXIST error. - -mkdir -p $DATAROOT -cd $DATAROOT - - -if [ "$range" == "short" -o "$range" == "both" ]; then -######################################################################## -######################################################################## -# SHORT-RANGE JOBS -######################################################################## -######################################################################## - -######################################################################## -# Wait for 1 degree model data -######################################################################## -##if [ "$skipmodel" != "y" ]; then -##let attempts=1 -##while [[ $attempts -le 120 ]] -##do -## if [[ -f $GFSDIR/gfs.$cycle.pgrb2.1p00.f096 ]]; then -## echo "Model file found. Proceeding..." -## break -## else -## if [[ $attempts -le 60 ]]; then -## sleep 60 -## else -## sleep 180 -## fi -## attempts=$((attempts+1)) -## fi -##done -## -##if [[ $attempts -gt 120 ]]; then -## echo "Waited 4 hours for model file, but it did not appear." -## echo "Exiting..." -## exit 1 -##fi -## -##fi #endif for skipmodel - -######################################################################## -# Wait for 1/4 degree model data -######################################################################## -##if [ "$skipmodel" != "y" ]; then -##let attempts=1 -##while [[ $attempts -le 120 ]] -##do -## if [[ -f $GFSDIR/gfs.$cycle.pgrb2.0p25.f096 ]]; then -## echo "Model file found. Proceeding..." -## break -## else -## if [[ $attempts -le 60 ]]; then -## sleep 60 -## else -## sleep 180 -## fi -## attempts=$((attempts+1)) -## fi -##done - -##if [[ $attempts -gt 120 ]]; then -## echo "Waited 4 hours for model file, but it did not appear." -## echo "Exiting..." -## exit 1 -##fi -## -##fi #endif for skipmodel - -######################################################################## -# Wait for 1.0 degree GFS model files before running (Pacific MOS) -######################################################################## -##if [ "$skipmodel" != "y" ]; then -##let attempts=1 -##while [[ $attempts -le 120 ]] -##do -## if [[ -f $GFSDIR/gfs.$cycle.pgrb2.1p00.f096 ]]; then -## echo "1.0 degree model file found. Proceeding..." -## break -## else -## if [[ $attempts -le 60 ]]; then -## sleep 60 -## else -## sleep 180 -## fi -## attempts=$((attempts+1)) -## fi -##done -## -##if [[ $attempts -gt 120 ]]; then -## echo "Waited 4 hours for model file, but it did not appear." -## echo "Exiting..." 
-## exit 1 -##fi -## -##fi #endif for skipmodel - - -######################################################################## -# JGFSMOS_PREP47 -######################################################################## -if [[ "$skipprep" != "y" ]]; then - export job=gfsmos_prep_${cyc}_${pid} - export COMIN=$GFSDIR - jobCard=$HOMEmdl/jobs/JGFSMOS_STN_PREP -# Define DATA and create directory - export DATA=$STMPROOT/qprod/gfsmos_prep_${cyc} - export logfile=$dailylog/$job.out - export out_dir=$dailylog - - export NTASK=3 - export PTILE=3 - export OMP_NUM_THREADS=1 - - bsub -J $job \ - -o $logfile \ - -q $QUEUE \ - -x \ - -n $NTASK \ - -R "span[ptile=$PTILE]" \ - -W 0:25 \ - -P $ACCOUNT \ - $jobCard - -fi #end for skipprep - -######################################################################## -# JGFSMOS_FORECAST -######################################################################## -if [[ ! -d /gpfs/dell1/nco/ops/com/mos/prod/hry_mos.$PDY ]]; then - export ROTATE=/gpfs/dell2/mdl/mdlstat/noscrub/rotate - export COMINhry_mos=$PTMPROOT/hourly.$PDY - if [[ ! -d $COMINhry_mos ]]; then - mkdir -p $COMINhry_mos - fi - \cp $ROTATE/hry/${PDY}03 $COMINhry_mos/sfctbl.03 - \cp $ROTATE/hry/${PDY}09 $COMINhry_mos/sfctbl.09 - \cp $ROTATE/hry/${PDY}15 $COMINhry_mos/sfctbl.15 - \cp $ROTATE/hry/${PDY}21 $COMINhry_mos/sfctbl.21 -fi - -# Change COMIN to get files from user's PTMP "qprod" area -export COMIN=$COMOUT - -export job=gfsmos_fcst_${cyc}_${pid} -jobCard=$HOMEmdl/jobs/JGFSMOS_STN_FORECAST -export DATA=$STMPROOT/qprod/gfsmos_fcst_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog -# Set dependencies -if [[ "$skipprep" != "y" ]]; then - deps="done(gfsmos_prep_${cyc}_${pid})" -else - deps="" -fi -if [[ $cyc == "00" || $cyc == "12" ]] && [[ "$stnonly" != "Y" ]]; then - complist="metar pac cooprfcmeso goe higoe akgoe tstms" - complist2="copopo3 coptype akpopo3 akptype" -else - complist="metar cooprfcmeso tstms" - complist2="" -fi - -if [[ $cyc == "00" || $cyc == "12" ]] && [[ "$stnonly" != "Y" ]]; then - export NTASK=11 - export PTILE=1 - export OMP_NUM_THREADS=1 -elif [[ $cyc == "00" || $cyc == "12" ]] && [[ "$stnonly" == "Y" ]]; then - export NTASK=5 - export PTILE=5 - export OMP_NUM_THREADS=1 -else - export NTASK=4 - export PTILE=4 - export OMP_NUM_THREADS=1 -fi - -bsub -J ${job} \ - -o $logfile \ - -q ${QUEUE} \ - -x \ - -n $NTASK \ - -R "span[ptile=$PTILE]" \ - -W 0:20 \ - -P $ACCOUNT \ - -w "$deps" \ - $jobCard - -######################################################################## -# JGFSMOS_PRDGEN -######################################################################## -jobCard=$HOMEmdl/jobs/JGFSMOS_STN_PRDGEN -export job=gfsmos_prdgen_${cyc}_${pid} -# Change COMIN back to COMOUT -export COMIN=$COMOUT -# Define DATA and create directory -export DATA=$STMPROOT/qprod/gfsmos_prdgen_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog -# Set dependencies -deps="done(gfsmos_fcst_${cyc}_${pid})" -# Set Nodes -if [ $cyc -eq 00 -o $cyc -eq 12 ] && [[ "$stnonly" != "Y" ]]; then - #nodes='1*{select[craylinux && !vnode]} + 168*{select[craylinux && vnode]span[ptile=24]}' - export NTASK=1 - export PTILE=1 - export OMP_NUM_THREADS=20 -else - #nodes='1*{select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24]}' - export NTASK=1 - export PTILE=1 - export OMP_NUM_THREADS=1 -fi - -bsub -J ${job} \ - -o $logfile \ - -q ${QUEUE} \ - -x \ - -n $NTASK \ - -R "span[ptile=$PTILE]" \ - -W 0:30 \ - -P $ACCOUNT \ - -w "$deps" \ - $jobCard - 
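The short-range jobs above are chained purely through LSF dependency expressions: each submission names itself with -J and the next one waits on it with -w "done(<jobname>)", and several conditions can be joined with &&. A minimal sketch of that pattern, using hypothetical job and script names rather than the ones defined in this script, is:

    bsub -J mos_prep   -P "${ACCOUNT}" -q "${QUEUE}" prep.sh                          # no dependency; runs first
    bsub -J mos_fcst   -P "${ACCOUNT}" -q "${QUEUE}" -w "done(mos_prep)" fcst.sh      # waits for prep to finish cleanly
    bsub -J mos_prdgen -P "${ACCOUNT}" -q "${QUEUE}" -w "done(mos_fcst)" prdgen.sh    # waits for the forecast step

Because -w accepts a boolean expression, a downstream job can also wait on several predecessors at once, e.g. -w "done(mos_extfcst) && done(mos_prdgen)", which is how the extended-range product generation is gated here.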
-######################################################################## -# JGFSMOS_WX_PRDGEN (00z and 12z only) -######################################################################## -#if [ $cyc -eq 00 -o $cyc -eq 12 ]; then -# jobCard=$HOMEmdl/jobs/JGFSMOS_WX_PRDGEN -# export job=gfsmos_wx_prdgen_${cyc}_${pid} -# # Change COMIN back to COMOUT -# export COMIN=$COMOUT -# # Define DATA and create directory -# export DATA=$STMPROOT/qprod/gfsmos_wx_prdgen_${cyc} -# export logfile=$dailylog/$job.out -# export out_dir=$dailylog -# # Set dependencies -# deps="done(gfsmos_prdgen_${cyc}_${pid})" -# -# export NTASK=2 -# export PTILE=1 -# export OMP_NUM_THREADS=20 -# -# #bsub -J ${job} -oo $logfile -q ${QUEUE} -P MDLST-T2O \ -# # -W 1:00 -M 1000 \ -# # -extsched 'CRAYLINUX[]' \ -# # -R '1*{select[craylinux && !vnode]} + 48*{select[craylinux && vnode]span[ptile=24]}' \ -# # -w "$deps" \ -# # $jobCard -# -# bsub -J ${job} \ -# -o $logfile \ -# -q ${QUEUE} -x \ -# -n $NTASK \ -# -R "span[ptile=$PTILE]" \ -# -W 1:00 \ -# -M 3000 \ -# -P $ACCOUNT \ -# -w "$deps" \ -# $jobCard -#fi - -fi #endif short - -######################################################################## -######################################################################## -# EXTENDED-RANGE JOBS -######################################################################## -######################################################################## - -if [ "$range" == "ext" -o "$range" == "both" ]; then - -######################################################################## -# Wait for 1/4 degree model data -######################################################################## -#if [ "$skipmodel" != "y" ]; then -#let attempts=1 -#proj_list=$(seq -f %03g 252 12 384) -#for tau in $proj_list -#do -# while [[ $attempts -le 120 ]] -# do -# if [[ -f $GFSDIR/gfs.$cycle.pgrb2.0p25.f${tau} && -f $GFSDIR/gfs.$cycle.pgrb2.0p50.f${tau} ]]; then -# echo "Model file found. Proceeding to next..." -# break -# else -# if [[ $attempts -le 60 ]]; then -# sleep 60 -# else -# sleep 180 -# fi -# attempts=$((attempts+1)) -# fi -# done -#done -# -#if [[ $attempts -gt 120 ]]; then -# echo "Waited 4 hours for model file, but it did not appear." -# echo "Exiting..." -# exit 1 -#fi - -#fi #endif for skipmodel - -######################################################################## -# Wait for 1.0/2.5 degree GFS model files before running (Pacific GFS) -######################################################################## -#if [ "$skipmodel" != "y" ]; then -#let attempts1deg=1 -#proj_list=$(seq -f %03g 204 12 384) -#for tau in $proj_list -#do -# while [[ $attempts1deg -le 120 ]] -# do -## ERIC ENGLE 05/30/2018 - PATCH FOR FV3GFS WINTER 17/18 PARALLEL -# #if [[ -f $GFSDIR/gfs.$cycle.pgrb2.1p00.f384 && -f $GFSDIR/gfs.$cycle.pgrb2.2p50.f240 ]]; then -# if [ $cyc -eq 00 ]; then -# waitfile=$GFSDIR/gfs.$cycle.pgrb2.1p00.f384 -# else -# waitfile=$GFSDIR/gfs.$cycle.pgrb2.1p00.f180 -# fi -# if [[ -f $waitfile ]]; then -## ERIC ENGLE 05/30/2018 - PATCH FOR FV3GFS WINTER 17/18 PARALLEL -# #echo "1.0/2.5 degree model files found. Proceeding to next..." -# echo "1.0 degree model files found. Proceeding to next..." -# break -# else -# if [[ $attempts1deg -le 60 ]]; then -# sleep 60 -# else -# sleep 180 -# fi -# attempts1deg=$((aattempts1deg+1)) -# fi -# done -#done -# -#if [[ $attempts1deg -gt 120 ]]; then -# echo "Waited 4 hours for 1.0 degree model file, but it did not appear." -# echo "Exiting..." 
-# exit 1 -#fi - -#fi #endif for skipmodel - -######################################################################## -# JGFSMOS_EXT_PREP47 -######################################################################## -if [[ "$skipprep" != "y" ]]; then - export job=gfsmos_extprep_${cyc}_${pid} - export COMIN=$GFSDIR - jobCard=$HOMEmdl/jobs/JGFSMOS_EXT_STN_PREP -# Define DATA and create directory - export DATA=$STMPROOT/qprod/gfsmos_extprep_${cyc} - export logfile=$dailylog/$job.out - export out_dir=$dailylog - - export NTASK=2 - export PTILE=2 - export OMP_NUM_THREADS=1 - -# bsub -J $job \ -# -o $logfile \ -# -q $QUEUE \ -# -W 2:00 \ -# -M 2500 \ -# -P MDLST-T2O \ -# -extsched 'CRAYLINUX[]' \ -# -R '1*{select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24]}' \ -# $jobCard - -bsub -J ${job} \ - -o $logfile \ - -q ${QUEUE} \ - -n $NTASK \ - -R "span[ptile=$PTILE]" \ - -W 2:00 \ - -P $ACCOUNT \ - $jobCard - -fi #end for skipprep - -# Skip EXT_FORECAST for 06/18 -if [ $cyc -eq 00 -o $cyc -eq 12 ]; then -######################################################################## -# JGFSMOS_EXT_FORECAST -######################################################################## -if [[ ! -d /gpfs/dell1/nco/ops/com/mos/prod/hry_mos.$PDY ]]; then - export ROTATE=/gpfs/dell2/mdl/mdlstat/noscrub/rotate - export COMINhry_mos=$PTMPROOT/hourly.$PDY - if [[ ! -d $COMINhry_mos ]]; then - mkdir -p $COMINhry_mos - fi - \cp $ROTATE/hry/${PDY}03 $COMINhry_mos/sfctbl.03 - \cp $ROTATE/hry/${PDY}09 $COMINhry_mos/sfctbl.09 - \cp $ROTATE/hry/${PDY}15 $COMINhry_mos/sfctbl.15 - \cp $ROTATE/hry/${PDY}21 $COMINhry_mos/sfctbl.21 -fi - -# Change COMIN to get files from user's PTMP "qprod" area -export COMIN=$COMOUT - -export job=gfsmos_extfcst_${cyc}_${pid} -jobCard=$HOMEmdl/jobs/JGFSMOS_EXT_STN_FORECAST -export DATA=$STMPROOT/qprod/gfsmos_extfcst_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog -# Set dependencies -if [[ "$skipprep" != "y" && "$range" == "both" ]]; then - deps="done(gfsmos_extprep_${cyc}_${pid}) && done(gfsmos_fcst_${cyc}_${pid})" -elif [[ "$skipprep" != "y" && "$range" == "ext" ]]; then - deps="done(gfsmos_extprep_${cyc}_${pid})" -elif [[ "$skipprep" == "y" && "$range" == "ext" ]]; then - deps="" -else - deps="done(gfsmos_fcst_${cyc}_${pid})" -fi - -if [[ $stnonly != "Y" ]]; then - export NTASK=10 - export PTILE=1 - export OMP_NUM_THREADS=1 -else - export NTASK=3 - export PTILE=3 - export OMP_NUM_THREADS=1 -fi - -#bsub -J ${job} -oo $logfile -q $QUEUE -P MDLST-T2O \ -# -W 1:00 -M 2000 \ -# -extsched 'CRAYLINUX[]' \ -# -R '1*{select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24]}' \ -# -w "$deps" \ -# $jobCard - -bsub -J ${job} \ - -o $logfile \ - -q ${QUEUE} \ - -x \ - -n $NTASK \ - -R "span[ptile=$PTILE]" \ - -W 1:00 \ - -P $ACCOUNT \ - -w "$deps" \ - $jobCard - -fi #endif for skipping 06/18 ext_fcst -######################################################################## -# JGFSMOS_EXT_PRDGEN -######################################################################## -jobCard=$HOMEmdl/jobs/JGFSMOS_EXT_STN_PRDGEN -export job=gfsmos_extprdgen_${cyc}_${pid} -# Change COMIN back to COMOUT -export COMIN=$COMOUT -# Define DATA and create directory -export DATA=$STMPROOT/qprod/gfsmos_extprdgen_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog -# Set dependencies -if [[ "$cyc" == "06" || "$cyc" == "18" ]]; then - deps="done(gfsmos_prdgen_${cyc}_${pid})" -elif [[ "$range" == "both" ]]; then - deps="done(gfsmos_extfcst_${cyc}_${pid}) && 
done(gfsmos_prdgen_${cyc}_${pid})" -else - deps="done(gfsmos_extfcst_${cyc}_${pid})" -fi -# Set Nodes -if [ $cyc -eq 00 -o $cyc -eq 12 ] && [[ "$stnonly" != "Y" ]]; then - #nodes='1*{select[craylinux && !vnode]} + 168*{select[craylinux && vnode]span[ptile=24]}' - export NTASK=1 - export PTILE=1 - export OMP_NUM_THREADS=20 -else - #nodes='1*{select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24]}' - export NTASK=1 - export PTILE=1 - export OMP_NUM_THREADS=1 -fi - - -#bsub -J ${job} -oo $logfile -q ${QUEUE} -P MDLST-T2O \ -# -W 1:00 -M 2000 \ -# -extsched 'CRAYLINUX[]' \ -# -R "$nodes" \ -# -w "$deps" \ -# $jobCard - -bsub -J ${job} \ - -o $logfile \ - -q ${QUEUE} \ - -x \ - -n $NTASK \ - -R "span[ptile=$PTILE]" \ - -W 1:00 \ - -P $ACCOUNT \ - -w "$deps" \ - $jobCard - -# Exit here with $cyc is 06 or 18 -if [ $cyc -eq 06 -o $cyc -eq 18 ]; then - exit 0 -fi -######################################################################## -# JGFSMOS_WX_EXT_PRDGEN -######################################################################## -#jobCard=$HOMEmdl/jobs/JGFSMOS_WX_EXT_PRDGEN -#export job=gfsmos_wx_extprdgen_${cyc}_${pid} -## Change COMIN back to COMOUT -#export COMIN=$COMOUT -## Define DATA and create directory -#export DATA=$STMPROOT/qprod/gfsmos_wx_extprdgen_${cyc} -#export logfile=$dailylog/$job.out -#export out_dir=$dailylog -## Set dependencies -#if [[ "$range" == "both" ]]; then -# deps="done(gfsmos_extprdgen_${cyc}_${pid}) && done(gfsmos_wx_prdgen_${cyc}_${pid})" -#else -# deps="done(gfsmos_extprdgen_${cyc}_${pid})" -#fi -# -#export NTASK=1 -#export PTILE=1 -#export OMP_NUM_THREADS=20 - -##bsub -J ${job} -oo $logfile -q ${QUEUE} -P MDLST-T2O \ -## -W 1:00 -M 1000 \ -## -extsched 'CRAYLINUX[]' \ -## -R '1*{select[craylinux && !vnode]} + 48*{select[craylinux && vnode]span[ptile=24]}' \ -## -w "$deps" \ -## $jobCard - -#bsub -J ${job} \ -# -o $logfile \ -# -q ${QUEUE} -x \ -# -n $NTASK \ -# -R "span[ptile=$PTILE]" \ -# -W 1:00 \ -# -M 3000 \ -# -P $ACCOUNT \ -# -w "$deps" \ -# $jobCard - -fi #endif for ext/both - -#--------------------- -export SENDCOM=$SENDCOM_SAVE - -exit 0 diff --git a/scripts/run_gfsmos_master.sh.hera b/scripts/run_gfsmos_master.sh.hera deleted file mode 100755 index 04bfda61c5..0000000000 --- a/scripts/run_gfsmos_master.sh.hera +++ /dev/null @@ -1,833 +0,0 @@ -#!/bin/sh -####################################################################### -# run_gfsmos.sh -# -# History: -# 03/29/13 Scallion Created (Adapted from Eric Engle's -# "master" scripts) -# 12/03/13 Scallion Added ptype/wxgrid -# -# Purpose: -# To run the GFS-MOS operational suite -# -####################################################################### -set -x - -if (( $# > 1 )); then - echo "Incorrect number of arguments." - echo "Syntax: $0 [PDYCYC (optional)]" - echo "Exiting..." - exit 1 -fi - -export PATH=./:$PATH - -####################################################################### -# Only run on the dev machine -####################################################################### -#/u/Scott.Scallion/bin/run-on-dev.sh -#[[ $? -eq 1 ]] && exit 1 - -####################################################################### -# Source bash_profile to run with proper modules on cron -####################################################################### -#elim . ~/.bash_profile 1> /dev/null 2>&1 -#. 
~/.bash_profile - -####################################################################### -# Check the host to determine whether tide or gyre is prod -####################################################################### -#chkhost=$(hostname | cut -c1) -#if [[ $SITE == "GYRE" ]] || [[ $SITE == "SURGE" ]] || [[ $SITE == "VENUS" ]]; then -# gort="g" -#elif [[ $SITE == "TIDE" ]] || [[ $SITE == "LUNA" ]] || [[ $SITE == "MARS" ]]; then -# gort="t" -#fi - -####################################################################### -# Set global variables neede in the run script and/or each individual -# job script. -####################################################################### -#. $MODULESHOME/init/bash -#module purge 2>/dev/null -#module load EnvVars/1.0.2 2>/dev/null -#module load ips/18.0.1.163 2>/dev/null -#module load impi/18.0.1 2>/dev/null -#module load lsf/10.1 2>/dev/null -#module load prod_envir/1.0.3 2>/dev/null -#module load prod_util/1.1.3 2>/dev/null -#module load CFP/2.0.1 2>/dev/null -# -#module use -a /gpfs/dell1/nco/ops/nwpara/modulefiles/compiler_prod/ips/18.0.1 -#module load grib_util/1.1.1 2>/dev/null - -# HERA -module load intel/18.0.5.274 -module load impi/2018.0.4 - -module use /scratch2/NCEPDEV/nwprod/NCEPLIBS/modulefiles -module load bacio/2.0.3 -module load bufr/11.3.0 -module load g2/3.1.1 -module load jasper/1.900.1 -module load png/1.2.44 -module load w3emc/2.4.0 -module load w3nco/2.0.7 -module load z/1.2.11 - -module use /scratch1/NCEPDEV/mdl/nwprod/modulefiles -module load prod_util/1.0.14 - -module use /scratch1/NCEPDEV/mdl/apps/modulefiles -module load CFP/2.0.1 - -export FORT_BUFFERED=TRUE -export KMP_AFFINITY=disabled -export envir=prod -export RUN_ENVIR=${RUN_ENVIR:-""} -#export QUEUE=dev - -#------------------ -export SENDCOM=YES -export SENDCOM_SAVE=$SENDCOM -#------------------ - -#-------------------------------- -# COMDATEROOT defined by module prod_util -##export PTMPROOT=/gpfs/dell2/ptmp/$USER -##export STMPROOT=/gpfs/dell2/stmp/$USER -##export MODELROOT=/gpfs/dell2/mdl/mdlstat/noscrub/usr/$USER/nwprod -##export MODELDIR=$MODELROOT/gfsmos.v5.0.6 -##export CODEDIR=$MODELROOT/mos_shared.v2.6.1 - -export PTMPROOT=$ROTDIR/gfsmos.$PDY -export STMPROOT=$RUNDIR/gfsmos.$PDY -export DATAROOT=$STMPROOT -##export MODELROOT=/gpfs/dell2/emc/modeling/noscrub/emc.glopara/git/gfsmos -#export MODELROOT=/gpfs/dell2/mdl/mdlstat/noscrub/usr/Scott.Scallion/gfsv16 -#export MODELROOT=/scratch1/NCEPDEV/mdl/nwprod -export MODELROOT=/scratch1/NCEPDEV/global/glopara/git/global-workflow -#export MODELDIR=$MODELROOT/gfsmos.v5.2.0.1 -export MODELDIR=$MODELROOT/gfsmos.v5.2.0 -#export CODEDIR=/gpfs/dell1/nco/ops/nwpara/mos_shared.v2.6.5 -export CODEDIR=$MODELROOT/mos_shared.v2.6.5 -#-------------------------------- - -#-------------------------------- -##export PDY=20180419 -##export PDY=$(date -u +%Y%m%d) -##export prevday=$(date -u --date="${PDY} 0000 UTC 24 hours ago" +%Y%m%d) - -dateIn=$1 -#if [ $REALTIME = "YES" ]; then -# GDATE=$($NDATE -24 $dateIn) -# dateIn=$GDATE -#fi -export PDY=$(echo $dateIn | cut -c 1-8) -export cyc=$(echo $dateIn | cut -c 9-10) -export prevday=$($NDATE -24 ${PDY}00 | cut -c1-8) -#-------------------------------- - - -#-------------------------------- -##let hour=$(date -u +"%-H") -##if [[ $hour -ge 4 && $hour -lt 10 ]]; then -## export cyc=00 -## export range=both -##elif [[ $hour -ge 10 && $hour -lt 16 ]]; then -## export cyc=06 -## export range=both -##elif [[ $hour -ge 16 && $hour -lt 22 ]]; then -## export cyc=12 -## export range=both 
-##elif [[ $hour -ge 22 && $hour -le 23 ]]; then -## export cyc=18 -## export range=both -##else -## export cyc=18 -## export PDY=$prevday -## export range=both -##fi -## -##cyc_list="00 06 12 18" -##if [[ $# == 1 ]] && [[ $cyc_list =~ $1 ]]; then -## export cyc=$1 -## if [ "$cyc" == "00" -o "$cyc" == "12" ]; then -## export range=both -## else -## export range=both -## fi -##elif [[ $# == 1 ]]; then -## echo "$1 is not a valid cycle (choose 00 or 12)" -## exit 1 -##fi - -# ERIC ENGLE 05/30/2018 - PATCH FOR FV3GFS WINTER 17/18 PARALLEL -export range=${range:-"both"} - -#if [ $cyc -eq 00 -o $cyc -eq 12 ]; then -# if [ $cyc -eq 00 ]; then -# export range=both -# else -# export range=short -# fi -# ERIC ENGLE 05/30/2018 - PATCH FOR FV3GFS WINTER 17/18 PARALLEL -export stnonly='Y' -export skipmodel=n -export skipprep=n -export cycle="t${cyc}z" -export pid="gfs_qprod.$$" -export dailylog=$PTMPROOT/dailylog/log.$PDY -export jlogfile=$dailylog/jlogfile_gfsmos -mkdir -p $dailylog - -export SENDDBN=NO -export SENDDBN_NTC=NO -export GET_IOPROFILE=NO - -# Specify Execution Areas -export HOMEmdl=$MODELDIR -export HOMEcode=$CODEDIR -#export utilscript=/gpfs/hps/mdl/mdlstat/noscrub/usr/Scott.Scallion/ush - -if [ ! -d $HOMEmdl ]; then - echo "$HOMEmdl does not exist" - exit 1 -fi - -if [ ! -d $HOMEcode ]; then - echo "$HOMEcode does not exist" - exit 1 -fi - - -#------------------------------------- -# Define COMOUT (COMIN will vary by job) -#export GFSDIR=$COMROOThps/gfs/prod/gfs.$PDY -##export GFSDIR=$COMROOThps/gfs/prod/gfs.$PDY -##export COMINgfs=$GFSDIR -##export COMOUT=$PTMPROOT/qprod/gfsmos.$PDY - -export GFSDIR=$COMROOT/gfs/prod/gfs.${PDY} -if [[ "$RUN_ENVIR" = "emc" ]] ; then - export GFSDIR=$ROTDIR/gfs.${PDY} -fi -export COMINgfs=$GFSDIR -export COMOUT=$ROTDIR/gfsmos.$PDY - -#export COMINm1=$PTMPROOT/gfsmos.$prevday -#export COMINm1=$PTMPROOT/qprod/gfsmos_gmos_pre-nbmv2.$prevday -#export COMINm1=$PTMPROOT/qprod/gfsmos.$prevday - -if [[ ! -d $PTMPROOT/qprod ]]; then - mkdir -p $PTMPROOT/qprod -fi - -if [[ ! -d $COMOUT ]]; then - mkdir -p $COMOUT -fi - -export COMOUTwmo=$PTMPROOT/wmo - -if [[ ! -d $COMOUTwmo ]]; then - mkdir -p $COMOUTwmo -fi - - -# NOTE: On WCOSS_DELL_P3 the directory from which bsub -# is executed must exist when the submitted job -# begins. Otherwise, the submitted job fails -# with TERM_CWD_NOTEXIST error. - -mkdir -p $DATAROOT -cd $DATAROOT - - -if [ "$range" == "short" -o "$range" == "both" ]; then -######################################################################## -######################################################################## -# SHORT-RANGE JOBS -######################################################################## -######################################################################## - -######################################################################## -# Wait for 1 degree model data -######################################################################## -##if [ "$skipmodel" != "y" ]; then -##let attempts=1 -##while [[ $attempts -le 120 ]] -##do -## if [[ -f $GFSDIR/gfs.$cycle.pgrb2.1p00.f096 ]]; then -## echo "Model file found. Proceeding..." -## break -## else -## if [[ $attempts -le 60 ]]; then -## sleep 60 -## else -## sleep 180 -## fi -## attempts=$((attempts+1)) -## fi -##done -## -##if [[ $attempts -gt 120 ]]; then -## echo "Waited 4 hours for model file, but it did not appear." -## echo "Exiting..." 
-## exit 1 -##fi -## -##fi #endif for skipmodel - -######################################################################## -# Wait for 1/4 degree model data -######################################################################## -##if [ "$skipmodel" != "y" ]; then -##let attempts=1 -##while [[ $attempts -le 120 ]] -##do -## if [[ -f $GFSDIR/gfs.$cycle.pgrb2.0p25.f096 ]]; then -## echo "Model file found. Proceeding..." -## break -## else -## if [[ $attempts -le 60 ]]; then -## sleep 60 -## else -## sleep 180 -## fi -## attempts=$((attempts+1)) -## fi -##done - -##if [[ $attempts -gt 120 ]]; then -## echo "Waited 4 hours for model file, but it did not appear." -## echo "Exiting..." -## exit 1 -##fi -## -##fi #endif for skipmodel - -######################################################################## -# Wait for 1.0 degree GFS model files before running (Pacific MOS) -######################################################################## -##if [ "$skipmodel" != "y" ]; then -##let attempts=1 -##while [[ $attempts -le 120 ]] -##do -## if [[ -f $GFSDIR/gfs.$cycle.pgrb2.1p00.f096 ]]; then -## echo "1.0 degree model file found. Proceeding..." -## break -## else -## if [[ $attempts -le 60 ]]; then -## sleep 60 -## else -## sleep 180 -## fi -## attempts=$((attempts+1)) -## fi -##done -## -##if [[ $attempts -gt 120 ]]; then -## echo "Waited 4 hours for model file, but it did not appear." -## echo "Exiting..." -## exit 1 -##fi -## -##fi #endif for skipmodel - - -######################################################################## -# JGFSMOS_PREP47 -######################################################################## -if [[ "$skipprep" != "y" ]]; then - export job=gfsmos_prep_${cyc}_${pid} - export COMIN=$GFSDIR - jobCard=$HOMEmdl/jobs/JGFSMOS_STN_PREP -# Define DATA and create directory - export DATA=$STMPROOT/qprod/gfsmos_prep_${cyc} - export logfile=$dailylog/$job.out - export out_dir=$dailylog - - export NTASK=3 - export PTILE=3 - export OMP_NUM_THREADS=1 - -# bsub -J $job \ -# -o $logfile \ -# -q $QUEUE \ -# -x \ -# -n $NTASK \ -# -R "span[ptile=$PTILE]" \ -# -W 0:25 \ -# -P $ACCOUNT \ -# $jobCard - -# HERA (Slurm) -#sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --mem-per-cpu=1g -t 00:25:00 -o $logfile $jobCard -sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --mem-per-cpu=1g -t 01:00:00 -o $logfile $jobCard 1> temp -JGFSMOS_STN_PREP_JOBID=$(cat temp | sed 's/[A-Za-z ]//g') - -fi #end for skipprep - -######################################################################## -# JGFSMOS_FORECAST -######################################################################## -#if [[ ! -d /gpfs/dell1/nco/ops/com/mos/prod/hry_mos.$PDY ]]; then -# export ROTATE=/gpfs/dell2/mdl/mdlstat/noscrub/rotate -# #export COMINhry_mos=$PTMPROOT/hourly.$PDY -# export COMINhry_mos=/scratch1/NCEPDEV/mdl/Michael.N.Baker/hry/hry_mos.$PDY -# if [[ ! 
-d $COMINhry_mos ]]; then -# mkdir -p $COMINhry_mos -# fi -# \cp $ROTATE/hry/${PDY}03 $COMINhry_mos/sfctbl.03 -# \cp $ROTATE/hry/${PDY}09 $COMINhry_mos/sfctbl.09 -# \cp $ROTATE/hry/${PDY}15 $COMINhry_mos/sfctbl.15 -# \cp $ROTATE/hry/${PDY}21 $COMINhry_mos/sfctbl.21 -#fi -export COMINhry_mos=/scratch1/NCEPDEV/mdl/Michael.N.Baker/hry/hry_mos.$PDY - -# Change COMIN to get files from user's PTMP "qprod" area -export COMIN=$COMOUT - -export job=gfsmos_fcst_${cyc}_${pid} -jobCard=$HOMEmdl/jobs/JGFSMOS_STN_FORECAST -export DATA=$STMPROOT/qprod/gfsmos_fcst_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog -# Set dependencies -if [[ "$skipprep" != "y" ]]; then - #ORIG deps="done(gfsmos_prep_${cyc}_${pid})" - deps="afterany:$JGFSMOS_STN_PREP_JOBID" -else - deps="" -fi -if [[ $cyc == "00" || $cyc == "12" ]] && [[ "$stnonly" != "Y" ]]; then - complist="metar pac cooprfcmeso goe higoe akgoe tstms" - complist2="copopo3 coptype akpopo3 akptype" -else - complist="metar cooprfcmeso tstms" - complist2="" -fi - -if [[ $cyc == "00" || $cyc == "12" ]] && [[ "$stnonly" != "Y" ]]; then - export NTASK=11 - export PTILE=1 - export OMP_NUM_THREADS=1 -elif [[ $cyc == "00" || $cyc == "12" ]] && [[ "$stnonly" == "Y" ]]; then - export NTASK=5 - export PTILE=5 - export OMP_NUM_THREADS=1 -else - export NTASK=4 - export PTILE=4 - export OMP_NUM_THREADS=1 -fi - -#bsub -J ${job} \ -# -o $logfile \ -# -q ${QUEUE} \ -# -x \ -# -n $NTASK \ -# -R "span[ptile=$PTILE]" \ -# -W 0:20 \ -# -P $ACCOUNT \ -# -w "$deps" \ -# $jobCard - -# HERA (Slurm) -#sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --mem-per-cpu=4g -t 00:20:00 -o $logfile $jobCard -sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --mem-per-cpu=4g -t 01:00:00 -o $logfile --dependency=$deps $jobCard 1> temp -JGFSMOS_STN_FORECAST_JOBID=$(cat temp | sed 's/[A-Za-z ]//g') - -######################################################################## -# JGFSMOS_PRDGEN -######################################################################## -jobCard=$HOMEmdl/jobs/JGFSMOS_STN_PRDGEN -export job=gfsmos_prdgen_${cyc}_${pid} -# Change COMIN back to COMOUT -export COMIN=$COMOUT -# Define DATA and create directory -export DATA=$STMPROOT/qprod/gfsmos_prdgen_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog -# Set dependencies -#ORIG deps="done(gfsmos_fcst_${cyc}_${pid})" -deps="afterany:$JGFSMOS_STN_FORECAST_JOBID" -# Set Nodes -if [ $cyc -eq 00 -o $cyc -eq 12 ] && [[ "$stnonly" != "Y" ]]; then - #nodes='1*{select[craylinux && !vnode]} + 168*{select[craylinux && vnode]span[ptile=24]}' - export NTASK=1 - export PTILE=1 - #ORIG export OMP_NUM_THREADS=20 - export OMP_NUM_THREADS=1 -else - #nodes='1*{select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24]}' - export NTASK=1 - export PTILE=1 - export OMP_NUM_THREADS=1 -fi - -#bsub -J ${job} \ -# -o $logfile \ -# -q ${QUEUE} \ -# -x \ -# -n $NTASK \ -# -R "span[ptile=$PTILE]" \ -# -W 0:30 \ -# -P $ACCOUNT \ -# -w "$deps" \ -# $jobCard - -# HERA (Slurm) -#sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --mem-per-cpu=4g -t 00:30:00 -o $logfile $jobCard -#sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --mem-per-cpu=4g -t 02:00:00 -o $logfile --dependency=$deps $jobCard 1> temp -sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --exclusive -t 02:00:00 -o $logfile --dependency=$deps $jobCard 1> temp -JGFSMOS_STN_PRDGEN_JOBID=$(cat temp | sed 's/[A-Za-z ]//g') - 
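On Hera the same chain is driven by Slurm: each sbatch call writes its "Submitted batch job NNNN" message to a temp file, the digits are pulled out with sed, and the resulting ID is passed to the next submission as --dependency=afterany:<id>. Where the installed Slurm supports it, the --parsable flag avoids the temp file and the sed step, since sbatch then prints only the job ID (plus a cluster name on federated setups). A rough equivalent of the chaining above, with placeholder job and script names, might look like:

    # capture the numeric job ID directly from sbatch
    prep_id=$(sbatch --parsable -A "${ACCOUNT}" -J mos_prep -t 01:00:00 prep.sh)
    fcst_id=$(sbatch --parsable -A "${ACCOUNT}" -J mos_fcst -t 01:00:00 \
              --dependency=afterany:"${prep_id}" fcst.sh)
    sbatch -A "${ACCOUNT}" -J mos_prdgen -t 02:00:00 \
           --dependency=afterany:"${fcst_id}" prdgen.sh

afterany fires once the predecessor ends regardless of its exit status, matching the behaviour of the sed-based chain used here; afterok could be substituted if a downstream job should run only when its predecessor succeeds.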
-######################################################################## -# JGFSMOS_WX_PRDGEN (00z and 12z only) -######################################################################## -#if [ $cyc -eq 00 -o $cyc -eq 12 ]; then -# jobCard=$HOMEmdl/jobs/JGFSMOS_WX_PRDGEN -# export job=gfsmos_wx_prdgen_${cyc}_${pid} -# # Change COMIN back to COMOUT -# export COMIN=$COMOUT -# # Define DATA and create directory -# export DATA=$STMPROOT/qprod/gfsmos_wx_prdgen_${cyc} -# export logfile=$dailylog/$job.out -# export out_dir=$dailylog -# # Set dependencies -# deps="done(gfsmos_prdgen_${cyc}_${pid})" -# -# export NTASK=2 -# export PTILE=1 -# export OMP_NUM_THREADS=20 -# -# #bsub -J ${job} -oo $logfile -q ${QUEUE} -P MDLST-T2O \ -# # -W 1:00 -M 1000 \ -# # -extsched 'CRAYLINUX[]' \ -# # -R '1*{select[craylinux && !vnode]} + 48*{select[craylinux && vnode]span[ptile=24]}' \ -# # -w "$deps" \ -# # $jobCard -# -# bsub -J ${job} \ -# -o $logfile \ -# -q ${QUEUE} -x \ -# -n $NTASK \ -# -R "span[ptile=$PTILE]" \ -# -W 1:00 \ -# -M 3000 \ -# -P $ACCOUNT \ -# -w "$deps" \ -# $jobCard -#fi - -fi #endif short - -######################################################################## -######################################################################## -# EXTENDED-RANGE JOBS -######################################################################## -######################################################################## - -if [ "$range" == "ext" -o "$range" == "both" ]; then - -######################################################################## -# Wait for 1/4 degree model data -######################################################################## -#if [ "$skipmodel" != "y" ]; then -#let attempts=1 -#proj_list=$(seq -f %03g 252 12 384) -#for tau in $proj_list -#do -# while [[ $attempts -le 120 ]] -# do -# if [[ -f $GFSDIR/gfs.$cycle.pgrb2.0p25.f${tau} && -f $GFSDIR/gfs.$cycle.pgrb2.0p50.f${tau} ]]; then -# echo "Model file found. Proceeding to next..." -# break -# else -# if [[ $attempts -le 60 ]]; then -# sleep 60 -# else -# sleep 180 -# fi -# attempts=$((attempts+1)) -# fi -# done -#done -# -#if [[ $attempts -gt 120 ]]; then -# echo "Waited 4 hours for model file, but it did not appear." -# echo "Exiting..." -# exit 1 -#fi - -#fi #endif for skipmodel - -######################################################################## -# Wait for 1.0/2.5 degree GFS model files before running (Pacific GFS) -######################################################################## -#if [ "$skipmodel" != "y" ]; then -#let attempts1deg=1 -#proj_list=$(seq -f %03g 204 12 384) -#for tau in $proj_list -#do -# while [[ $attempts1deg -le 120 ]] -# do -## ERIC ENGLE 05/30/2018 - PATCH FOR FV3GFS WINTER 17/18 PARALLEL -# #if [[ -f $GFSDIR/gfs.$cycle.pgrb2.1p00.f384 && -f $GFSDIR/gfs.$cycle.pgrb2.2p50.f240 ]]; then -# if [ $cyc -eq 00 ]; then -# waitfile=$GFSDIR/gfs.$cycle.pgrb2.1p00.f384 -# else -# waitfile=$GFSDIR/gfs.$cycle.pgrb2.1p00.f180 -# fi -# if [[ -f $waitfile ]]; then -## ERIC ENGLE 05/30/2018 - PATCH FOR FV3GFS WINTER 17/18 PARALLEL -# #echo "1.0/2.5 degree model files found. Proceeding to next..." -# echo "1.0 degree model files found. Proceeding to next..." -# break -# else -# if [[ $attempts1deg -le 60 ]]; then -# sleep 60 -# else -# sleep 180 -# fi -# attempts1deg=$((aattempts1deg+1)) -# fi -# done -#done -# -#if [[ $attempts1deg -gt 120 ]]; then -# echo "Waited 4 hours for 1.0 degree model file, but it did not appear." -# echo "Exiting..." 
-# exit 1 -#fi - -#fi #endif for skipmodel - -######################################################################## -# JGFSMOS_EXT_PREP47 -######################################################################## -if [[ "$skipprep" != "y" ]]; then - export job=gfsmos_extprep_${cyc}_${pid} - export COMIN=$GFSDIR - jobCard=$HOMEmdl/jobs/JGFSMOS_EXT_STN_PREP -# Define DATA and create directory - export DATA=$STMPROOT/qprod/gfsmos_extprep_${cyc} - export logfile=$dailylog/$job.out - export out_dir=$dailylog - - export NTASK=2 - export PTILE=2 - export OMP_NUM_THREADS=1 - -# bsub -J $job \ -# -o $logfile \ -# -q $QUEUE \ -# -W 2:00 \ -# -M 2500 \ -# -P MDLST-T2O \ -# -extsched 'CRAYLINUX[]' \ -# -R '1*{select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24]}' \ -# $jobCard - -#bsub -J ${job} \ -# -o $logfile \ -# -q ${QUEUE} \ -# -n $NTASK \ -# -R "span[ptile=$PTILE]" \ -# -W 2:00 \ -# -P $ACCOUNT \ -# $jobCard - -# HERA (Slurm) -#sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --mem-per-cpu=4g -t 00:10:00 -o $logfile $jobCard -sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --mem-per-cpu=4g -t 01:00:00 -o $logfile $jobCard 1> temp -JGFSMOS_EXT_STN_PREP_JOBID=$(cat temp | sed 's/[A-Za-z ]//g') - -fi #end for skipprep - -# Skip EXT_FORECAST for 06/18 -if [ $cyc -eq 00 -o $cyc -eq 12 ]; then -######################################################################## -# JGFSMOS_EXT_FORECAST -######################################################################## -#if [[ ! -d /gpfs/dell1/nco/ops/com/mos/prod/hry_mos.$PDY ]]; then -# export ROTATE=/gpfs/dell2/mdl/mdlstat/noscrub/rotate -# #export COMINhry_mos=$PTMPROOT/hourly.$PDY -# export COMINhry_mos=/scratch1/NCEPDEV/mdl/Michael.N.Baker/hry/hry_mos.$PDY -# if [[ ! 
-d $COMINhry_mos ]]; then -# mkdir -p $COMINhry_mos -# fi -# \cp $ROTATE/hry/${PDY}03 $COMINhry_mos/sfctbl.03 -# \cp $ROTATE/hry/${PDY}09 $COMINhry_mos/sfctbl.09 -# \cp $ROTATE/hry/${PDY}15 $COMINhry_mos/sfctbl.15 -# \cp $ROTATE/hry/${PDY}21 $COMINhry_mos/sfctbl.21 -#fi -export COMINhry_mos=/scratch1/NCEPDEV/mdl/Michael.N.Baker/hry/hry_mos.$PDY - -# Change COMIN to get files from user's PTMP "qprod" area -export COMIN=$COMOUT - -export job=gfsmos_extfcst_${cyc}_${pid} -jobCard=$HOMEmdl/jobs/JGFSMOS_EXT_STN_FORECAST -export DATA=$STMPROOT/qprod/gfsmos_extfcst_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog -# Set dependencies -if [[ "$skipprep" != "y" && "$range" == "both" ]]; then - #ORIG deps="done(gfsmos_extprep_${cyc}_${pid}) && done(gfsmos_fcst_${cyc}_${pid})" - deps="afterany:${JGFSMOS_EXT_STN_PREP_JOBID},${JGFSMOS_STN_FORECAST_JOBID}" -elif [[ "$skipprep" != "y" && "$range" == "ext" ]]; then - #ORIG deps="done(gfsmos_extprep_${cyc}_${pid})" - deps="afterany:$JGFSMOS_EXT_STN_PREP_JOBID" -elif [[ "$skipprep" == "y" && "$range" == "ext" ]]; then - deps="" -else - #ORIG deps="done(gfsmos_fcst_${cyc}_${pid})" - deps="afterany:$JGFSMOS_STN_FORECAST_JOBID" -fi - -if [[ $stnonly != "Y" ]]; then - export NTASK=10 - export PTILE=1 - export OMP_NUM_THREADS=1 -else - export NTASK=3 - export PTILE=3 - export OMP_NUM_THREADS=1 -fi - -#bsub -J ${job} -oo $logfile -q $QUEUE -P MDLST-T2O \ -# -W 1:00 -M 2000 \ -# -extsched 'CRAYLINUX[]' \ -# -R '1*{select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24]}' \ -# -w "$deps" \ -# $jobCard - -#bsub -J ${job} \ -# -o $logfile \ -# -q ${QUEUE} \ -# -x \ -# -n $NTASK \ -# -R "span[ptile=$PTILE]" \ -# -W 1:00 \ -# -P $ACCOUNT \ -# -w "$deps" \ -# $jobCard - -# HERA (Slurm) -#sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --mem-per-cpu=4g -t 01:00:00 -o $logfile $jobCard -sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --mem-per-cpu=4g -t 02:00:00 -o $logfile --dependency=$deps $jobCard 1> temp -JGFSMOS_EXT_STN_FORECAST_JOBID=$(cat temp | sed 's/[A-Za-z ]//g') - -fi #endif for skipping 06/18 ext_fcst -######################################################################## -# JGFSMOS_EXT_PRDGEN -######################################################################## -jobCard=$HOMEmdl/jobs/JGFSMOS_EXT_STN_PRDGEN -export job=gfsmos_extprdgen_${cyc}_${pid} -# Change COMIN back to COMOUT -export COMIN=$COMOUT -# Define DATA and create directory -export DATA=$STMPROOT/qprod/gfsmos_extprdgen_${cyc} -export logfile=$dailylog/$job.out -export out_dir=$dailylog -# Set dependencies -if [[ "$cyc" == "06" || "$cyc" == "18" ]]; then - #ORIG deps="done(gfsmos_prdgen_${cyc}_${pid})" - deps="afterany:$JGFSMOS_STN_PRDGEN_JOBID" -elif [[ "$range" == "both" ]]; then - #ORIG deps="done(gfsmos_extfcst_${cyc}_${pid}) && done(gfsmos_prdgen_${cyc}_${pid})" - deps="afterany:${JGFSMOS_EXT_STN_FORECAST_JOBID},${JGFSMOS_STN_PRDGEN_JOBID}" -else - #ORIG deps="done(gfsmos_extfcst_${cyc}_${pid})" - deps="afterany:$JGFSMOS_EXT_STN_FORECAST" -fi -# Set Nodes -if [ $cyc -eq 00 -o $cyc -eq 12 ] && [[ "$stnonly" != "Y" ]]; then - #nodes='1*{select[craylinux && !vnode]} + 168*{select[craylinux && vnode]span[ptile=24]}' - export NTASK=1 - export PTILE=1 - #ORIG export OMP_NUM_THREADS=20 - export OMP_NUM_THREADS=1 -else - #nodes='1*{select[craylinux && !vnode]} + 24*{select[craylinux && vnode]span[ptile=24]}' - export NTASK=1 - export PTILE=1 - export OMP_NUM_THREADS=1 -fi - - -#bsub -J ${job} -oo $logfile -q 
${QUEUE} -P MDLST-T2O \ -# -W 1:00 -M 2000 \ -# -extsched 'CRAYLINUX[]' \ -# -R "$nodes" \ -# -w "$deps" \ -# $jobCard - -#bsub -J ${job} \ -# -o $logfile \ -# -q ${QUEUE} \ -# -x \ -# -n $NTASK \ -# -R "span[ptile=$PTILE]" \ -# -W 1:00 \ -# -P $ACCOUNT \ -# -w "$deps" \ -# $jobCard - -# HERA (Slurm) -#sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --mem-per-cpu=4g -t 01:00:00 -o $logfile $jobCard -#sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --mem-per-cpu=4g -t 02:00:00 -o $logfile --dependency=$deps $jobCard #NOTE: No need to redirect stdout. -sbatch -A $ACCOUNT -J $job -q batch -n $NTASK --ntasks-per-node=$PTILE --exclusive -t 02:00:00 -o $logfile --dependency=$deps $jobCard #NOTE: No need to redirect stdout. - -# Exit here with $cyc is 06 or 18 -if [ $cyc -eq 06 -o $cyc -eq 18 ]; then - exit 0 -fi -######################################################################## -# JGFSMOS_WX_EXT_PRDGEN -######################################################################## -#jobCard=$HOMEmdl/jobs/JGFSMOS_WX_EXT_PRDGEN -#export job=gfsmos_wx_extprdgen_${cyc}_${pid} -## Change COMIN back to COMOUT -#export COMIN=$COMOUT -## Define DATA and create directory -#export DATA=$STMPROOT/qprod/gfsmos_wx_extprdgen_${cyc} -#export logfile=$dailylog/$job.out -#export out_dir=$dailylog -## Set dependencies -#if [[ "$range" == "both" ]]; then -# deps="done(gfsmos_extprdgen_${cyc}_${pid}) && done(gfsmos_wx_prdgen_${cyc}_${pid})" -#else -# deps="done(gfsmos_extprdgen_${cyc}_${pid})" -#fi -# -#export NTASK=1 -#export PTILE=1 -#export OMP_NUM_THREADS=20 - -##bsub -J ${job} -oo $logfile -q ${QUEUE} -P MDLST-T2O \ -## -W 1:00 -M 1000 \ -## -extsched 'CRAYLINUX[]' \ -## -R '1*{select[craylinux && !vnode]} + 48*{select[craylinux && vnode]span[ptile=24]}' \ -## -w "$deps" \ -## $jobCard - -#bsub -J ${job} \ -# -o $logfile \ -# -q ${QUEUE} -x \ -# -n $NTASK \ -# -R "span[ptile=$PTILE]" \ -# -W 1:00 \ -# -M 3000 \ -# -P $ACCOUNT \ -# -w "$deps" \ -# $jobCard - -fi #endif for ext/both - -#--------------------- -export SENDCOM=$SENDCOM_SAVE - -exit 0 diff --git a/scripts/run_reg2grb2.sh b/scripts/run_reg2grb2.sh index e1b1e927bf..ab2c80043e 100755 --- a/scripts/run_reg2grb2.sh +++ b/scripts/run_reg2grb2.sh @@ -1,10 +1,11 @@ -#!/bin/bash -set -x +#! 
/usr/bin/env bash + +source "${HOMEgfs}/ush/preamble.sh" #requires grib_util module -MOM6REGRID=${MOM6REGRID:-$HOMEgfs} -export mask_file=$MOM6REGRID/fix/fix_reg2grb2/mask.0p25x0p25.grb2 +MOM6REGRID=${MOM6REGRID:-${HOMEgfs}} +export mask_file="${MOM6REGRID}/fix/reg2grb2/mask.0p25x0p25.grb2" # offline testing: #export DATA= @@ -13,25 +14,25 @@ export mask_file=$MOM6REGRID/fix/fix_reg2grb2/mask.0p25x0p25.grb2 #export outfile=$DATA/DATA0p5/out/ocnh2012010106.01.2012010100.grb2 # # workflow testing: -export icefile=icer${CDATE}.${ENSMEM}.${IDATE}_0p25x0p25_CICE.nc -export ocnfile=ocnr${CDATE}.${ENSMEM}.${IDATE}_0p25x0p25_MOM6.nc -export outfile=ocn_ice${CDATE}.${ENSMEM}.${IDATE}_0p25x0p25.grb2 -export outfile0p5=ocn_ice${CDATE}.${ENSMEM}.${IDATE}_0p5x0p5.grb2 +export icefile="icer${VDATE}.${ENSMEM}.${IDATE}_0p25x0p25_CICE.nc" +export ocnfile="ocnr${VDATE}.${ENSMEM}.${IDATE}_0p25x0p25_MOM6.nc" +export outfile="ocn_ice${VDATE}.${ENSMEM}.${IDATE}_0p25x0p25.grb2" +export outfile0p5="ocn_ice${VDATE}.${ENSMEM}.${IDATE}_0p5x0p5.grb2" export mfcstcpl=${mfcstcpl:-1} export IGEN_OCNP=${IGEN_OCNP:-197} # PT This is the forecast date -export year=$(echo $CDATE | cut -c1-4) -export month=$(echo $CDATE | cut -c5-6) -export day=$(echo $CDATE | cut -c7-8) -export hour=$(echo $CDATE | cut -c9-10) +export year=${VDATE:0:4} +export month=${VDATE:4:2} +export day=${VDATE:6:2} +export hour=${VDATE:8:2} # PT This is the initialization date -export syear=$(echo $IDATE | cut -c1-4) -export smonth=$(echo $IDATE | cut -c5-6) -export sday=$(echo $IDATE | cut -c7-8) -export shour=$(echo $IDATE | cut -c9-10) +export syear=${IDATE:0:4} +export smonth=${IDATE:4:2} +export sday=${IDATE:6:2} +export shour=${IDATE:8:2} # PT Need to get this from above - could be 6 or 1 hour export hh_inc_ocn=6 @@ -62,10 +63,10 @@ export flatn=90. export flonw=0.0 export flone=359.75 -ln -sf $mask_file ./iceocnpost.g2 -$executable > reg2grb2.$CDATE.$IDATE.out +ln -sf "${mask_file}" ./iceocnpost.g2 +${executable} > "reg2grb2.${VDATE}.${IDATE}.out" # interpolated from 0p25 to 0p5 grid grid2p05="0 6 0 0 0 0 0 0 720 361 0 0 90000000 0 48 -90000000 359500000 500000 500000 0" -#### $NWPROD/util/exec/copygb2 -g "${grid2p05}" -i0 -x $outfile $outfile0p5 -$COPYGB2 -g "${grid2p05}" -i0 -x $outfile $outfile0p5 +${COPYGB2} -g "${grid2p05}" -i0 -x "${outfile}" "${outfile0p5}" + diff --git a/scripts/run_regrid.sh b/scripts/run_regrid.sh index 2e59e0aafe..103e9a759e 100755 --- a/scripts/run_regrid.sh +++ b/scripts/run_regrid.sh @@ -1,25 +1,27 @@ -#!/bin/bash -set -x +#! 
/usr/bin/env bash -echo "Entered $0" -MOM6REGRID=${MOM6REGRID:-$HOMEgfs} -export EXEC_DIR=$MOM6REGRID/exec -export USH_DIR=$MOM6REGRID/ush -export COMOUTocean=$COMOUTocean -export COMOUTice=$COMOUTice -export IDATE=$IDATE -export ENSMEM=$ENSMEM -export FHR=$fhr -export DATA=$DATA -export FIXreg2grb2=$FIXreg2grb2 +source "${HOMEgfs}/ush/preamble.sh" + +MOM6REGRID="${MOM6REGRID:-${HOMEgfs}}" +export EXEC_DIR="${MOM6REGRID}/exec" +export USH_DIR="${MOM6REGRID}/ush" +export COMOUTocean="${COM_OCEAN_HISTORY}" +export COMOUTice="${COM_ICE_HISTORY}" +export IDATE="${IDATE}" +export VDATE="${VDATE}" +export ENSMEM="${ENSMEM}" +export FHR="${fhr}" +export DATA="${DATA}" +export FIXreg2grb2="${FIXreg2grb2}" ###### DO NOT MODIFY BELOW UNLESS YOU KNOW WHAT YOU ARE DOING ####### #Need NCL module to be loaded: -echo $NCARG_ROOT -export NCL=$NCARG_ROOT/bin/ncl +echo "${NCARG_ROOT}" +export NCL="${NCARG_ROOT}/bin/ncl" ls -alrt -$NCL $USH_DIR/icepost.ncl -$NCL $USH_DIR/ocnpost.ncl +${NCL} "${USH_DIR}/icepost.ncl" +${NCL} "${USH_DIR}/ocnpost.ncl" ##################################################################### + diff --git a/sorc/build_all.sh b/sorc/build_all.sh index 343b4f2324..eaba2485d3 100755 --- a/sorc/build_all.sh +++ b/sorc/build_all.sh @@ -12,218 +12,281 @@ set +x # END USER DEFINED STUFF #------------------------------------ function _usage() { - cat <<-EOF - Builds all of the global-workflow components by calling the individual build - scripts in sequence. - - Usage: $BASH_SOURCE [-a UFS_app][-c build_config][-h][-v] - -a UFS_app: - Build a specific UFS app instead of the default - -c build_config: - Selectively build based on the provided config instead of the default config - -h: - print this help message and exit - -v: - Execute all build scripts with -v option to turn on verbose where supported - - EOF - exit 1 + cat << EOF +Builds all of the global-workflow components by calling the individual build + scripts in sequence. + +Usage: ${BASH_SOURCE[0]} [-a UFS_app][-c build_config][-h][-v] + -a UFS_app: + Build a specific UFS app instead of the default + -c build_config: + Selectively build based on the provided config instead of the default config + -h: + print this help message and exit + -v: + Execute all build scripts with -v option to turn on verbose where supported +EOF + exit 1 } +script_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")" &> /dev/null && pwd) +cd "${script_dir}" || exit 1 + _build_ufs_opt="" +_ops_opt="" _verbose_opt="" +_partial_opt="" # Reset option counter in case this script is sourced OPTIND=1 -while getopts ":a:c:hv" option; do - case "${option}" in - a) _build_ufs_opt+="-a ${OPTARG} ";; - c) _partial_opt+="-c ${OPTARG} ";; - h) _usage;; - # s) _build_ufs_opt+="-s ${OPTARG} ";; - v) _verbose_opt="-v";; - \?) - echo "[$BASH_SOURCE]: Unrecognized option: ${option}" - usage - ;; - :) - echo "[$BASH_SOURCE]: ${option} requires an argument" - usage - ;; - esac +while getopts ":a:c:hov" option; do + case "${option}" in + a) _build_ufs_opt+="-a ${OPTARG} ";; + c) _partial_opt+="-c ${OPTARG} ";; + h) _usage;; + o) _ops_opt+="-o";; + v) _verbose_opt="-v";; + :) + echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" + usage + ;; + *) + echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" + usage + ;; + esac done shift $((OPTIND-1)) -build_dir=$(pwd) -logs_dir=$build_dir/logs -if [ ! -d $logs_dir ]; then - echo "Creating logs folder" - mkdir $logs_dir +logs_dir="${script_dir}/logs" +if [[ ! 
-d "${logs_dir}" ]]; then + echo "Creating logs folder" + mkdir "${logs_dir}" || exit 1 fi # Check final exec folder exists -if [ ! -d "../exec" ]; then - echo "Creating ../exec folder" - mkdir ../exec +if [[ ! -d "../exec" ]]; then + echo "Creating ../exec folder" + mkdir ../exec fi #------------------------------------ # GET MACHINE #------------------------------------ -target="" -source ./machine-setup.sh > /dev/null 2>&1 +export COMPILER="intel" +source gfs_utils.fd/ush/detect_machine.sh +source gfs_utils.fd/ush/module-setup.sh +if [[ -z "${MACHINE_ID}" ]]; then + echo "FATAL: Unable to determine target machine" + exit 1 +fi #------------------------------------ -# INCLUDE PARTIAL BUILD +# INCLUDE PARTIAL BUILD #------------------------------------ -source ./partial_build.sh $_verbose_opt $_partial_opt - -if [ $target = jet ]; then - Build_gsi=false - Build_gldas=false - Build_gfs_util=false - Build_ww3_prepost=false -fi +# Turn off some shellcheck warnings because we want to have +# variables with multiple arguments. +# shellcheck disable=SC2086,SC2248 +source ./partial_build.sh ${_verbose_opt} ${_partial_opt} +# shellcheck disable= #------------------------------------ # Exception Handling Init #------------------------------------ +# Disable shellcheck warning about single quotes not being substituted. +# shellcheck disable=SC2016 ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} +# shellcheck disable= err=0 #------------------------------------ -# build WW3 pre & post execs +# build gfs_utils #------------------------------------ -$Build_ww3_prepost && { - echo " .... Building WW3 pre and post execs .... " - ./build_ww3prepost.sh ${_verbose_opt} > $logs_dir/build_ww3_prepost.log 2>&1 - rc=$? - if [[ $rc -ne 0 ]] ; then - echo "Fatal error in building WW3 pre/post processing." - echo "The log file is in $logs_dir/build_ww3_prepost.log" - fi - ((err+=$rc)) -} +if [[ ${Build_gfs_utils} == 'true' ]]; then + echo " .... Building gfs_utils .... " + # shellcheck disable=SC2086,SC2248 + ./build_gfs_utils.sh ${_verbose_opt} > "${logs_dir}/build_gfs_utils.log" 2>&1 + # shellcheck disable= + rc=$? + if (( rc != 0 )) ; then + echo "Fatal error in building gfs_utils." + echo "The log file is in ${logs_dir}/build_gfs_utils.log" + fi + err=$((err + rc)) +fi #------------------------------------ -# build forecast model +# build WW3 pre & post execs #------------------------------------ -$Build_ufs_model && { - echo " .... Building forecast model .... " - ./build_ufs.sh $_verbose_opt ${_build_ufs_opt} > $logs_dir/build_ufs.log 2>&1 - rc=$? - if [[ $rc -ne 0 ]] ; then - echo "Fatal error in building UFS model." - echo "The log file is in $logs_dir/build_ufs.log" - fi - ((err+=$rc)) -} +if [[ ${Build_ww3_prepost} == "true" ]]; then + echo " .... Building WW3 pre and post execs .... " + # shellcheck disable=SC2086,SC2248 + ./build_ww3prepost.sh ${_verbose_opt} ${_build_ufs_opt} > "${logs_dir}/build_ww3_prepost.log" 2>&1 + # shellcheck disable= + rc=$? + if (( rc != 0 )) ; then + echo "Fatal error in building WW3 pre/post processing." + echo "The log file is in ${logs_dir}/build_ww3_prepost.log" + fi + err=$((err + rc)) +fi #------------------------------------ -# build gsi +# build forecast model #------------------------------------ -$Build_gsi && { - echo " .... Building gsi .... " - ./build_gsi.sh $_verbose_opt > $logs_dir/build_gsi.log 2>&1 - rc=$? - if [[ $rc -ne 0 ]] ; then - echo "Fatal error in building gsi." 
- echo "The log file is in $logs_dir/build_gsi.log" - fi - ((err+=$rc)) -} +if [[ ${Build_ufs_model} == 'true' ]]; then + echo " .... Building forecast model .... " + # shellcheck disable=SC2086,SC2248 + ./build_ufs.sh ${_verbose_opt} ${_build_ufs_opt} > "${logs_dir}/build_ufs.log" 2>&1 + # shellcheck disable= + rc=$? + if (( rc != 0 )) ; then + echo "Fatal error in building UFS model." + echo "The log file is in ${logs_dir}/build_ufs.log" + fi + err=$((err + rc)) +fi #------------------------------------ -# build UPP +# build GSI and EnKF - optional checkout #------------------------------------ -$Build_upp && { - echo " .... Building UPP .... " - ./build_upp.sh $_verbose_opt > $logs_dir/build_upp.log 2>&1 - rc=$? - if [[ $rc -ne 0 ]] ; then - echo "Fatal error in building UPP." - echo "The log file is in $logs_dir/build_upp.log" - fi - ((err+=$rc)) -} +if [[ -d gsi_enkf.fd ]]; then + if [[ ${Build_gsi_enkf} == 'true' ]]; then + echo " .... Building gsi and enkf .... " + # shellcheck disable=SC2086,SC2248 + ./build_gsi_enkf.sh ${_ops_opt} ${_verbose_opt} > "${logs_dir}/build_gsi_enkf.log" 2>&1 + # shellcheck disable= + rc=$? + if (( rc != 0 )) ; then + echo "Fatal error in building gsi_enkf." + echo "The log file is in ${logs_dir}/build_gsi_enkf.log" + fi + err=$((err + rc)) + fi +else + echo " .... Skip building gsi and enkf .... " +fi #------------------------------------ -# build ufs_utils +# build gsi utilities #------------------------------------ -$Build_ufs_utils && { - echo " .... Building ufs_utils .... " - ./build_ufs_utils.sh $_verbose_opt > $logs_dir/build_ufs_utils.log 2>&1 - rc=$? - if [[ $rc -ne 0 ]] ; then - echo "Fatal error in building ufs_utils." - echo "The log file is in $logs_dir/build_ufs_utils.log" - fi - ((err+=$rc)) -} +if [[ -d gsi_utils.fd ]]; then + if [[ ${Build_gsi_utils} == 'true' ]]; then + echo " .... Building gsi utilities .... " + # shellcheck disable=SC2086,SC2248 + ./build_gsi_utils.sh ${_ops_opt} ${_verbose_opt} > "${logs_dir}/build_gsi_utils.log" 2>&1 + # shellcheck disable= + rc=$? + if (( rc != 0 )) ; then + echo "Fatal error in building gsi utilities." + echo "The log file is in ${logs_dir}/build_gsi_utils.log" + fi + err=$((err + rc)) + fi +else + echo " .... Skip building gsi utilities .... " +fi #------------------------------------ -# build gldas +# build gdas - optional checkout #------------------------------------ -$Build_gldas && { - echo " .... Building gldas .... " - ./build_gldas.sh $_verbose_opt > $logs_dir/build_gldas.log 2>&1 - rc=$? - if [[ $rc -ne 0 ]] ; then - echo "Fatal error in building gldas." - echo "The log file is in $logs_dir/build_gldas.log" - fi - ((err+=$rc)) -} +if [[ -d gdas.cd ]]; then + if [[ ${Build_gdas} == 'true' ]]; then + echo " .... Building GDASApp .... " + # shellcheck disable=SC2086,SC2248 + ./build_gdas.sh ${_verbose_opt} > "${logs_dir}/build_gdas.log" 2>&1 + # shellcheck disable= + rc=$? + if (( rc != 0 )) ; then + echo "Fatal error in building GDASApp." + echo "The log file is in ${logs_dir}/build_gdas.log" + fi + err=$((err + rc)) + fi +else + echo " .... Skip building GDASApp .... " +fi #------------------------------------ -# build gfs_wafs - optional checkout +# build gsi monitor #------------------------------------ -if [ -d gfs_wafs.fd ]; then - $Build_gfs_wafs && { - echo " .... Building gfs_wafs .... " - ./build_gfs_wafs.sh $_verbose_opt > $logs_dir/build_gfs_wafs.log 2>&1 - rc=$? - if [[ $rc -ne 0 ]] ; then - echo "Fatal error in building gfs_wafs." 
- echo "The log file is in $logs_dir/build_gfs_wafs.log" - fi - ((err+=$rc)) -} +if [[ -d gsi_monitor.fd ]]; then + if [[ ${Build_gsi_monitor} == 'true' ]]; then + echo " .... Building gsi monitor .... " + # shellcheck disable=SC2086,SC2248 + ./build_gsi_monitor.sh ${_ops_opt} ${_verbose_opt} > "${logs_dir}/build_gsi_monitor.log" 2>&1 + # shellcheck disable= + rc=$? + if (( rc != 0 )) ; then + echo "Fatal error in building gsi monitor." + echo "The log file is in ${logs_dir}/build_gsi_monitor.log" + fi + err=$((err + rc)) + fi +else + echo " .... Skip building gsi monitor .... " fi #------------------------------------ -# build workflow_utils +# build UPP #------------------------------------ -$Build_workflow_utils && { - echo " .... Building workflow_utils .... " - target=$target ./build_workflow_utils.sh $_verbose_opt > $logs_dir/build_workflow_utils.log 2>&1 - rc=$? - if [[ $rc -ne 0 ]] ; then - echo "Fatal error in building workflow_utils." - echo "The log file is in $logs_dir/build_workflow_utils.log" - fi - ((err+=$rc)) -} +if [[ ${Build_upp} == 'true' ]]; then + echo " .... Building UPP .... " + # shellcheck disable=SC2086,SC2248 + ./build_upp.sh ${_ops_opt} ${_verbose_opt} > "${logs_dir}/build_upp.log" 2>&1 + # shellcheck disable= + rc=$? + if (( rc != 0 )) ; then + echo "Fatal error in building UPP." + echo "The log file is in ${logs_dir}/build_upp.log" + fi + err=$((err + rc)) +fi #------------------------------------ -# build gfs_util +# build ufs_utils #------------------------------------ -$Build_gfs_util && { - echo " .... Building gfs_util .... " - ./build_gfs_util.sh $_verbose_opt > $logs_dir/build_gfs_util.log 2>&1 - rc=$? - if [[ $rc -ne 0 ]] ; then - echo "Fatal error in building gfs_util." - echo "The log file is in $logs_dir/build_gfs_util.log" - fi - ((err+=$rc)) -} +if [[ ${Build_ufs_utils} == 'true' ]]; then + echo " .... Building ufs_utils .... " + # shellcheck disable=SC2086,SC2248 + ./build_ufs_utils.sh ${_verbose_opt} > "${logs_dir}/build_ufs_utils.log" 2>&1 + # shellcheck disable= + rc=$? + if (( rc != 0 )) ; then + echo "Fatal error in building ufs_utils." + echo "The log file is in ${logs_dir}/build_ufs_utils.log" + fi + err=$((err + rc)) +fi + +#------------------------------------ +# build gfs_wafs - optional checkout +#------------------------------------ +if [[ -d gfs_wafs.fd ]]; then + if [[ ${Build_gfs_wafs} == 'true' ]]; then + echo " .... Building gfs_wafs .... " + # shellcheck disable=SC2086,SC2248 + ./build_gfs_wafs.sh ${_verbose_opt} > "${logs_dir}/build_gfs_wafs.log" 2>&1 + # shellcheck disable= + rc=$? + if (( rc != 0 )) ; then + echo "Fatal error in building gfs_wafs." + echo "The log file is in ${logs_dir}/build_gfs_wafs.log" + fi + err=$((err + rc)) + fi +fi #------------------------------------ # Exception Handling #------------------------------------ -[[ $err -ne 0 ]] && echo "FATAL BUILD ERROR: Please check the log file for detail, ABORT!" -$ERRSCRIPT || exit $err +if (( err != 0 )); then + cat << EOF +BUILD ERROR: One or more components failed to build + Check the associated build log(s) for details. +EOF + ${ERRSCRIPT} || exit "${err}" +fi echo;echo " .... Build system finished .... " diff --git a/sorc/build_gdas.sh b/sorc/build_gdas.sh new file mode 100755 index 0000000000..39cf5ac9a7 --- /dev/null +++ b/sorc/build_gdas.sh @@ -0,0 +1,29 @@ +#! 
/usr/bin/env bash +set -eux + +OPTIND=1 +while getopts ":dov" option; do + case "${option}" in + d) export BUILD_TYPE="DEBUG";; + v) export BUILD_VERBOSE="YES";; + :) + echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" + usage + ;; + *) + echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" + usage + ;; + esac +done +shift $((OPTIND-1)) + +# TODO: GDASApp does not presently handle BUILD_TYPE + +BUILD_TYPE=${BUILD_TYPE:-"Release"} \ +BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \ +BUILD_JOBS="${BUILD_JOBS:-8}" \ +WORKFLOW_BUILD="ON" \ +./gdas.cd/build.sh + +exit diff --git a/sorc/build_gfs_util.sh b/sorc/build_gfs_util.sh deleted file mode 100755 index 4844bf0068..0000000000 --- a/sorc/build_gfs_util.sh +++ /dev/null @@ -1,21 +0,0 @@ -#! /usr/bin/env bash -set -eux - -source ./machine-setup.sh > /dev/null 2>&1 -export dir=$( pwd ) - -cd ../util/sorc - -# Check for gfs_util folders exist -if [ ! -d "./mkgfsawps.fd" ]; then - echo " " - echo " GFS_UTIL folders DO NOT exist " - echo " " - exit -fi - -echo "" -echo " Building ... Executables for GFS_UTILITIES " -echo "" - -source ./compile_gfs_util_wcoss.sh diff --git a/sorc/build_gfs_utils.sh b/sorc/build_gfs_utils.sh new file mode 100755 index 0000000000..2a7a611239 --- /dev/null +++ b/sorc/build_gfs_utils.sh @@ -0,0 +1,45 @@ +#! /usr/bin/env bash +set -eux + +function usage() { + cat << EOF +Builds the GFS utility programs. + +Usage: ${BASH_SOURCE[0]} [-d][-h][-v] + -d: + Build with debug options + -h: + Print this help message and exit + -v: + Turn on verbose output +EOF + exit 1 +} + +cwd=$(pwd) + +OPTIND=1 +while getopts ":dvh" option; do + case "${option}" in + d) export BUILD_TYPE="DEBUG";; + v) export BUILD_VERBOSE="YES";; + h) + usage + ;; + :) + echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" + usage + ;; + *) + echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" + usage + ;; + esac +done +shift $((OPTIND-1)) + +BUILD_TYPE=${BUILD_TYPE:-"Release"} \ +BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \ +"${cwd}/gfs_utils.fd/ush/build.sh" + +exit diff --git a/sorc/build_gfs_wafs.sh b/sorc/build_gfs_wafs.sh index 7ddde2d678..cbbf6ec950 100755 --- a/sorc/build_gfs_wafs.sh +++ b/sorc/build_gfs_wafs.sh @@ -1,11 +1,11 @@ #! /usr/bin/env bash set -eux -source ./machine-setup.sh > /dev/null 2>&1 -cwd=$(pwd) +script_dir=$(dirname "${BASH_SOURCE[0]}") +cd "${script_dir}" || exit 1 # Check final exec folder exists -if [ ! -d "../exec" ]; then +if [[ ! -d "../exec" ]]; then mkdir ../exec fi diff --git a/sorc/build_gldas.sh b/sorc/build_gldas.sh deleted file mode 100755 index 635c2bee17..0000000000 --- a/sorc/build_gldas.sh +++ /dev/null @@ -1,16 +0,0 @@ -#! /usr/bin/env bash -set -eux - -source ./machine-setup.sh > /dev/null 2>&1 -cwd=$(pwd) - -# Check final exec folder exists -if [ ! -d "../exec" ]; then - mkdir ../exec -fi - -cd gldas.fd/sorc -./build_all_gldas.sh - -exit - diff --git a/sorc/build_gsi.sh b/sorc/build_gsi.sh deleted file mode 100755 index beb46c1bd9..0000000000 --- a/sorc/build_gsi.sh +++ /dev/null @@ -1,20 +0,0 @@ -#! /usr/bin/env bash -set -eux - -source ./machine-setup.sh > /dev/null 2>&1 -cwd=$(pwd) - -gsitarget=$target -[[ "$target" == wcoss_cray ]] && gsitarget=cray - -# Check final exec folder exists -if [ ! 
-d "../exec" ]; then - mkdir ../exec -fi - -cd gsi.fd/ush/ -./build_all_cmake.sh "PRODUCTION" "$cwd/gsi.fd" -##./build_all_cmake.sh "PRODUCTION" "$cwd/gsi.fd" "NCO" # use this line for pruned NCO install - -exit - diff --git a/sorc/build_gsi_enkf.sh b/sorc/build_gsi_enkf.sh new file mode 100755 index 0000000000..671c3d6205 --- /dev/null +++ b/sorc/build_gsi_enkf.sh @@ -0,0 +1,30 @@ +#! /usr/bin/env bash +set -eux + +OPTIND=1 +while getopts ":dov" option; do + case "${option}" in + d) export BUILD_TYPE="DEBUG";; + o) _ops="YES";; + v) export BUILD_VERBOSE="YES";; + :) + echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" + usage + ;; + *) + echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" + usage + ;; + esac +done +shift $((OPTIND-1)) + +BUILD_TYPE=${BUILD_TYPE:-"Release"} \ +BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \ +GSI_MODE=GFS \ +ENKF_MODE=GFS \ +REGRESSION_TESTS=NO \ +./gsi_enkf.fd/ush/build.sh + +exit + diff --git a/sorc/build_gsi_monitor.sh b/sorc/build_gsi_monitor.sh new file mode 100755 index 0000000000..ec3645e52f --- /dev/null +++ b/sorc/build_gsi_monitor.sh @@ -0,0 +1,28 @@ +#! /usr/bin/env bash +set -eux + +cwd=$(pwd) + +OPTIND=1 +while getopts ":dov" option; do + case "${option}" in + d) export BUILD_TYPE="DEBUG";; + o) _ops="YES";; + v) export BUILD_VERBOSE="YES";; + :) + echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" + usage + ;; + *) + echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" + usage + ;; + esac +done +shift $((OPTIND-1)) + +BUILD_TYPE=${BUILD_TYPE:-"Release"} \ +BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \ +"${cwd}/gsi_monitor.fd/ush/build.sh" + +exit diff --git a/sorc/build_gsi_utils.sh b/sorc/build_gsi_utils.sh new file mode 100755 index 0000000000..bcbc110cf6 --- /dev/null +++ b/sorc/build_gsi_utils.sh @@ -0,0 +1,29 @@ +#! /usr/bin/env bash +set -eux + +cwd=$(pwd) + +OPTIND=1 +while getopts ":dov" option; do + case "${option}" in + d) export BUILD_TYPE="DEBUG";; + o) _ops="YES";; # TODO - unused; remove? + v) export BUILD_VERBOSE="YES";; + :) + echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" + usage + ;; + *) + echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" + usage + ;; + esac +done +shift $((OPTIND-1)) + +BUILD_TYPE=${BUILD_TYPE:-"Release"} \ +BUILD_VERBOSE=${BUILD_VERBOSE:-"NO"} \ +UTIL_OPTS="-DBUILD_UTIL_ENKF_GFS=ON -DBUILD_UTIL_NCIO=ON" \ +"${cwd}/gsi_utils.fd/ush/build.sh" + +exit diff --git a/sorc/build_haiqin_fork.sh b/sorc/build_haiqin_fork.sh new file mode 100755 index 0000000000..d28e0d3f81 --- /dev/null +++ b/sorc/build_haiqin_fork.sh @@ -0,0 +1,43 @@ +#! 
/usr/bin/env bash +set -eux + +cwd=$(pwd) + +# Default settings +APP="S2SWA" +CCPP_SUITES="FV3_GFS_v16,FV3_GFS_v16_no_nsst,FV3_GFS_v16_ugwpv1,FV3_GFS_v17_p8,FV3_GFS_v16_coupled_nsstNoahmpUGWPv1,FV3_GFS_v17_coupled_p8,FV3_GFS_v17_p8_mynn,FV3_GFS_v17_p8_c3" +#JKHCCPP_SUITES="FV3_GFS_v16,FV3_GFS_v16_no_nsst,FV3_GFS_v16_ugwpv1,FV3_GFS_v17_p8,FV3_GFS_v16_coupled_nsstNoahmpUGWPv1,FV3_GFS_v17_coupled_p8" + +export RT_COMPILER="intel" +source "${cwd}/ufs_model.fd_haiqin_fork/tests/detect_machine.sh" +source "${cwd}/ufs_model.fd_haiqin_fork/tests/module-setup.sh" + +while getopts ":da:v" option; do + case "${option}" in + d) BUILD_TYPE="DEBUG";; + a) APP="${OPTARG}" ;; + v) export BUILD_VERBOSE="YES";; + :) + echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" + ;; + *) + echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" + ;; + esac +done + +cd "${cwd}/ufs_model.fd_haiqin_fork" + +MAKE_OPT="-DAPP=${APP} -D32BIT=ON -DCCPP_SUITES=${CCPP_SUITES}" +[[ ${BUILD_TYPE:-"Release"} = "DEBUG" ]] && MAKE_OPT+=" -DDEBUG=ON" +COMPILE_NR=0 +CLEAN_BEFORE=YES +CLEAN_AFTER=NO + +./tests/compile.sh "${MACHINE_ID}" "${MAKE_OPT}" "${COMPILE_NR}" "${RT_COMPILER}" "${CLEAN_BEFORE}" "${CLEAN_AFTER}" +#./tests/compile.sh "${MACHINE_ID}" "${MAKE_OPT}" "${COMPILE_NR}" "${CLEAN_BEFORE}" "${CLEAN_AFTER}" +mv "./tests/fv3_${COMPILE_NR}.exe" ./tests/ufs_model.x +mv "./tests/modules.fv3_${COMPILE_NR}.lua" ./tests/modules.ufs_model.lua +cp "./modulefiles/ufs_common.lua" ./tests/ufs_common.lua + +exit 0 diff --git a/sorc/build_ufs.sh b/sorc/build_ufs.sh index f7aa638c3d..10e4b74d77 100755 --- a/sorc/build_ufs.sh +++ b/sorc/build_ufs.sh @@ -1,46 +1,45 @@ #! /usr/bin/env bash set -eux +cwd=$(pwd) + # Default settings APP="S2SWA" -CCPP_SUITES="FV3_GFS_v16,FV3_GFS_v16_ugwpv1,FV3_RAP_noah_sfcdiff_unified_ugwp,FV3_GFS_v17_p8,FV3_GFS_v17_p8_mynn,FV3_GFS_v17_p8_gf_mynn" -#JKHCCPP_SUITES="FV3_GFS_v16,FV3_GFS_v16_ugwpv1,FV3_GFS_v17_p8,FV3_GFS_v16_coupled_nsstNoahmpUGWPv1,FV3_GFS_v17_coupled_p8" +CCPP_SUITES="FV3_GFS_v16,FV3_GFS_v16_no_nsst,FV3_GFS_v16_ugwpv1,FV3_GFS_v17_p8,FV3_GFS_v16_coupled_nsstNoahmpUGWPv1,FV3_GFS_v17_coupled_p8,FV3_GFS_v17_p8_mynn,FV3_GFS_v17_p8_thompson,FV3_GFS_v17_p8_c3" +#JKHCCPP_SUITES="FV3_GFS_v16,FV3_GFS_v16_no_nsst,FV3_GFS_v16_ugwpv1,FV3_GFS_v17_p8,FV3_GFS_v16_coupled_nsstNoahmpUGWPv1,FV3_GFS_v17_coupled_p8,FV3_GFS_v17_p8_mynn,FV3_GFS_v17_p8_thompson" +#JKHCCPP_SUITES="FV3_GFS_v16,FV3_GFS_v16_no_nsst,FV3_GFS_v16_ugwpv1,FV3_GFS_v17_p8,FV3_GFS_v16_coupled_nsstNoahmpUGWPv1,FV3_GFS_v17_coupled_p8" -while getopts "a:s:v" option; do +export RT_COMPILER="intel" +source "${cwd}/ufs_model.fd/tests/detect_machine.sh" +source "${cwd}/ufs_model.fd/tests/module-setup.sh" + +while getopts ":da:v" option; do case "${option}" in + d) BUILD_TYPE="DEBUG";; a) APP="${OPTARG}" ;; - # s) CCPP_SUITES="${OPTARG}";; - v) BUILD_VERBOSE="YES";; + v) export BUILD_VERBOSE="YES";; + :) + echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" + ;; *) - echo "Unrecognized option: ${1}" - exit 1 + echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" ;; esac done -source ./machine-setup.sh > /dev/null 2>&1 -cwd=$(pwd) +cd "${cwd}/ufs_model.fd" + +MAKE_OPT="-DAPP=${APP} -D32BIT=ON -DCCPP_SUITES=${CCPP_SUITES}" +[[ ${BUILD_TYPE:-"Release"} = "DEBUG" ]] && MAKE_OPT+=" -DDEBUG=ON" +COMPILE_NR=0 +CLEAN_BEFORE=YES +CLEAN_AFTER=NO + +./tests/compile.sh "${MACHINE_ID}" "${MAKE_OPT}" "${COMPILE_NR}" "${RT_COMPILER}" "${CLEAN_BEFORE}" "${CLEAN_AFTER}" +## for UFS versions before 31May23 +## ./tests/compile.sh "${MACHINE_ID}" 
"${MAKE_OPT}" "${COMPILE_NR}" "${CLEAN_BEFORE}" "${CLEAN_AFTER}" +mv "./tests/fv3_${COMPILE_NR}.exe" ./tests/ufs_model.x +mv "./tests/modules.fv3_${COMPILE_NR}.lua" ./tests/modules.ufs_model.lua +cp "./modulefiles/ufs_common.lua" ./tests/ufs_common.lua -# Set target platform -case "${target}" in - hera|orion|stampede|jet|cheyenne) - target=${target}.intel - ;; -esac - -MOD_PATH=$cwd/ufs_model.fd/modulefiles - -cd ufs_model.fd/ -set +x -module purge -module use ${MOD_PATH} -module load ufs_${target} -set -x - -# Remove previous build directory if it exists -if [ -d build ]; then - rm -R build -fi -mkdir -p build && cd build -cmake -DAPP=${APP} -DCCPP_SUITES=${CCPP_SUITES} .. -OMP_NUM_THREADS=1 make -j ${BUILD_JOBS:-8} VERBOSE=${BUILD_VERBOSE:-} +exit 0 diff --git a/sorc/build_ufs_12jun23.sh b/sorc/build_ufs_12jun23.sh new file mode 100755 index 0000000000..45b5e37276 --- /dev/null +++ b/sorc/build_ufs_12jun23.sh @@ -0,0 +1,43 @@ +#! /usr/bin/env bash +set -eux + +cwd=$(pwd) + +# Default settings +APP="S2SWA" +#JKHCCPP_SUITES="FV3_GFS_v16,FV3_GFS_v16_no_nsst,FV3_GFS_v16_ugwpv1,FV3_GFS_v17_p8,FV3_GFS_v16_coupled_nsstNoahmpUGWPv1,FV3_GFS_v17_coupled_p8,FV3_GFS_v17_p8_mynn,FV3_GFS_v17_p8_thompson,FV3_GFS_v17_p8_c3" +CCPP_SUITES="FV3_GFS_v16,FV3_GFS_v16_no_nsst,FV3_GFS_v16_ugwpv1,FV3_GFS_v17_p8,FV3_GFS_v16_coupled_nsstNoahmpUGWPv1,FV3_GFS_v17_coupled_p8,FV3_GFS_v17_p8_mynn,FV3_GFS_v17_p8_thompson" +#JKHCCPP_SUITES="FV3_GFS_v16,FV3_GFS_v16_no_nsst,FV3_GFS_v16_ugwpv1,FV3_GFS_v17_p8,FV3_GFS_v16_coupled_nsstNoahmpUGWPv1,FV3_GFS_v17_coupled_p8" + +export RT_COMPILER="intel" +source "${cwd}/ufs_model.fd_12jun23/tests/detect_machine.sh" +source "${cwd}/ufs_model.fd_12jun23/tests/module-setup.sh" + +while getopts ":da:v" option; do + case "${option}" in + d) BUILD_TYPE="DEBUG";; + a) APP="${OPTARG}" ;; + v) export BUILD_VERBOSE="YES";; + :) + echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" + ;; + *) + echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" + ;; + esac +done + +cd "${cwd}/ufs_model.fd_12jun23" + +MAKE_OPT="-DAPP=${APP} -D32BIT=ON -DCCPP_SUITES=${CCPP_SUITES}" +[[ ${BUILD_TYPE:-"Release"} = "DEBUG" ]] && MAKE_OPT+=" -DDEBUG=ON" +COMPILE_NR=0 +CLEAN_BEFORE=YES +CLEAN_AFTER=NO + +./tests/compile.sh "${MACHINE_ID}" "${MAKE_OPT}" "${COMPILE_NR}" "${RT_COMPILER}" "${CLEAN_BEFORE}" "${CLEAN_AFTER}" +mv "./tests/fv3_${COMPILE_NR}.exe" ./tests/ufs_model.x +mv "./tests/modules.fv3_${COMPILE_NR}.lua" ./tests/modules.ufs_model.lua +cp "./modulefiles/ufs_common.lua" ./tests/ufs_common.lua + +exit 0 diff --git a/sorc/build_ufs_joefork.sh b/sorc/build_ufs_joefork.sh new file mode 100755 index 0000000000..0094889d1f --- /dev/null +++ b/sorc/build_ufs_joefork.sh @@ -0,0 +1,42 @@ +#! 
/usr/bin/env bash +set -eux + +cwd=$(pwd) + +# Default settings +APP="S2SWA" +CCPP_SUITES="FV3_GFS_v17_p8,FV3_GFS_v17_coupled_p8,FV3_GFS_v17_p8_mynn,FV3_GFS_v17_p8_thompson,FV3_GFS_v17_p8_c3" +#JKHCCPP_SUITES="FV3_GFS_v16,FV3_GFS_v16_no_nsst,FV3_GFS_v16_ugwpv1,FV3_GFS_v17_p8,FV3_GFS_v16_coupled_nsstNoahmpUGWPv1,FV3_GFS_v17_coupled_p8" + +export RT_COMPILER="intel" +source "${cwd}/ufs_model.fd_joe_fork/tests/detect_machine.sh" +source "${cwd}/ufs_model.fd_joe_fork/tests/module-setup.sh" + +while getopts ":da:v" option; do + case "${option}" in + d) BUILD_TYPE="DEBUG";; + a) APP="${OPTARG}" ;; + v) export BUILD_VERBOSE="YES";; + :) + echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" + ;; + *) + echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" + ;; + esac +done + +cd "${cwd}/ufs_model.fd_joe_fork" + +MAKE_OPT="-DAPP=${APP} -D32BIT=ON -DCCPP_SUITES=${CCPP_SUITES}" +[[ ${BUILD_TYPE:-"Release"} = "DEBUG" ]] && MAKE_OPT+=" -DDEBUG=ON" +COMPILE_NR=0 +CLEAN_BEFORE=YES +CLEAN_AFTER=NO + +./tests/compile.sh "${MACHINE_ID}" "${MAKE_OPT}" "${COMPILE_NR}" "${RT_COMPILER}" "${CLEAN_BEFORE}" "${CLEAN_AFTER}" +mv "./tests/fv3_${COMPILE_NR}.exe" ./tests/ufs_model.x +mv "./tests/modules.fv3_${COMPILE_NR}.lua" ./tests/modules.ufs_model.lua +cp "./modulefiles/ufs_common.lua" ./tests/ufs_common.lua + +exit 0 diff --git a/sorc/build_ufs_utils.sh b/sorc/build_ufs_utils.sh index 0eb978825c..5e2edf0737 100755 --- a/sorc/build_ufs_utils.sh +++ b/sorc/build_ufs_utils.sh @@ -1,16 +1,10 @@ #! /usr/bin/env bash set -eux -source ./machine-setup.sh > /dev/null 2>&1 -cwd=$(pwd) +script_dir=$(dirname "${BASH_SOURCE[0]}") +cd "${script_dir}/ufs_utils.fd" || exit 1 -if [ $target = wcoss_dell_p3 ]; then target=dell; fi -if [ $target = wcoss_cray ]; then target=cray; fi - - -cd ufs_utils.fd - -./build_all.sh +CMAKE_OPTS="-DGFS=ON" ./build_all.sh exit diff --git a/sorc/build_upp.sh b/sorc/build_upp.sh index 9732730ba2..67460487a6 100755 --- a/sorc/build_upp.sh +++ b/sorc/build_upp.sh @@ -1,13 +1,33 @@ #! /usr/bin/env bash set -eux -source ./machine-setup.sh > /dev/null 2>&1 -cwd=$(pwd) +script_dir=$(dirname "${BASH_SOURCE[0]}") +cd "${script_dir}" || exit 1 + +OPTIND=1 +_opts="" +while getopts ":dov" option; do + case "${option}" in + d) export BUILD_TYPE="DEBUG";; + o) _opts+="-g ";; + v) _opts+="-v ";; + :) + echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" + usage + ;; + *) + echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" + usage + ;; + esac +done +shift $((OPTIND-1)) # Check final exec folder exists -if [ ! -d "../exec" ]; then +if [[ ! -d "../exec" ]]; then mkdir ../exec fi cd ufs_model.fd/FV3/upp/tests -./compile_upp.sh +# shellcheck disable=SC2086 +./compile_upp.sh ${_opts} diff --git a/sorc/build_ww3prepost.sh b/sorc/build_ww3prepost.sh index e4a4313089..bf78e7b2ac 100755 --- a/sorc/build_ww3prepost.sh +++ b/sorc/build_ww3prepost.sh @@ -1,37 +1,70 @@ -#!/bin/sh +#! 
/usr/bin/env bash set -x +script_dir=$(dirname "${BASH_SOURCE[0]}") +cd "${script_dir}" || exit 1 + +export RT_COMPILER="intel" +source "${script_dir}/ufs_model.fd/tests/detect_machine.sh" +source "${script_dir}/ufs_model.fd/tests/module-setup.sh" + +# Default settings +APP="S2SWA" + +while getopts "a:v" option; do + case "${option}" in + a) APP="${OPTARG}" ;; + v) export BUILD_VERBOSE="YES";; + :) + echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" + usage + ;; + *) + echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" + usage + ;; + esac +done + + +# Determine which switch to use +if [[ "${APP}" == "ATMW" ]]; then + ww3switch="model/esmf/switch" +else + ww3switch="model/bin/switch_meshcap" +fi + + # Check final exec folder exists -if [ ! -d "../exec" ]; then +if [[ ! -d "../exec" ]]; then mkdir ../exec fi -finalexecdir=$( pwd -P )/../exec +finalexecdir="$( pwd -P )/../exec" #Determine machine and load modules set +x -source ./machine-setup.sh > /dev/null 2>&1 - -module use ../modulefiles -module load modulefile.ww3.$target +module use "${script_dir}/ufs_model.fd/modulefiles" +module load "ufs_${MACHINE_ID}" set -x #Set WW3 directory, switch, prep and post exes -cd ufs_model.fd/WW3 -export WW3_DIR=$( pwd -P ) -export SWITCHFILE="${WW3_DIR}/model/esmf/switch" +cd ufs_model.fd/WW3 || exit 1 +WW3_DIR=$( pwd -P ) +export WW3_DIR +export SWITCHFILE="${WW3_DIR}/${ww3switch}" # Build exes for prep jobs and post jobs: prep_exes="ww3_grid ww3_prep ww3_prnc ww3_grid" post_exes="ww3_outp ww3_outf ww3_outp ww3_gint ww3_ounf ww3_ounp ww3_grib" #create build directory: -path_build=$WW3_DIR/build_SHRD -mkdir -p $path_build -cd $path_build +path_build="${WW3_DIR}/build_SHRD" +mkdir -p "${path_build}" || exit 1 +cd "${path_build}" || exit 1 echo "Forcing a SHRD build" -echo $(cat ${SWITCHFILE}) > ${path_build}/tempswitch +cat "${SWITCHFILE}" > "${path_build}/tempswitch" sed -e "s/DIST/SHRD/g"\ -e "s/OMPG / /g"\ @@ -41,44 +74,44 @@ sed -e "s/DIST/SHRD/g"\ -e "s/B4B / /g"\ -e "s/PDLIB / /g"\ -e "s/NOGRB/NCEP2/g"\ - ${path_build}/tempswitch > ${path_build}/switch -rm ${path_build}/tempswitch + "${path_build}/tempswitch" > "${path_build}/switch" +rm "${path_build}/tempswitch" -echo "Switch file is $path_build/switch with switches:" -cat $path_build/switch +echo "Switch file is ${path_build}/switch with switches:" +cat "${path_build}/switch" #Build executables: -cmake $WW3_DIR -DSWITCH=$path_build/switch -DCMAKE_INSTALL_PREFIX=install +cmake "${WW3_DIR}" -DSWITCH="${path_build}/switch" -DCMAKE_INSTALL_PREFIX=install rc=$? -if [[ $rc -ne 0 ]] ; then +if (( rc != 0 )); then echo "Fatal error in cmake." - exit $rc + exit "${rc}" fi make -j 8 rc=$? -if [[ $rc -ne 0 ]] ; then +if (( rc != 0 )); then echo "Fatal error in make." - exit $rc + exit "${rc}" fi make install -if [[ $rc -ne 0 ]] ; then +if (( rc != 0 )); then echo "Fatal error in make install." - exit $rc + exit "${rc}" fi # Copy to top-level exe directory -for prog in $prep_exes $post_exes; do - cp $path_build/install/bin/$prog $finalexecdir/ +for prog in ${prep_exes} ${post_exes}; do + cp "${path_build}/install/bin/${prog}" "${finalexecdir}/" rc=$? 
- if [[ $rc -ne 0 ]] ; then - echo "FATAL: Unable to copy $path_build/$prog to $finalexecdir (Error code $rc)" - exit $rc + if (( rc != 0 )); then + echo "FATAL: Unable to copy ${path_build}/${prog} to ${finalexecdir} (Error code ${rc})" + exit "${rc}" fi done #clean-up build directory: -echo "executables are in $finalexecdir" -echo "cleaning up $path_build" -rm -rf $path_build +echo "executables are in ${finalexecdir}" +echo "cleaning up ${path_build}" +rm -rf "${path_build}" exit 0 diff --git a/sorc/checkout-haiqin.sh b/sorc/checkout-haiqin.sh new file mode 100755 index 0000000000..76d208a666 --- /dev/null +++ b/sorc/checkout-haiqin.sh @@ -0,0 +1,2 @@ + +git clone -b develop-c3 --recursive https://github.com/haiqinli/ufs-weather-model ufs_model.fd_haiqin_fork diff --git a/sorc/checkout-joe.sh b/sorc/checkout-joe.sh new file mode 100755 index 0000000000..146e28836c --- /dev/null +++ b/sorc/checkout-joe.sh @@ -0,0 +1,4 @@ + +git clone -b HFIP2023 --recursive https://github.com/joeolson42/ufs-weather-model ufs_model.fd_joe_fork + + diff --git a/sorc/checkout.sh b/sorc/checkout.sh index 04687ebba3..6fe70f5f9d 100755 --- a/sorc/checkout.sh +++ b/sorc/checkout.sh @@ -1,136 +1,207 @@ -#!/bin/sh -#set -xue -set -x - -while getopts "om:" option; do - case $option in - o) - echo "Received -o flag for optional checkout of operational-only codes" - checkout_gtg="YES" - checkout_wafs="YES" - ;; - m) - echo "Received -m flag with argument, will check out ufs-weather-model hash $OPTARG instead of default" - ufs_model_hash=$OPTARG - ;; - :) - echo "option -$OPTARG needs an argument" - ;; - *) - echo "invalid option -$OPTARG, exiting..." - exit - ;; - esac -done +#! /usr/bin/env bash -topdir=$(pwd) -logdir="${topdir}/logs" -mkdir -p ${logdir} - -echo ufs-weather-model checkout ... -if [[ ! -d ufs_model.fd ]] ; then - git clone https://github.com/ufs-community/ufs-weather-model ufs_model.fd >> ${logdir}/checkout-ufs_model.log 2>&1 - cd ufs_model.fd - git checkout ${ufs_model_hash:-889254a} ## 27Mar2023 - git submodule update --init --recursive - - ################################################################################ - # checkout_gtg - ## yes: The gtg code at NCAR private repository is available for ops. GFS only. - # Only approved persons/groups have access permission. - ## no: No need to check out gtg code for general GFS users. - ################################################################################ - checkout_gtg=${checkout_gtg:-"NO"} - if [[ ${checkout_gtg} == "YES" ]] ; then - cd FV3/upp - ./manage_externals/checkout_externals - cp sorc/post_gtg.fd/*F90 sorc/ncep_post.fd/. - cp sorc/post_gtg.fd/gtg.config.gfs parm/gtg.config.gfs - fi +set +x +set -u + +function usage() { + cat << EOF +Clones and checks out external components necessary for + global workflow. If the directory already exists, skip + cloning and just check out the requested version (unless + -c option is used). 
+ +Usage: ${BASH_SOURCE[0]} [-c][-h][-m ufs_hash][-o][-g][-u] + -c: + Create a fresh clone (delete existing directories) + -h: + Print this help message and exit + -m ufs_hash: + Check out this UFS hash instead of the default + -o: + Check out operational-only code (GTG and WAFS) + -g: + Check out GSI for GSI-based DA + -u: + Check out GDASApp for UFS-based DA +EOF + exit 1 +} + +function checkout() { + # + # Clone or fetch repo, then checkout specific hash and update submodules + # + # Environment variables: + # topdir [default: $(pwd)]: parent directory to your checkout + # logdir [default: $(pwd)]: where you want logfiles written + # CLEAN [default: NO]: whether to delete existing directories and create a fresh clone + # + # Usage: checkout + # + # Arguments + # dir: Directory for the clone + # remote: URL of the remote repository + # version: Commit to check out; should always be a specific commit (hash or tag), not a branch + # + # Returns + # Exit code of last failed command, or 0 if successful + # + + dir="$1" + remote="$2" + version="$3" + recursive=${4:-"YES"} + + name=$(echo "${dir}" | cut -d '.' -f 1) + echo "Performing checkout of ${name}" - cd ${topdir} - if [[ -d ufs_model.fd_gsl ]]; then - rsync -avx ufs_model.fd_gsl/ ufs_model.fd/ ## copy over GSL changes not in UFS repository + logfile="${logdir:-$(pwd)}/checkout_${name}.log" + + if [[ -f "${logfile}" ]]; then + rm "${logfile}" + fi + + cd "${topdir}" || exit 1 + if [[ -d "${dir}" && ${CLEAN} == "YES" ]]; then + echo "|-- Removing existing clone in ${dir}" + rm -Rf "${dir}" + fi + if [[ ! -d "${dir}" ]]; then + echo "|-- Cloning from ${remote} into ${dir}" + git clone "${remote}" "${dir}" >> "${logfile}" 2>&1 + status=$? + if ((status > 0)); then + echo " WARNING: Error while cloning ${name}" + echo + return "${status}" fi + cd "${dir}" || exit 1 + else + # Fetch any updates from server + cd "${dir}" || exit 1 + echo "|-- Fetching updates from ${remote}" + git fetch + fi + echo "|-- Checking out ${version}" + git checkout "${version}" >> "${logfile}" 2>&1 + status=$? + if ((status > 0)); then + echo " WARNING: Error while checking out ${version} in ${name}" + echo + return "${status}" + fi + if [[ "${recursive}" == "YES" ]]; then + echo "|-- Updating submodules (if any)" + git submodule update --init --recursive >> "${logfile}" 2>&1 + status=$? + if ((status > 0)); then + echo " WARNING: Error while updating submodules of ${name}" + echo + return "${status}" + fi + fi + echo + return 0 +} + +# Set defaults for variables toggled by options +export CLEAN="NO" +checkout_gsi="NO" +checkout_gdas="NO" +checkout_gtg="NO" +checkout_wafs="NO" + +# Parse command line arguments +while getopts ":chgum:o" option; do + case ${option} in + c) + echo "Received -c flag, will delete any existing directories and start clean" + export CLEAN="YES" + ;; + g) + echo "Received -g flag for optional checkout of GSI-based DA" + checkout_gsi="YES" + ;; + h) usage;; + u) + echo "Received -u flag for optional checkout of UFS-based DA" + checkout_gdas="YES" + ;; + o) + echo "Received -o flag for optional checkout of operational-only codes" + checkout_gtg="YES" + checkout_wafs="YES" + ;; + m) + echo "Received -m flag with argument, will check out ufs-weather-model hash ${OPTARG} instead of default" + ufs_model_hash=${OPTARG} + ;; + :) + echo "option -${OPTARG} needs an argument" + usage + ;; + *) + echo "invalid option -${OPTARG}, exiting..."
+ usage + ;; + esac +done +shift $((OPTIND-1)) + +topdir=$(cd "$(dirname "${BASH_SOURCE[0]}")" &> /dev/null && pwd) +export topdir +export logdir="${topdir}/logs" +mkdir -p "${logdir}" + +# The checkout version should always be a specific commit (hash or tag), not a branch +errs=0 +checkout "gfs_utils.fd" "https://github.com/NOAA-EMC/gfs-utils" "8965258" ; errs=$((errs + $?)) +checkout "ufs_utils.fd" "https://github.com/ufs-community/UFS_UTILS.git" "72a0471" ; errs=$((errs + $?)) +## 14apr23 checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-2247060}" ; errs=$((errs + $?)) +## 12jun23 ufs +## checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-bf60924}" ; errs=$((errs + $?)) +## 26jun23 ufs +checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-ed7fc88}" ; errs=$((errs + $?)) +checkout "verif-global.fd" "https://github.com/NOAA-EMC/EMC_verif-global.git" "c267780" ; errs=$((errs + $?)) -else - echo 'Skip. Directory ufs_model.fd already exists.' -fi - -echo gsi checkout ... -if [[ ! -d gsi.fd ]] ; then - rm -f ${topdir}/checkout-gsi.log - git clone --recursive https://github.com/NOAA-EMC/GSI.git gsi.fd >> ${logdir}/checkout-gsi.log 2>&1 - cd gsi.fd - git checkout a62dec6 - git submodule update --init - cd ${topdir} -else - echo 'Skip. Directory gsi.fd already exists.' +if [[ ${checkout_gsi} == "YES" ]]; then + checkout "gsi_enkf.fd" "https://github.com/NOAA-EMC/GSI.git" "113e307" "NO"; errs=$((errs + $?)) fi -echo gldas checkout ... -if [[ ! -d gldas.fd ]] ; then - rm -f ${topdir}/checkout-gldas.log - git clone https://github.com/NOAA-EMC/GLDAS.git gldas.fd >> ${logdir}/checkout-gldas.fd.log 2>&1 - cd gldas.fd - git checkout gldas_gfsv16_release.v.1.28.0 - cd ${topdir} -else - echo 'Skip. Directory gldas.fd already exists.' +if [[ ${checkout_gdas} == "YES" ]]; then + checkout "gdas.cd" "https://github.com/NOAA-EMC/GDASApp.git" "81675c9"; errs=$((errs + $?)) fi -echo ufs_utils checkout ... -if [[ ! -d ufs_utils.fd ]] ; then - rm -f ${topdir}/checkout-ufs_utils.log - git clone --recursive https://github.com/ufs-community/UFS_UTILS.git ufs_utils.fd >> ${logdir}/checkout-ufs_utils.fd.log 2>&1 - cd ufs_utils.fd - git checkout ufs_utils_1_8_0 - git submodule update --init --recursive - cd ${topdir} - if [[ -d ufs_utils.fd_gsl ]]; then - rsync -avx ufs_utils.fd_gsl/ ufs_utils.fd/ ## copy over GSL changes not in UFS_UTILS repository - fi -else - echo 'Skip. Directory ufs_utils.fd already exists.' +if [[ ${checkout_gsi} == "YES" || ${checkout_gdas} == "YES" ]]; then + checkout "gsi_utils.fd" "https://github.com/NOAA-EMC/GSI-Utils.git" "322cc7b"; errs=$((errs + $?)) + checkout "gsi_monitor.fd" "https://github.com/NOAA-EMC/GSI-Monitor.git" "45783e3"; errs=$((errs + $?)) fi -checkout_wafs=${checkout_wafs:-"NO"} -if [[ ${checkout_wafs} == "YES" ]] ; then - echo EMC_gfs_wafs checkout ... - if [[ ! -d gfs_wafs.fd ]] ; then - rm -f ${topdir}/checkout-gfs_wafs.log - git clone --recursive https://github.com/NOAA-EMC/EMC_gfs_wafs.git gfs_wafs.fd >> ${logdir}/checkout-gfs_wafs.log 2>&1 - cd gfs_wafs.fd - git checkout c2a29a67d9432b4d6fba99eac7797b81d05202b6 - cd ${topdir} - else - echo 'Skip. Directory gfs_wafs.fd already exists.'
- fi +if [[ ${checkout_wafs} == "YES" ]]; then + checkout "gfs_wafs.fd" "https://github.com/NOAA-EMC/EMC_gfs_wafs.git" "014a0b8"; errs=$((errs + $?)) fi -echo EMC_verif-global checkout ... -if [[ ! -d verif-global.fd ]] ; then - rm -f ${topdir}/checkout-verif-global.log - git clone --recursive https://github.com/NOAA-EMC/EMC_verif-global.git verif-global.fd >> ${logdir}/checkout-verif-global.log 2>&1 - cd verif-global.fd - # git checkout verif_global_v2.8.0 - git checkout c267780 - cd ${topdir} -else - echo 'Skip. Directory verif-global.fd already exist.' +if [[ ${checkout_gtg} == "YES" ]]; then + ################################################################################ + # checkout_gtg + ## yes: The gtg code at NCAR private repository is available for ops. GFS only. + # Only approved persons/groups have access permission. + ## no: No need to check out gtg code for general GFS users. + ################################################################################ + + echo "Checking out GTG extension for UPP" + cd "${topdir}/ufs_model.fd/FV3/upp" || exit 1 + logfile="${logdir}/checkout_gtg.log" + git -c submodule."post_gtg.fd".update=checkout submodule update --init --recursive >> "${logfile}" 2>&1 + status=$? + if (( status > 0 )); then + echo "WARNING: Error while checking out GTG" + errs=$((errs + status)) + fi fi -#JKHecho aeroconv checkout ... -#JKHif [[ ! -d aeroconv.fd ]] ; then -#JKH rm -f ${topdir}/checkout-aero.log -#JKH git clone https://github.com/NCAR/aeroconv aeroconv.fd >> ${topdir}/checkout-aero.log 2>&1 -#JKH cd aeroconv.fd -#JKH git checkout 24f6ddc -#JKH cd ${topdir} -#JKH ./aero_extract.sh -#JKHelse -#JKH echo 'Skip. Directory aeroconv.fd already exists.' -#JKHfi - -exit 0 +if (( errs > 0 )); then + echo "WARNING: One or more errors encountered during checkout process, please check logs before building" +fi +echo +exit "${errs}" diff --git a/sorc/checkout_ufs.sh b/sorc/checkout_ufs.sh new file mode 100755 index 0000000000..cdee7de3aa --- /dev/null +++ b/sorc/checkout_ufs.sh @@ -0,0 +1,172 @@ +#! /usr/bin/env bash + +set +x +set -u + +function usage() { + cat << EOF +Clones and checks out external components necessary for + global workflow. If the directory already exists, skip + cloning and just check out the requested version (unless + -c option is used). + +Usage: ${BASH_SOURCE[0]} [-c][-h][-m ufs_hash][-o][-g][-u] + -c: + Create a fresh clone (delete existing directories) + -h: + Print this help message and exit + -m ufs_hash: + Check out this UFS hash instead of the default + -o: + Check out operational-only code (GTG and WAFS) + -g: + Check out GSI for GSI-based DA + -u: + Check out GDASApp for UFS-based DA +EOF + exit 1 +} + +function checkout() { + # + # Clone or fetch repo, then checkout specific hash and update submodules + # + # Environment variables: + # topdir [default: $(pwd)]: parent directory to your checkout + # logdir [default: $(pwd)]: where you want logfiles written + # CLEAN [default: NO]: whether to delete existing directories and create a fresh clone + # + # Usage: checkout + # + # Arguments + # dir: Directory for the clone + # remote: URL of the remote repository + # version: Commit to check out; should always be a specific commit (hash or tag), not a branch + # + # Returns + # Exit code of last failed command, or 0 if successful + # + + dir="$1" + remote="$2" + version="$3" + recursive=${4:-"YES"} + + name=$(echo "${dir}" | cut -d '.'
-f 1) + echo "Performing checkout of ${name}" + + logfile="${logdir:-$(pwd)}/checkout_${name}.log" + + if [[ -f "${logfile}" ]]; then + rm "${logfile}" + fi + + cd "${topdir}" || exit 1 + if [[ -d "${dir}" && ${CLEAN} == "YES" ]]; then + echo "|-- Removing existing clone in ${dir}" + rm -Rf "${dir}" + fi + if [[ ! -d "${dir}" ]]; then + echo "|-- Cloning from ${remote} into ${dir}" + git clone "${remote}" "${dir}" >> "${logfile}" 2>&1 + status=$? + if ((status > 0)); then + echo " WARNING: Error while cloning ${name}" + echo + return "${status}" + fi + cd "${dir}" || exit 1 + else + # Fetch any updates from server + cd "${dir}" || exit 1 + echo "|-- Fetching updates from ${remote}" + git fetch + fi + echo "|-- Checking out ${version}" + git checkout "${version}" >> "${logfile}" 2>&1 + status=$? + if ((status > 0)); then + echo " WARNING: Error while checking out ${version} in ${name}" + echo + return "${status}" + fi + if [[ "${recursive}" == "YES" ]]; then + echo "|-- Updating submodules (if any)" + git submodule update --init --recursive >> "${logfile}" 2>&1 + status=$? + if ((status > 0)); then + echo " WARNING: Error while updating submodules of ${name}" + echo + return "${status}" + fi + fi + echo + if [[ -d ${dir}_gsl ]]; then + echo "syncing ${dir}_gsl...." + rsync -avx ${dir}_gsl/ ${dir}/ ## copy over GSL changes not in UFS repository + fi + return 0 +} + +# Set defaults for variables toggled by options +export CLEAN="NO" +checkout_gsi="NO" +checkout_gdas="NO" +checkout_gtg="NO" +checkout_wafs="NO" +checkout_aeroconv="NO" + +# Parse command line arguments +while getopts ":chgum:o" option; do + case ${option} in + c) + echo "Received -c flag, will delete any existing directories and start clean" + export CLEAN="YES" + ;; + g) + echo "Received -g flag for optional checkout of GSI-based DA" + checkout_gsi="YES" + ;; + h) usage;; + u) + echo "Received -u flag for optional checkout of UFS-based DA" + checkout_gdas="YES" + ;; + o) + echo "Received -o flag for optional checkout of operational-only codes" + checkout_gtg="YES" + checkout_wafs="YES" + ;; + m) + echo "Received -m flag with argument, will check out ufs-weather-model hash ${OPTARG} instead of default" + ufs_model_hash=${OPTARG} + ;; + :) + echo "option -${OPTARG} needs an argument" + usage + ;; + *) + echo "invalid option -${OPTARG}, exiting..." 
+ usage + ;; + esac +done +shift $((OPTIND-1)) + +topdir=$(cd "$(dirname "${BASH_SOURCE[0]}")" &> /dev/null && pwd) +export topdir +export logdir="${topdir}/logs" +mkdir -p "${logdir}" + +# The checkout version should always be a specific commit (hash or tag), not a branch +errs=0 +## 14Apr23 checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-2247060}" ; errs=$((errs + $?)) +## 12Jun23 +##checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-bf60924}" ; errs=$((errs + $?)) +## 26Jun23 +checkout "ufs_model.fd" "https://github.com/ufs-community/ufs-weather-model" "${ufs_model_hash:-ed7fc88}" ; errs=$((errs + $?)) +## JKH +if [[ -d ufs_model.fd_gsl ]]; then + rsync -avx ufs_model.fd_gsl/ ufs_model.fd/ ## copy over GSL changes not in UFS repository +fi +## JKH diff --git a/sorc/enkf_chgres_recenter.fd/.gitignore b/sorc/enkf_chgres_recenter.fd/.gitignore deleted file mode 100644 index 544aec4c42..0000000000 --- a/sorc/enkf_chgres_recenter.fd/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -*.exe -*.o -*.mod diff --git a/sorc/enkf_chgres_recenter.fd/driver.f90 b/sorc/enkf_chgres_recenter.fd/driver.f90 deleted file mode 100644 index 02a138ae8f..0000000000 --- a/sorc/enkf_chgres_recenter.fd/driver.f90 +++ /dev/null @@ -1,65 +0,0 @@ - program recenter - - use setup, only : program_setup - use interp, only : gaus_to_gaus, adjust_for_terrain - use input_data, only : read_input_data, & - read_vcoord_info - use output_data, only : set_output_grid, write_output_data - - implicit none - - call w3tagb('CHGRES_RECENTER',2018,0179,0055,'NP20') - - print*,"STARTING PROGRAM" - -!-------------------------------------------------------- -! Read configuration namelist. -!-------------------------------------------------------- - - call program_setup - -!-------------------------------------------------------- -! Read input grid data -!-------------------------------------------------------- - - call read_input_data - -!-------------------------------------------------------- -! Read vertical coordinate info -!-------------------------------------------------------- - - call read_vcoord_info - -!-------------------------------------------------------- -! Get output grid specs -!-------------------------------------------------------- - - call set_output_grid - -!-------------------------------------------------------- -! Interpolate data to output grid -!-------------------------------------------------------- - - call gaus_to_gaus - -!-------------------------------------------------------- -! Adjust output fields for differences between -! interpolated and external terrain. -!-------------------------------------------------------- - - call adjust_for_terrain - -!-------------------------------------------------------- -! Write output data to file. -!-------------------------------------------------------- - - call write_output_data - - print* - print*,"PROGRAM FINISHED NORMALLY!"
- - call w3tage('CHGRES_RECENTER') - - stop - - end program recenter diff --git a/sorc/enkf_chgres_recenter.fd/input_data.f90 b/sorc/enkf_chgres_recenter.fd/input_data.f90 deleted file mode 100644 index 704aa58c8d..0000000000 --- a/sorc/enkf_chgres_recenter.fd/input_data.f90 +++ /dev/null @@ -1,383 +0,0 @@ - module input_data - - use nemsio_module - use utils - use setup - - implicit none - - private - - integer, public :: idvc, idsl, idvm, nvcoord - integer, public :: ntrac, ncldt,icldamt - integer, public :: ij_input, kgds_input(200) - integer(nemsio_intkind), public :: i_input, j_input, lev - integer(nemsio_intkind), public :: idate(7) - - logical, public :: gfdl_mp - - real, allocatable, public :: vcoord(:,:) - real, allocatable, public :: clwmr_input(:,:) - real, allocatable, public :: dzdt_input(:,:) - real, allocatable, public :: grle_input(:,:) - real, allocatable, public :: cldamt_input(:,:) - real, allocatable, public :: hgt_input(:) - real, allocatable, public :: icmr_input(:,:) - real, allocatable, public :: o3mr_input(:,:) - real, allocatable, public :: rwmr_input(:,:) - real, allocatable, public :: sfcp_input(:) - real, allocatable, public :: snmr_input(:,:) - real, allocatable, public :: spfh_input(:,:) - real, allocatable, public :: tmp_input(:,:) - real, allocatable, public :: ugrd_input(:,:) - real, allocatable, public :: vgrd_input(:,:) - - public :: read_input_data - public :: read_vcoord_info - - contains - - subroutine read_input_data - -!------------------------------------------------------------------------------------- -! Read input grid data from a nemsio file. -!------------------------------------------------------------------------------------- - - implicit none - - character(len=20) :: vlevtyp, vname - character(len=50), allocatable :: recname(:) - - integer(nemsio_intkind) :: vlev, iret, idum, nrec - integer :: n - - real(nemsio_realkind), allocatable :: dummy(:) - - type(nemsio_gfile) :: gfile - - call nemsio_init(iret) - - print* - print*,"OPEN INPUT FILE: ",trim(input_file) - call nemsio_open(gfile, input_file, "read", iret=iret) - if (iret /= 0) then - print*,"FATAL ERROR OPENING FILE: ",trim(input_file) - print*,"IRET IS: ", iret - call errexit(2) - endif - - print*,"GET INPUT FILE HEADER" - call nemsio_getfilehead(gfile, iret=iret, nrec=nrec, idate=idate, & - dimx=i_input, dimy=j_input, dimz=lev) - if (iret /= 0) goto 67 - - print*,'DIMENSIONS OF DATA ARE: ', i_input, j_input, lev - print*,'DATE OF DATA IS: ', idate - - ij_input = i_input * j_input - - allocate(recname(nrec)) - - call nemsio_getfilehead(gfile, iret=iret, recname=recname) - if (iret /= 0) goto 67 - - gfdl_mp = .false. ! Zhao-Carr MP - do n = 1, nrec - if (trim(recname(n)) == "icmr") then - gfdl_mp = .true. ! GFDL MP - exit - endif - enddo - - icldamt = 0 - do n = 1, nrec - if (trim(recname(n)) == "cld_amt") then - icldamt = 1 ! 3D cloud amount present - exit - endif - enddo - - call nemsio_getfilehead(gfile, iret=iret, idvc=idum) - if (iret /= 0) goto 67 - idvc = idum - print*,'IDVC IS: ', idvc - - call nemsio_getfilehead(gfile, iret=iret, idsl=idum) - if (iret /= 0) goto 67 - idsl = idum - print*,'IDSL IS: ', idsl - - call nemsio_getfilehead(gfile, iret=iret, idvm=idum) - if (iret /= 0) goto 67 - idvm = idum - print*,'IDVM IS: ', idvm - - if (gfdl_mp) then - ntrac = 7 + icldamt - ncldt = 5 - else - ntrac = 3 - ncldt = 1 - endif - - allocate(dummy(ij_input)) - - ! 
figure out the sign of delz - print*,"READ DELZ FOR SIGN CHECK" - vlev = 1 - vlevtyp = "mid layer" - vname = "delz" - call nemsio_readrecv(gfile, vname, vlevtyp, vlev, dummy, 0, iret) - if (iret /= 0) goto 67 - if ( sum(dummy) > 0 ) then - flipdelz = .false. - print*,"DELZ IS POSITIVE" - else - flipdelz = .true. - print*,"DELZ IS NEGATIVE" - end if - - print* - print*,"READ SURFACE PRESSURE" - vlev = 1 - vlevtyp = "sfc" - vname = "pres" - call nemsio_readrecv(gfile, vname, vlevtyp, vlev, dummy, 0, iret) - if (iret /= 0) goto 67 - - allocate(sfcp_input(ij_input)) - sfcp_input = dummy - print*,'MAX/MIN SURFACE PRESSURE: ',maxval(sfcp_input), minval(sfcp_input) - - print* - print*,"READ SURFACE HEIGHT" - vlev = 1 - vlevtyp = "sfc" - vname = "hgt" - call nemsio_readrecv(gfile, vname, vlevtyp, vlev, dummy, 0, iret) - if (iret /= 0) goto 67 - - allocate(hgt_input(ij_input)) - hgt_input = dummy - print*,'MAX/MIN SURFACE HEIGHT: ',maxval(hgt_input), minval(hgt_input) - - print* - print*,"READ U WIND" - vname = "ugrd" - vlevtyp = "mid layer" - allocate(ugrd_input(ij_input,lev)) - do vlev = 1, lev - call nemsio_readrecv(gfile, vname, vlevtyp, vlev, dummy, 0, iret) - if (iret /= 0) goto 67 - ugrd_input(:,vlev) = dummy - print*,'MAX/MIN U WIND AT LEVEL ',vlev, "IS: ", maxval(ugrd_input(:,vlev)), minval(ugrd_input(:,vlev)) - enddo - - print* - print*,"READ V WIND" - vname = "vgrd" - vlevtyp = "mid layer" - allocate(vgrd_input(ij_input,lev)) - do vlev = 1, lev - call nemsio_readrecv(gfile, vname, vlevtyp, vlev, dummy, 0, iret) - if (iret /= 0) goto 67 - vgrd_input(:,vlev) = dummy - print*,'MAX/MIN V WIND AT LEVEL ', vlev, "IS: ", maxval(vgrd_input(:,vlev)), minval(vgrd_input(:,vlev)) - enddo - - print* - print*,"READ TEMPERATURE" - vname = "tmp" - vlevtyp = "mid layer" - allocate(tmp_input(ij_input,lev)) - do vlev = 1, lev - call nemsio_readrecv(gfile, vname, vlevtyp, vlev, dummy, 0, iret) - if (iret /= 0) goto 67 - tmp_input(:,vlev) = dummy(:) - print*,'MAX/MIN TEMPERATURE AT LEVEL ', vlev, 'IS: ', maxval(tmp_input(:,vlev)), minval(tmp_input(:,vlev)) - enddo - - print* - print*,"READ SPECIFIC HUMIDITY" - vname = "spfh" - vlevtyp = "mid layer" - allocate(spfh_input(ij_input,lev)) - do vlev = 1, lev - call nemsio_readrecv(gfile, vname, vlevtyp, vlev, dummy, 0, iret) - if (iret /= 0) goto 67 - spfh_input(:,vlev) = dummy - print*,'MAX/MIN SPECIFIC HUMIDITY AT LEVEL ', vlev, 'IS: ', maxval(spfh_input(:,vlev)), minval(spfh_input(:,vlev)) - enddo - - print* - print*,"READ CLOUD LIQUID WATER" - vname = "clwmr" - vlevtyp = "mid layer" - allocate(clwmr_input(ij_input,lev)) - do vlev = 1, lev - call nemsio_readrecv(gfile, vname, vlevtyp, vlev, dummy, 0, iret) - if (iret /= 0) goto 67 - clwmr_input(:,vlev) = dummy - print*,'MAX/MIN CLOUD LIQUID WATER AT LEVEL ', vlev, 'IS: ', maxval(clwmr_input(:,vlev)), minval(clwmr_input(:,vlev)) - enddo - - print* - print*,"READ OZONE" - vname = "o3mr" - vlevtyp = "mid layer" - allocate(o3mr_input(ij_input,lev)) - do vlev = 1, lev - call nemsio_readrecv(gfile, vname, vlevtyp, vlev, dummy, 0, iret) - if (iret /= 0) goto 67 - o3mr_input(:,vlev) = dummy - print*,'MAX/MIN OZONE AT LEVEL ', vlev, 'IS: ', maxval(o3mr_input(:,vlev)), minval(o3mr_input(:,vlev)) - enddo - - print* - print*,"READ DZDT" - vname = "dzdt" - vlevtyp = "mid layer" - allocate(dzdt_input(ij_input,lev)) - do vlev = 1, lev - call nemsio_readrecv(gfile, vname, vlevtyp, vlev, dummy, 0, iret) - if (iret /= 0) goto 67 - dzdt_input(:,vlev) = dummy - print*,'MAX/MIN DZDT AT LEVEL ', vlev, 'IS: ', 
maxval(dzdt_input(:,vlev)), minval(dzdt_input(:,vlev)) - enddo - - if (gfdl_mp) then - - print* - print*,"READ RWMR" - vname = "rwmr" - vlevtyp = "mid layer" - allocate(rwmr_input(ij_input,lev)) - do vlev = 1, lev - call nemsio_readrecv(gfile, vname, vlevtyp, vlev, dummy, 0, iret) - if (iret /= 0) goto 67 - rwmr_input(:,vlev) = dummy - print*,'MAX/MIN RWMR AT LEVEL ', vlev, 'IS: ', maxval(rwmr_input(:,vlev)), minval(rwmr_input(:,vlev)) - enddo - - print* - print*,"READ ICMR" - vname = "icmr" - vlevtyp = "mid layer" - allocate(icmr_input(ij_input,lev)) - do vlev = 1, lev - call nemsio_readrecv(gfile, vname, vlevtyp, vlev, dummy, 0, iret) - if (iret /= 0) goto 67 - icmr_input(:,vlev) = dummy - print*,'MAX/MIN ICMR AT LEVEL ', vlev, 'IS: ', maxval(icmr_input(:,vlev)), minval(icmr_input(:,vlev)) - enddo - - print* - print*,"READ SNMR" - vname = "snmr" - vlevtyp = "mid layer" - allocate(snmr_input(ij_input,lev)) - do vlev = 1, lev - call nemsio_readrecv(gfile, vname, vlevtyp, vlev, dummy, 0, iret) - if (iret /= 0) goto 67 - snmr_input(:,vlev) = dummy - print*,'MAX/MIN SNMR AT LEVEL ', vlev, 'IS: ', maxval(snmr_input(:,vlev)), minval(snmr_input(:,vlev)) - enddo - - print* - print*,"READ GRLE" - vname = "grle" - vlevtyp = "mid layer" - allocate(grle_input(ij_input,lev)) - do vlev = 1, lev - call nemsio_readrecv(gfile, vname, vlevtyp, vlev, dummy, 0, iret) - if (iret /= 0) goto 67 - grle_input(:,vlev) = dummy - print*,'MAX/MIN GRLE AT LEVEL ', vlev, 'IS: ', maxval(grle_input(:,vlev)), minval(grle_input(:,vlev)) - enddo - - if (icldamt == 1) then - print* - print*,"READ CLD_AMT" - vname = "cld_amt" - vlevtyp = "mid layer" - allocate(cldamt_input(ij_input,lev)) - do vlev = 1, lev - write(6,*) 'read ',vname,' on ',vlev - call nemsio_readrecv(gfile, vname, vlevtyp, vlev, dummy, 0, iret) - if (iret /= 0) goto 67 - cldamt_input(:,vlev) = dummy - print*,'MAX/MIN CLD_AMT AT LEVEL ', vlev, 'IS: ', maxval(cldamt_input(:,vlev)), minval(cldamt_input(:,vlev)) - enddo - endif - - endif - - deallocate(dummy) - - print*,"CLOSE FILE" - call nemsio_close(gfile, iret=iret) - - call nemsio_finalize() - -!--------------------------------------------------------------------------------------- -! Set the grib 1 grid description array need by the NCEP IPOLATES library. -!--------------------------------------------------------------------------------------- - - call calc_kgds(i_input, j_input, kgds_input) - - return - - 67 continue - - print*,"FATAL ERROR READING FILE: ", trim(input_file) - print*,"IRET IS: ", iret - call errexit(3) - - end subroutine read_input_data - - subroutine read_vcoord_info - -!--------------------------------------------------------------------------------- -! Read vertical coordinate information. -!--------------------------------------------------------------------------------- - - implicit none - - integer :: istat, levs_vcoord, n, k - - print* - print*,"OPEN VERTICAL COORD FILE: ", trim(vcoord_file) - open(14, file=trim(vcoord_file), form='formatted', iostat=istat) - if (istat /= 0) then - print*,"FATAL ERROR OPENING FILE. ISTAT IS: ", istat - call errexit(4) - endif - - read(14, *, iostat=istat) nvcoord, levs_vcoord - if (istat /= 0) then - print*,"FATAL ERROR READING FILE HEADER. ISTAT IS: ",istat - call errexit(5) - endif - -!--------------------------------------------------------------------------------- -! The last value in the file is not used for the fv3 core. Only read the first -! (lev + 1) values. 
-!--------------------------------------------------------------------------------- - - allocate(vcoord(lev+1, nvcoord)) - read(14, *, iostat=istat) ((vcoord(n,k), k=1,nvcoord), n=1,lev+1) - if (istat /= 0) then - print*,"FATAL ERROR READING FILE. ISTAT IS: ",istat - call errexit(6) - endif - - print* - do k = 1, (lev+1) - print*,'VCOORD FOR LEV ', k, 'IS: ', vcoord(k,:) - enddo - - close(14) - - end subroutine read_vcoord_info - - end module input_data diff --git a/sorc/enkf_chgres_recenter.fd/interp.f90 b/sorc/enkf_chgres_recenter.fd/interp.f90 deleted file mode 100644 index bb2afedbc3..0000000000 --- a/sorc/enkf_chgres_recenter.fd/interp.f90 +++ /dev/null @@ -1,552 +0,0 @@ - module interp - - use nemsio_module - - implicit none - - private - - real, allocatable :: sfcp_b4_adj_output(:) - real, allocatable :: clwmr_b4_adj_output(:,:) - real, allocatable :: dzdt_b4_adj_output(:,:) - real, allocatable :: grle_b4_adj_output(:,:) - real, allocatable :: cldamt_b4_adj_output(:,:) - real, allocatable :: icmr_b4_adj_output(:,:) - real, allocatable :: o3mr_b4_adj_output(:,:) - real, allocatable :: rwmr_b4_adj_output(:,:) - real, allocatable :: snmr_b4_adj_output(:,:) - real, allocatable :: spfh_b4_adj_output(:,:) - real, allocatable :: tmp_b4_adj_output(:,:) - real, allocatable :: ugrd_b4_adj_output(:,:) - real, allocatable :: vgrd_b4_adj_output(:,:) - - public :: adjust_for_terrain - public :: gaus_to_gaus - - contains - - subroutine adjust_for_terrain - -!--------------------------------------------------------------------------------- -! Adjust fields based on differences between the interpolated and external -! terrain. -!--------------------------------------------------------------------------------- - - use input_data - use output_data - use utils - use setup - - implicit none - - integer :: k - - real, allocatable :: pres_b4_adj_output(:,:) - real, allocatable :: pres_output(:,:) - real, allocatable :: q_b4_adj_output(:,:,:), q_output(:,:,:) - -!--------------------------------------------------------------------------------- -! First, compute the mid-layer pressure using the interpolated surface pressure. -!--------------------------------------------------------------------------------- - - allocate(pres_b4_adj_output(ij_output,lev)) - pres_b4_adj_output = 0.0 - - print* - print*,"COMPUTE MID-LAYER PRESSURE FROM INTERPOLATED SURFACE PRESSURE." - call newpr1(ij_output, lev, idvc, idsl, nvcoord, vcoord, & - sfcp_b4_adj_output, pres_b4_adj_output) - -!print*,'after newpr1, pres b4 adj: ', pres_b4_adj_output(ij_output/2,:) - -!--------------------------------------------------------------------------------- -! Adjust surface pressure based on differences between interpolated and -! grid terrain. -!--------------------------------------------------------------------------------- - - allocate(sfcp_output(ij_output)) - sfcp_output = 0.0 - - print*,"ADJUST SURFACE PRESSURE BASED ON TERRAIN DIFFERENCES" - call newps(hgt_output, sfcp_b4_adj_output, ij_output, & - lev, pres_b4_adj_output, tmp_b4_adj_output, & - spfh_b4_adj_output, hgt_external_output, sfcp_output) - -!print*,'after newps ',sfcp_b4_adj_output(ij_output/2),sfcp_output(ij_output/2) - - deallocate(sfcp_b4_adj_output) - -!--------------------------------------------------------------------------------- -! Recompute mid-layer pressure based on the adjusted surface pressure. 
-!--------------------------------------------------------------------------------- - - allocate(pres_output(ij_output, lev)) - pres_output = 0.0 - - allocate(dpres_output(ij_output, lev)) - dpres_output = 0.0 - - print*,"RECOMPUTE MID-LAYER PRESSURE." - call newpr1(ij_output, lev, idvc, idsl, nvcoord, vcoord, & - sfcp_output, pres_output, dpres_output) - -!do k = 1, lev -! print*,'after newpr1 ',pres_b4_adj_output(ij_output/2,k),pres_output(ij_output/2,k), dpres_output(ij_output/2,k) -!enddo - -!--------------------------------------------------------------------------------- -! Vertically interpolate from the pre-adjusted to the adjusted mid-layer -! pressures. -!--------------------------------------------------------------------------------- - - allocate(q_b4_adj_output(ij_output,lev,ntrac)) - q_b4_adj_output(:,:,1) = spfh_b4_adj_output(:,:) - q_b4_adj_output(:,:,2) = o3mr_b4_adj_output(:,:) - q_b4_adj_output(:,:,3) = clwmr_b4_adj_output(:,:) - if (gfdl_mp) then - q_b4_adj_output(:,:,4) = rwmr_b4_adj_output(:,:) - q_b4_adj_output(:,:,5) = icmr_b4_adj_output(:,:) - q_b4_adj_output(:,:,6) = snmr_b4_adj_output(:,:) - q_b4_adj_output(:,:,7) = grle_b4_adj_output(:,:) - if (icldamt == 1) q_b4_adj_output(:,:,8) = cldamt_b4_adj_output(:,:) - endif - - allocate(q_output(ij_output,lev,ntrac)) - q_output = 0.0 - - allocate(dzdt_output(ij_output,lev)) - dzdt_output = 0.0 - - allocate(ugrd_output(ij_output,lev)) - ugrd_output=0.0 - - allocate(vgrd_output(ij_output,lev)) - vgrd_output=0.0 - - allocate(tmp_output(ij_output,lev)) - tmp_output=0.0 - - print*,"VERTICALLY INTERPOLATE TO NEW PRESSURE LEVELS" - call vintg(ij_output, lev, lev, ntrac, pres_b4_adj_output, & - ugrd_b4_adj_output, vgrd_b4_adj_output, tmp_b4_adj_output, q_b4_adj_output, & - dzdt_b4_adj_output, pres_output, ugrd_output, vgrd_output, tmp_output, & - q_output, dzdt_output) - - deallocate (dzdt_b4_adj_output, q_b4_adj_output) - deallocate (pres_b4_adj_output, pres_output) - - allocate(spfh_output(ij_output,lev)) - spfh_output = q_output(:,:,1) - allocate(o3mr_output(ij_output,lev)) - o3mr_output = q_output(:,:,2) - allocate(clwmr_output(ij_output,lev)) - clwmr_output = q_output(:,:,3) - if (gfdl_mp) then - allocate(rwmr_output(ij_output,lev)) - rwmr_output = q_output(:,:,4) - allocate(icmr_output(ij_output,lev)) - icmr_output = q_output(:,:,5) - allocate(snmr_output(ij_output,lev)) - snmr_output = q_output(:,:,6) - allocate(grle_output(ij_output,lev)) - grle_output = q_output(:,:,7) - if (icldamt == 1) then - allocate(cldamt_output(ij_output,lev)) - cldamt_output = q_output(:,:,8) - endif - endif - - deallocate(q_output) - -!do k = 1, lev -!print*,'after vintg tmp ',tmp_b4_adj_output(ij_output/2,k),tmp_output(ij_output/2,k) -!enddo - - deallocate(tmp_b4_adj_output) - -!do k = 1, lev -!print*,'after vintg u ',ugrd_b4_adj_output(ij_output/2,k),ugrd_output(ij_output/2,k) -!enddo - - deallocate(ugrd_b4_adj_output) - -!do k = 1, lev -!print*,'after vintg v ',vgrd_b4_adj_output(ij_output/2,k),vgrd_output(ij_output/2,k) -!enddo - - deallocate(vgrd_b4_adj_output) - -!do k = 1, lev -!print*,'after vintg spfh ',spfh_b4_adj_output(ij_output/2,k),spfh_output(ij_output/2,k) -!enddo - - deallocate(spfh_b4_adj_output) - -!do k = 1, lev -!print*,'after vintg o3 ',o3mr_b4_adj_output(ij_output/2,k),o3mr_output(ij_output/2,k) -!enddo - - deallocate(o3mr_b4_adj_output) - -!do k = 1, lev -!print*,'after vintg clw ',clwmr_b4_adj_output(ij_output/2,k),clwmr_output(ij_output/2,k) -!enddo - - deallocate(clwmr_b4_adj_output) - - if (gfdl_mp) then - -! 
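Taken together, adjust_for_terrain runs a four-step sequence: derive mid-layer pressures from the horizontally interpolated surface pressure (newpr1), hydrostatically move that surface pressure onto the target-grid terrain (newps), rebuild the mid-layer pressures and layer pressure thicknesses from the adjusted surface pressure, and vertically re-interpolate winds, temperature, vertical velocity and the tracer stack onto the new pressures (vintg). A schematic Python sketch of the control flow; newpr1, newps and vintg are passed in as hypothetical wrappers for the Fortran routines of the same names:

```python
def adjust_for_terrain(newpr1, newps, vintg, vcoord, ps_interp, hgt_interp,
                       hgt_target, u, v, t, w, q):
    """Control flow of the terrain adjustment; the three helpers are hypothetical
    stand-ins for the Fortran routines of the same names."""
    # 1. mid-layer pressures implied by the horizontally interpolated surface pressure
    p_b4 = newpr1(vcoord, ps_interp)
    # 2. hydrostatically move the surface pressure onto the target-grid terrain
    ps_new = newps(hgt_interp, ps_interp, p_b4, t, q[:, :, 0], hgt_target)
    # 3. rebuild mid-layer pressures (and layer dp) from the adjusted surface pressure
    p_new, dp = newpr1(vcoord, ps_new, want_dp=True)
    # 4. vertically re-interpolate winds, temperature, dzdt and all tracers
    u2, v2, t2, q2, w2 = vintg(p_b4, p_new, u, v, t, w, q)
    return ps_new, p_new, dp, u2, v2, t2, q2, w2
```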
do k = 1, lev -! print*,'after vintg rw ',rwmr_b4_adj_output(ij_output/2,k),rwmr_output(ij_output/2,k) -! enddo - - deallocate(rwmr_b4_adj_output) - -! do k = 1, lev -! print*,'after vintg ic ',icmr_b4_adj_output(ij_output/2,k),icmr_output(ij_output/2,k) -! enddo - - deallocate(icmr_b4_adj_output) - -! do k = 1, lev -! print*,'after vintg sn ',snmr_b4_adj_output(ij_output/2,k),snmr_output(ij_output/2,k) -! enddo - - deallocate(snmr_b4_adj_output) - -! do k = 1, lev -! print*,'after vintg grle ',grle_b4_adj_output(ij_output/2,k),grle_output(ij_output/2,k) -! enddo - - deallocate(grle_b4_adj_output) - - if (icldamt == 1) then -! do k = 1, lev -! print*,'after vintg cld_amt ',cldamt_b4_adj_output(ij_output/2,k),cldamt_output(ij_output/2,k) -! enddo - - deallocate(cldamt_b4_adj_output) - endif - - - endif - - allocate(delz_output(ij_output, lev)) - delz_output = 0.0 - - call compute_delz(ij_output, lev, vcoord(:,1), vcoord(:,2), sfcp_output, hgt_output, & - tmp_output, spfh_output, delz_output, flipdelz) - - deallocate(hgt_output) - - end subroutine adjust_for_terrain - - subroutine gaus_to_gaus - -!---------------------------------------------------------------------------------- -! Interpolate data from the input to output grid using IPOLATES library. -!---------------------------------------------------------------------------------- - - use output_data - use input_data - use setup - - implicit none - - integer :: ip, ipopt(20) - integer :: num_fields - integer :: iret, numpts - integer, allocatable :: ibi(:), ibo(:) - - logical*1, allocatable :: bitmap_input(:,:), bitmap_output(:,:) - - real, allocatable :: data_input(:,:) - real, allocatable :: data_output(:,:), crot(:), srot(:) - - print* - print*,'INTERPOLATE DATA TO OUTPUT GRID' - - ip = 0 ! bilinear - ipopt = 0 - -!---------------------------------------------------------------------------------- -! Do 2-D fields first -!---------------------------------------------------------------------------------- - - num_fields = 1 - - allocate(ibi(num_fields)) - ibi = 0 ! no bitmap - allocate(ibo(num_fields)) - ibo = 0 ! no bitmap - - allocate(bitmap_input(ij_input,num_fields)) - bitmap_input = .true. - allocate(bitmap_output(ij_output,num_fields)) - bitmap_output = .true. - - allocate(rlat_output(ij_output)) - rlat_output = 0.0 - allocate(rlon_output(ij_output)) - rlon_output = 0.0 - -!---------------- -! Surface height -!---------------- - - allocate(data_input(ij_input,num_fields)) - data_input(:,num_fields) = hgt_input(:) - deallocate(hgt_input) - - allocate(data_output(ij_output,num_fields)) - data_output = 0 - - print*,"INTERPOLATE SURFACE HEIGHT" - call ipolates(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, data_input, & - numpts, rlat_output, rlon_output, ibo, bitmap_output, & - data_output, iret) - if (iret /= 0) goto 89 - - allocate(hgt_output(ij_output)) - hgt_output = data_output(:,num_fields) - -!------------------ -! 
surface pressure -!------------------ - - data_input(:,num_fields) = sfcp_input(:) - deallocate(sfcp_input) - - print*,"INTERPOLATE SURFACE PRESSURE" - call ipolates(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, data_input, & - numpts, rlat_output, rlon_output, ibo, bitmap_output, & - data_output, iret) - if (iret /= 0) goto 89 - - allocate(sfcp_b4_adj_output(ij_output)) - sfcp_b4_adj_output = data_output(:,num_fields) - - deallocate(ibi, ibo, bitmap_input, bitmap_output, data_input, data_output) - -!---------------------------------------------------------------------------------- -! 3d scalars -!---------------------------------------------------------------------------------- - - num_fields = lev - - allocate(ibi(num_fields)) - ibi = 0 ! no bitmap - allocate(ibo(num_fields)) - ibo = 0 ! no bitmap - - allocate(bitmap_input(ij_input,num_fields)) - bitmap_input = .true. - allocate(bitmap_output(ij_output,num_fields)) - bitmap_output = .true. - -!------------- -! Temperature -!------------- - - allocate(tmp_b4_adj_output(ij_output,num_fields)) - tmp_b4_adj_output = 0 - - print*,'INTERPOLATE TEMPERATURE' - call ipolates(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, tmp_input, & - numpts, rlat_output, rlon_output, ibo, bitmap_output, & - tmp_b4_adj_output, iret) - if (iret /= 0) goto 89 - - deallocate(tmp_input) - -!-------------------- -! Cloud liquid water -!-------------------- - - allocate(clwmr_b4_adj_output(ij_output,num_fields)) - clwmr_b4_adj_output = 0 - - print*,'INTERPOLATE CLOUD LIQUID WATER' - call ipolates(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, clwmr_input, & - numpts, rlat_output, rlon_output, ibo, bitmap_output, & - clwmr_b4_adj_output, iret) - if (iret /= 0) goto 89 - - deallocate(clwmr_input) - -!-------------------- -! Specific humidity -!-------------------- - - allocate(spfh_b4_adj_output(ij_output,num_fields)) - spfh_b4_adj_output = 0 - - print*,'INTERPOLATE SPECIFIC HUMIDITY' - call ipolates(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, spfh_input, & - numpts, rlat_output, rlon_output, ibo, bitmap_output, & - spfh_b4_adj_output, iret) - if (iret /= 0) goto 89 - - deallocate(spfh_input) - -!----------- -! Ozone -!----------- - - allocate(o3mr_b4_adj_output(ij_output,num_fields)) - o3mr_b4_adj_output = 0 - - print*,'INTERPOLATE OZONE' - call ipolates(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, o3mr_input, & - numpts, rlat_output, rlon_output, ibo, bitmap_output, & - o3mr_b4_adj_output, iret) - if (iret /= 0) goto 89 - - deallocate(o3mr_input) - -!----------- -! DZDT -!----------- - - allocate(dzdt_b4_adj_output(ij_output,num_fields)) - dzdt_b4_adj_output = 0 - - print*,'INTERPOLATE DZDT' - call ipolates(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, dzdt_input, & - numpts, rlat_output, rlon_output, ibo, bitmap_output, & - dzdt_b4_adj_output, iret) - if (iret /= 0) goto 89 - - deallocate(dzdt_input) - -!---------------------------------------------------------------------------------- -! Interpolate additional 3-d scalars for GFDL microphysics. -!---------------------------------------------------------------------------------- - - if (gfdl_mp) then - -!------------- -! 
Rain water -!------------- - - allocate(rwmr_b4_adj_output(ij_output,num_fields)) - rwmr_b4_adj_output = 0 - - print*,'INTERPOLATE RWMR' - call ipolates(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, rwmr_input, & - numpts, rlat_output, rlon_output, ibo, bitmap_output, & - rwmr_b4_adj_output, iret) - if (iret /= 0) goto 89 - - deallocate(rwmr_input) - -!------------- -! Snow water -!------------- - - allocate(snmr_b4_adj_output(ij_output,num_fields)) - snmr_b4_adj_output = 0 - - print*,'INTERPOLATE SNMR' - call ipolates(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, snmr_input, & - numpts, rlat_output, rlon_output, ibo, bitmap_output, & - snmr_b4_adj_output, iret) - if (iret /= 0) goto 89 - - deallocate(snmr_input) - -!------------- -! Ice water -!------------- - - allocate(icmr_b4_adj_output(ij_output,num_fields)) - icmr_b4_adj_output = 0 - - print*,'INTERPOLATE ICMR' - call ipolates(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, icmr_input, & - numpts, rlat_output, rlon_output, ibo, bitmap_output, & - icmr_b4_adj_output, iret) - if (iret /= 0) goto 89 - - deallocate(icmr_input) - -!------------- -! Graupel -!------------- - - allocate(grle_b4_adj_output(ij_output,num_fields)) - grle_b4_adj_output = 0 - - print*,'INTERPOLATE GRLE' - call ipolates(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, grle_input, & - numpts, rlat_output, rlon_output, ibo, bitmap_output, & - grle_b4_adj_output, iret) - if (iret /= 0) goto 89 - - deallocate(grle_input) - -!--------------------------- -! Cloud amount (if present) -!--------------------------- - - if (icldamt == 1) then - allocate(cldamt_b4_adj_output(ij_output,num_fields)) - cldamt_b4_adj_output = 0 - - print*,'INTERPOLATE CLD_AMT' - call ipolates(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, cldamt_input, & - numpts, rlat_output, rlon_output, ibo, bitmap_output, & - cldamt_b4_adj_output, iret) - if (iret /= 0) goto 89 - - deallocate(cldamt_input) - endif - - - endif - -!---------------------------------------------------------------------------------- -! 3d u/v winds -!---------------------------------------------------------------------------------- - - allocate(crot(ij_output), srot(ij_output)) - crot = 0. - srot = 0. - - allocate(ugrd_b4_adj_output(ij_output,num_fields)) - ugrd_b4_adj_output = 0 - allocate(vgrd_b4_adj_output(ij_output,num_fields)) - vgrd_b4_adj_output = 0 - - print*,'INTERPOLATE WINDS' - call ipolatev(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, ugrd_input, vgrd_input, & - numpts, rlat_output, rlon_output, crot, srot, ibo, bitmap_output, & - ugrd_b4_adj_output, vgrd_b4_adj_output, iret) - if (iret /= 0) goto 89 - - deallocate (ugrd_input, vgrd_input) - deallocate (crot, srot) - deallocate (ibi, ibo, bitmap_input, bitmap_output) - - return - - 89 continue - print*,"FATAL ERROR IN IPOLATES. 
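gaus_to_gaus above drives IPOLATES with the same shape of call for every field: bilinear (ip = 0), no bitmaps (ibi = 0 with all-true bitmap arrays), 2-D fields one at a time, 3-D scalars with num_fields = lev, and the two wind components together through ipolatev so both share the crot/srot rotation on the target grid. A compact Python sketch of that dispatch; horiz_scalar and horiz_vector are hypothetical wrappers around the ipolates/ipolatev calls:

```python
def regrid_all(horiz_scalar, horiz_vector, kgds_in, kgds_out, scalars, ugrd, vgrd):
    """Dispatch mirroring gaus_to_gaus: every scalar through the same (hypothetical)
    ipolates-style wrapper, the wind pair through the vector (ipolatev-style)
    wrapper so both components see the same grid rotation."""
    out = {}
    for name, field in scalars.items():   # tmp, clwmr, spfh, o3mr, dzdt, GFDL tracers, ...
        print(f"INTERPOLATE {name.upper()}")
        out[name] = horiz_scalar(kgds_in, kgds_out, field)
    print("INTERPOLATE WINDS")
    out["ugrd"], out["vgrd"] = horiz_vector(kgds_in, kgds_out, ugrd, vgrd)
    return out
```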
IRET IS: ", iret - call errexit(23) - - end subroutine gaus_to_gaus - - end module interp diff --git a/sorc/enkf_chgres_recenter.fd/makefile b/sorc/enkf_chgres_recenter.fd/makefile deleted file mode 100755 index 2a5f36b369..0000000000 --- a/sorc/enkf_chgres_recenter.fd/makefile +++ /dev/null @@ -1,27 +0,0 @@ -SHELL= /bin/sh - -LIBS= $(NEMSIO_LIB) $(BACIO_LIB4) $(W3NCO_LIBd) $(IP_LIBd) $(SP_LIBd) - -CMD= enkf_chgres_recenter.x - -OBJS = driver.o input_data.o interp.o output_data.o utils.o setup.o - -$(CMD): $(OBJS) - $(FC) $(FFLAGS) -o $(CMD) $(OBJS) $(LIBS) - -driver.o: setup.o output_data.o interp.o input_data.o driver.f90 - $(FC) $(FFLAGS) -I$(NEMSIO_INC) -c driver.f90 -interp.o: setup.o utils.o output_data.o input_data.o interp.f90 - $(FC) $(FFLAGS) -I$(NEMSIO_INC) -c interp.f90 -input_data.o: setup.o utils.o input_data.f90 - $(FC) $(FFLAGS) -I$(NEMSIO_INC) -c input_data.f90 -output_data.o: setup.o utils.o input_data.o output_data.f90 - $(FC) $(FFLAGS) -I$(NEMSIO_INC) -c output_data.f90 -setup.o: setup.f90 - $(FC) $(FFLAGS) -I$(NEMSIO_INC) -c setup.f90 -utils.o: utils.f90 - $(FC) $(FFLAGS) -I$(NEMSIO_INC) -c utils.f90 -clean: - rm -f *.o *.mod ${CMD} -install: - -cp $(CMD) ../../exec/. diff --git a/sorc/enkf_chgres_recenter.fd/output_data.f90 b/sorc/enkf_chgres_recenter.fd/output_data.f90 deleted file mode 100644 index 36063d3a06..0000000000 --- a/sorc/enkf_chgres_recenter.fd/output_data.f90 +++ /dev/null @@ -1,396 +0,0 @@ - module output_data - - use nemsio_module - - implicit none - - private - - integer, public :: kgds_output(200) - -! data on the output grid. - real, allocatable, public :: hgt_output(:) ! interpolated from input grid - real, allocatable, public :: hgt_external_output(:) - real, allocatable, public :: sfcp_output(:) - real, allocatable, public :: tmp_output(:,:) - real, allocatable, public :: clwmr_output(:,:) - real, allocatable, public :: delz_output(:,:) - real, allocatable, public :: dpres_output(:,:) - real, allocatable, public :: dzdt_output(:,:) - real, allocatable, public :: o3mr_output(:,:) - real, allocatable, public :: spfh_output(:,:) - real, allocatable, public :: ugrd_output(:,:) - real, allocatable, public :: vgrd_output(:,:) - real, allocatable, public :: rwmr_output(:,:) - real, allocatable, public :: icmr_output(:,:) - real, allocatable, public :: snmr_output(:,:) - real, allocatable, public :: grle_output(:,:) - real, allocatable, public :: cldamt_output(:,:) - real, allocatable, public :: rlat_output(:) - real, allocatable, public :: rlon_output(:) - - public :: set_output_grid - public :: write_output_data - - character(len=50), allocatable :: recname(:) - character(len=50), allocatable :: reclevtyp(:) - - integer(nemsio_intkind) :: nrec - integer(nemsio_intkind), allocatable :: reclev(:) - - real(nemsio_realkind), allocatable :: vcoord_header(:,:,:) - real(nemsio_realkind), allocatable :: lat(:), lon(:) - - contains - - subroutine set_output_grid - -!------------------------------------------------------------------- -! Set grid specs on the output grid. -!------------------------------------------------------------------- - - use setup - use input_data - use utils - - implicit none - - character(len=20) :: vlevtyp, vname - - integer(nemsio_intkind) :: vlev - integer :: iret - - real(nemsio_realkind), allocatable :: dummy(:) - - type(nemsio_gfile) :: gfile - - print* - print*,"OUTPUT GRID I/J DIMENSIONS: ", i_output, j_output - -!------------------------------------------------------------------- -! 
Set the grib 1 grid description section, which is needed -! by the IPOLATES library. -!------------------------------------------------------------------- - - kgds_output = 0 - - call calc_kgds(i_output, j_output, kgds_output) - -!------------------------------------------------------------------- -! Read the terrain on the output grid. To ensure exact match, -! read it from an existing enkf nemsio restart file. -!------------------------------------------------------------------- - - call nemsio_init(iret) - - print* - print*,"OPEN OUTPUT GRID TERRAIN FILE: ", trim(terrain_file) - call nemsio_open(gfile, terrain_file, "read", iret=iret) - if (iret /= 0) then - print*,"FATAL ERROR OPENING FILE: ",trim(terrain_file) - print*,"IRET IS: ", iret - call errexit(50) - endif - - allocate(dummy(ij_output)) - allocate(hgt_external_output(ij_output)) - - print* - print*,"READ SURFACE HEIGHT" - vlev = 1 - vlevtyp = "sfc" - vname = "hgt" - call nemsio_readrecv(gfile, vname, vlevtyp, vlev, dummy, 0, iret) - if (iret /= 0) then - print*,"FATAL ERROR READING FILE: ",trim(terrain_file) - print*,"IRET IS: ", iret - call errexit(51) - endif - - hgt_external_output = dummy - - deallocate(dummy) - - call nemsio_close(gfile, iret=iret) - - call nemsio_finalize() - - end subroutine set_output_grid - - subroutine write_output_data - -!------------------------------------------------------------------- -! Write output grid data to a nemsio file. -!------------------------------------------------------------------- - - use input_data - use setup - - implicit none - - character(len=5) :: gaction - - integer :: n, iret - - real(nemsio_realkind), allocatable :: dummy(:) - - type(nemsio_gfile) :: gfile - -!------------------------------------------------------------------- -! Set up some header info. -!------------------------------------------------------------------- - - call header_set - -!------------------------------------------------------------------- -! Open and write file. -!------------------------------------------------------------------- - - call nemsio_init(iret) - - gaction="write" - - print* - print*,'OPEN OUTPUT FILE: ',trim(output_file) - call nemsio_open(gfile, output_file, gaction, iret=iret, gdatatype="bin4", & - nmeta=8, modelname="FV3GFS", nrec=nrec, & - idate=idate, dimx=i_output, & - dimy=j_output, dimz=lev, ntrac=ntrac, & - ncldt=ncldt, idvc=idvc, idsl=idsl, idvm=idvm, & - idrt=4, recname=recname, reclevtyp=reclevtyp, & - reclev=reclev,vcoord=vcoord_header, & - lat=lat, lon=lon) - if (iret/=0) then - print*,"FATAL ERROR OPENING FILE. 
IRET IS: ", iret - call errexit(9) - endif - - deallocate(lon, lat, recname, reclevtyp, reclev, vcoord_header) - - allocate(dummy(i_output*j_output)) - - print*,"WRITE SURFACE HEIGHT" - dummy = hgt_external_output - call nemsio_writerecv(gfile, "hgt", "sfc", 1, dummy, iret=iret) - if (iret/=0) goto 88 - deallocate(hgt_external_output) - - print*,"WRITE SURFACE PRESSURE" - dummy = sfcp_output - call nemsio_writerecv(gfile, "pres", "sfc", 1, dummy, iret=iret) - if (iret/=0) goto 88 - deallocate(sfcp_output) - - print*,"WRITE TEMPERATURE" - do n = 1, lev - dummy = tmp_output(:,n) - call nemsio_writerecv(gfile, "tmp", "mid layer", n, dummy, iret=iret) - if (iret/=0) goto 88 - enddo - deallocate(tmp_output) - - print*,"WRITE CLOUD LIQUID WATER" - do n = 1, lev - dummy = clwmr_output(:,n) - call nemsio_writerecv(gfile, "clwmr", "mid layer", n, dummy, iret=iret) - if (iret/=0) goto 88 - enddo - deallocate(clwmr_output) - - print*,"WRITE SPECIFIC HUMIDITY" - do n = 1, lev - dummy = spfh_output(:,n) - call nemsio_writerecv(gfile, "spfh", "mid layer", n, dummy, iret=iret) - if (iret/=0) goto 88 - enddo - deallocate(spfh_output) - - print*,"WRITE OZONE" - do n = 1, lev - dummy = o3mr_output(:,n) - call nemsio_writerecv(gfile, "o3mr", "mid layer", n, dummy, iret=iret) - if (iret/=0) goto 88 - enddo - deallocate(o3mr_output) - - print*,"WRITE U-WINDS" - do n = 1, lev - dummy = ugrd_output(:,n) - call nemsio_writerecv(gfile, "ugrd", "mid layer", n, dummy, iret=iret) - if (iret/=0) goto 88 - enddo - deallocate(ugrd_output) - - print*,"WRITE V-WINDS" - do n = 1, lev - dummy = vgrd_output(:,n) - call nemsio_writerecv(gfile, "vgrd", "mid layer", n, dummy, iret=iret) - if (iret/=0) goto 88 - enddo - deallocate(vgrd_output) - - print*,"WRITE DZDT" - do n = 1, lev - dummy = dzdt_output(:,n) - call nemsio_writerecv(gfile, "dzdt", "mid layer", n, dummy, iret=iret) - if (iret/=0) goto 88 - enddo - deallocate(dzdt_output) - - print*,"WRITE DPRES" - do n = 1, lev - dummy = dpres_output(:,n) - call nemsio_writerecv(gfile, "dpres", "mid layer", n, dummy, iret=iret) - if (iret/=0) goto 88 - enddo - deallocate(dpres_output) - - print*,"WRITE DELZ" - do n = 1, lev - dummy = delz_output(:,n) - call nemsio_writerecv(gfile, "delz", "mid layer", n, dummy, iret=iret) - if (iret/=0) goto 88 - enddo - deallocate(delz_output) - - if (gfdl_mp) then - - print*,"WRITE RAIN WATER" - do n = 1, lev - dummy = rwmr_output(:,n) - call nemsio_writerecv(gfile, "rwmr", "mid layer", n, dummy, iret=iret) - if (iret/=0) goto 88 - enddo - deallocate(rwmr_output) - - print*,"WRITE SNOW WATER" - do n = 1, lev - dummy = snmr_output(:,n) - call nemsio_writerecv(gfile, "snmr", "mid layer", n, dummy, iret=iret) - if (iret/=0) goto 88 - enddo - deallocate(snmr_output) - - print*,"WRITE ICE WATER" - do n = 1, lev - dummy = icmr_output(:,n) - call nemsio_writerecv(gfile, "icmr", "mid layer", n, dummy, iret=iret) - if (iret/=0) goto 88 - enddo - deallocate(icmr_output) - - print*,"WRITE GRAUPEL" - do n = 1, lev - dummy = grle_output(:,n) - call nemsio_writerecv(gfile, "grle", "mid layer", n, dummy, iret=iret) - if (iret/=0) goto 88 - enddo - deallocate(grle_output) - - if (icldamt == 1) then - print*,"WRITE CLD_AMT" - do n = 1, lev - dummy = cldamt_output(:,n) - call nemsio_writerecv(gfile, "cld_amt", "mid layer", n, dummy, iret=iret) - if (iret/=0) goto 88 - enddo - deallocate(cldamt_output) - endif - - - endif - - deallocate(dummy) - - call nemsio_close(gfile, iret=iret) - - call nemsio_finalize() - - return - - 88 continue - print*,"FATAL ERROR 
WRITING FILE. IRET IS: ", iret - call errexit(10) - - end subroutine write_output_data - - subroutine header_set - -!------------------------------------------------------------------- -! Set header information for the output nemsio file. -!------------------------------------------------------------------- - - use input_data - use setup - - implicit none - - character(len=8) :: fields(9) - character(len=8) :: fields_gfdl_mp(5) - - integer :: count, l, n - -! Fields common to Zhao-Carr and GFDL microphysics - data fields /'ugrd', 'vgrd', 'dzdt', 'dpres', 'delz', & - 'tmp', 'spfh', 'clwmr', 'o3mr'/ - -! Fields for GFDL microphysics - data fields_gfdl_mp /'rwmr', 'icmr', 'snmr', 'grle', 'cld_amt'/ - - print* - print*,"SET HEADER INFO FOR OUTPUT FILE." - - if (gfdl_mp) then - nrec = ((13+icldamt) * lev) + 2 - else - nrec = (9 * lev) + 2 - endif - - allocate(recname(nrec)) - allocate(reclev(nrec)) - allocate(reclevtyp(nrec)) - - count = 0 - do n = 1, 9 - do l = 1, lev - count = count + 1 - recname(count) = fields(n) - reclev(count) = l - reclevtyp(count) = "mid layer" - enddo - enddo - - if (gfdl_mp) then - do n = 1, 4 + icldamt - do l = 1, lev - count = count + 1 - recname(count) = fields_gfdl_mp(n) - reclev(count) = l - reclevtyp(count) = "mid layer" - enddo - enddo - endif - - recname(nrec-1) = "pres" - reclev(nrec-1) = 1 - reclevtyp(nrec-1) = "sfc" - - recname(nrec) = "hgt" - reclev(nrec) = 1 - reclevtyp(nrec) = "sfc" - - allocate(vcoord_header(lev+1,3,2)) - vcoord_header = 0.0 - vcoord_header(:,1,1) = vcoord(:,1) - vcoord_header(:,2,1) = vcoord(:,2) - - allocate(lat(ij_output), lon(ij_output)) - - lat = rlat_output - lon = rlon_output - - deallocate(rlat_output, rlon_output) - - end subroutine header_set - - end module output_data diff --git a/sorc/enkf_chgres_recenter.fd/setup.f90 b/sorc/enkf_chgres_recenter.fd/setup.f90 deleted file mode 100644 index c2c2dc450e..0000000000 --- a/sorc/enkf_chgres_recenter.fd/setup.f90 +++ /dev/null @@ -1,53 +0,0 @@ - module setup - - use nemsio_module - - implicit none - - private - - character(len=300), public :: input_file - character(len=300), public :: output_file - character(len=300), public :: terrain_file - character(len=300), public :: vcoord_file - - integer(nemsio_intkind), public :: i_output - integer(nemsio_intkind), public :: j_output - integer , public :: ij_output - logical, public :: flipdelz - - public :: program_setup - - contains - - subroutine program_setup - - implicit none - - integer :: istat - - namelist /nam_setup/ i_output, j_output, input_file, output_file, & - terrain_file, vcoord_file - - print* - print*,"OPEN SETUP NAMELIST." - open(43, file="./fort.43", iostat=istat) - if (istat /= 0) then - print*,"FATAL ERROR OPENING NAMELIST FILE. ISTAT IS: ",istat - call errexit(30) - endif - - print*,"READ SETUP NAMELIST." - read(43, nml=nam_setup, iostat=istat) - if (istat /= 0) then - print*,"FATAL ERROR READING NAMELIST FILE. 
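header_set above sizes the output NEMSIO header as one record per level for every 3-D field plus two surface records (pres and hgt), i.e. nrec = 9*lev + 2 for Zhao-Carr microphysics and (13 + icldamt)*lev + 2 once the GFDL fields are included. A small Python sketch that rebuilds the same recname/reclev/reclevtyp lists under those assumptions:

```python
def build_records(lev, gfdl_mp, icldamt):
    """Reproduce the record layout from header_set: 3-D fields first, then surface."""
    fields = ["ugrd", "vgrd", "dzdt", "dpres", "delz", "tmp", "spfh", "clwmr", "o3mr"]
    if gfdl_mp:
        fields += ["rwmr", "icmr", "snmr", "grle", "cld_amt"][:4 + icldamt]
    recname, reclev, reclevtyp = [], [], []
    for name in fields:
        for level in range(1, lev + 1):
            recname.append(name)
            reclev.append(level)
            reclevtyp.append("mid layer")
    # two surface records close out the header
    recname += ["pres", "hgt"]
    reclev += [1, 1]
    reclevtyp += ["sfc", "sfc"]
    assert len(recname) == len(fields) * lev + 2   # 9*lev+2 or (13+icldamt)*lev+2
    return recname, reclev, reclevtyp
```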
ISTAT IS: ",istat - call errexit(31) - endif - - ij_output = i_output * j_output - - close(43) - - end subroutine program_setup - - end module setup diff --git a/sorc/enkf_chgres_recenter.fd/utils.f90 b/sorc/enkf_chgres_recenter.fd/utils.f90 deleted file mode 100644 index e09c75b018..0000000000 --- a/sorc/enkf_chgres_recenter.fd/utils.f90 +++ /dev/null @@ -1,783 +0,0 @@ - module utils - - private - - public :: calc_kgds - public :: newps - public :: newpr1 - public :: vintg - public :: compute_delz - - contains - - subroutine compute_delz(ijm, levp, ak_in, bk_in, ps, zs, t, sphum, delz, flipsign) - - implicit none - integer, intent(in):: levp, ijm - real, intent(in), dimension(levp+1):: ak_in, bk_in - real, intent(in), dimension(ijm):: ps, zs - real, intent(in), dimension(ijm,levp):: t - real, intent(in), dimension(ijm,levp):: sphum - real, intent(out), dimension(ijm,levp):: delz - logical, intent(in) :: flipsign -! Local: - real, dimension(ijm,levp+1):: zh - real, dimension(ijm,levp+1):: pe0, pn0 - real, dimension(levp+1) :: ak, bk - integer i,k - real, parameter :: GRAV = 9.80665 - real, parameter :: RDGAS = 287.05 - real, parameter :: RVGAS = 461.50 - real :: zvir - real:: grd - - print*,"COMPUTE LAYER THICKNESS." - - grd = grav/rdgas - zvir = rvgas/rdgas - 1. - ak = ak_in - bk = bk_in - ak(levp+1) = max(1.e-9, ak(levp+1)) - - do i=1, ijm - pe0(i,levp+1) = ak(levp+1) - pn0(i,levp+1) = log(pe0(i,levp+1)) - enddo - - do k=levp,1, -1 - do i=1,ijm - pe0(i,k) = ak(k) + bk(k)*ps(i) - pn0(i,k) = log(pe0(i,k)) - enddo - enddo - - do i = 1, ijm - zh(i,1) = zs(i) - enddo - - do k = 2, levp+1 - do i = 1, ijm - zh(i,k) = zh(i,k-1)+t(i,k-1)*(1.+zvir*sphum(i,k-1))* & - (pn0(i,k-1)-pn0(i,k))/grd - enddo - enddo - - do k = 1, levp - do i = 1, ijm - if (flipsign) then - delz(i,k) = zh(i,k) - zh(i,k+1) - else - delz(i,k) = zh(i,k+1) - zh(i,k) - end if - enddo - enddo - - end subroutine compute_delz - - subroutine calc_kgds(idim, jdim, kgds) - - use nemsio_module - - implicit none - - integer(nemsio_intkind), intent(in) :: idim, jdim - - integer, intent(out) :: kgds(200) - - kgds = 0 - kgds(1) = 4 ! OCT 6 - TYPE OF GRID (GAUSSIAN) - kgds(2) = idim ! OCT 7-8 - # PTS ON LATITUDE CIRCLE - kgds(3) = jdim ! OCT 9-10 - # PTS ON LONGITUDE CIRCLE - kgds(4) = 90000 ! OCT 11-13 - LAT OF ORIGIN - kgds(5) = 0 ! OCT 14-16 - LON OF ORIGIN - kgds(6) = 128 ! OCT 17 - RESOLUTION FLAG - kgds(7) = -90000 ! OCT 18-20 - LAT OF EXTREME POINT - kgds(8) = nint(-360000./idim) ! OCT 21-23 - LON OF EXTREME POINT - kgds(9) = nint((360.0 / float(idim))*1000.0) - ! OCT 24-25 - LONGITUDE DIRECTION INCR. - kgds(10) = jdim/2 ! OCT 26-27 - NUMBER OF CIRCLES POLE TO EQUATOR - kgds(12) = 255 ! OCT 29 - RESERVED - kgds(20) = 255 ! OCT 5 - NOT USED, SET TO 255 - - end subroutine calc_kgds - - SUBROUTINE NEWPS(ZS,PS,IM,KM,P,T,Q,ZSNEW,PSNEW) -!$$$ SUBPROGRAM DOCUMENTATION BLOCK -! -! SUBPROGRAM: NEWPS COMPUTE NEW SURFACE PRESSURE -! PRGMMR: IREDELL ORG: W/NMC23 DATE: 92-10-31 -! -! ABSTRACT: COMPUTES A NEW SURFACE PRESSURE GIVEN A NEW OROGRAPHY. -! THE NEW PRESSURE IS COMPUTED ASSUMING A HYDROSTATIC BALANCE -! AND A CONSTANT TEMPERATURE LAPSE RATE. BELOW GROUND, THE -! LAPSE RATE IS ASSUMED TO BE -6.5 K/KM. -! -! PROGRAM HISTORY LOG: -! 91-10-31 MARK IREDELL -! -! USAGE: CALL NEWPS(ZS,PS,IM,KM,P,T,Q,ZSNEW,PSNEW) -! INPUT ARGUMENT LIST: -! IM INTEGER NUMBER OF POINTS TO COMPUTE -! ZS REAL (IM) OLD OROGRAPHY (M) -! PS REAL (IM) OLD SURFACE PRESSURE (PA) -! KM INTEGER NUMBER OF LEVELS -! P REAL (IM,KM) PRESSURES (PA) -! 
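compute_delz above integrates the hypsometric equation upward from the surface using virtual temperature; the flipsign flag only decides whether the layer thickness is reported with a positive or negative sign. A sketch of the math it implements, in the code's constants (g = 9.80665, R_d = 287.05, R_v = 461.50) and with z_1 = zs at the surface:

```latex
\[
T_{v,k} = T_k\,\bigl(1 + (R_v/R_d - 1)\,q_k\bigr), \qquad
p_k = a_k + b_k\,p_s ,
\]
\[
z_{k+1} = z_k + \frac{R_d}{g}\,T_{v,k}\,\bigl(\ln p_k - \ln p_{k+1}\bigr), \qquad
\Delta z_k = \pm\,\bigl(z_{k+1} - z_k\bigr).
\]
```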
T REAL (IM,KM) TEMPERATURES (K) -! Q REAL (IM,KM) SPECIFIC HUMIDITIES (KG/KG) -! ZSNEW REAL (IM) NEW OROGRAPHY (M) -! OUTPUT ARGUMENT LIST: -! PSNEW REAL (IM) NEW SURFACE PRESSURE (PA) -! -! ATTRIBUTES: -! LANGUAGE: FORTRAN -! -!C$$$ - REAL ZS(IM),PS(IM),P(IM,KM),T(IM,KM),Q(IM,KM) - REAL ZSNEW(IM),PSNEW(IM) - PARAMETER(BETA=-6.5E-3,EPSILON=1.E-9) - PARAMETER(G=9.80665,RD=287.05,RV=461.50) - PARAMETER(GOR=G/RD,FV=RV/RD-1.) - REAL ZU(IM) - FTV(AT,AQ)=AT*(1+FV*AQ) - FGAM(APU,ATVU,APD,ATVD)=-GOR*LOG(ATVD/ATVU)/LOG(APD/APU) - FZ0(AP,ATV,AZD,APD)=AZD+ATV/GOR*LOG(APD/AP) - FZ1(AP,ATV,AZD,APD,AGAM)=AZD-ATV/AGAM*((APD/AP)**(-AGAM/GOR)-1) - FP0(AZ,AZU,APU,ATVU)=APU*EXP(-GOR/ATVU*(AZ-AZU)) - FP1(AZ,AZU,APU,ATVU,AGAM)=APU*(1+AGAM/ATVU*(AZ-AZU))**(-GOR/AGAM) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -! COMPUTE SURFACE PRESSURE BELOW THE ORIGINAL GROUND - LS=0 - K=1 - GAMMA=BETA - DO I=1,IM - PU=P(I,K) - TVU=FTV(T(I,K),Q(I,K)) - ZU(I)=FZ1(PU,TVU,ZS(I),PS(I),GAMMA) - IF(ZSNEW(I).LE.ZU(I)) THEN - PU=P(I,K) - TVU=FTV(T(I,K),Q(I,K)) - IF(ABS(GAMMA).GT.EPSILON) THEN - PSNEW(I)=FP1(ZSNEW(I),ZU(I),PU,TVU,GAMMA) - ELSE - PSNEW(I)=FP0(ZSNEW(I),ZU(I),PU,TVU) - ENDIF - ELSE - PSNEW(I)=0 - LS=LS+1 - ENDIF -! endif - ENDDO -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -! COMPUTE SURFACE PRESSURE ABOVE THE ORIGINAL GROUND - DO K=2,KM - IF(LS.GT.0) THEN - DO I=1,IM - IF(PSNEW(I).EQ.0) THEN - PU=P(I,K) - TVU=FTV(T(I,K),Q(I,K)) - PD=P(I,K-1) - TVD=FTV(T(I,K-1),Q(I,K-1)) - GAMMA=FGAM(PU,TVU,PD,TVD) - IF(ABS(GAMMA).GT.EPSILON) THEN - ZU(I)=FZ1(PU,TVU,ZU(I),PD,GAMMA) - ELSE - ZU(I)=FZ0(PU,TVU,ZU(I),PD) - ENDIF - IF(ZSNEW(I).LE.ZU(I)) THEN - IF(ABS(GAMMA).GT.EPSILON) THEN - PSNEW(I)=FP1(ZSNEW(I),ZU(I),PU,TVU,GAMMA) - ELSE - PSNEW(I)=FP0(ZSNEW(I),ZU(I),PU,TVU) - ENDIF - LS=LS-1 - ENDIF - ENDIF - ENDDO - ENDIF - ENDDO -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -! COMPUTE SURFACE PRESSURE OVER THE TOP - IF(LS.GT.0) THEN - K=KM - GAMMA=0 - DO I=1,IM - IF(PSNEW(I).EQ.0) THEN - PU=P(I,K) - TVU=FTV(T(I,K),Q(I,K)) - PSNEW(I)=FP0(ZSNEW(I),ZU(I),PU,TVU) - ENDIF - ENDDO - ENDIF - END SUBROUTINE NEWPS - - SUBROUTINE NEWPR1(IM,KM,IDVC,IDSL,NVCOORD,VCOORD, & - PS,PM,DP) -!$$$ SUBPROGRAM DOCUMENTATION BLOCK -! -! SUBPROGRAM: NEWPR1 COMPUTE MODEL PRESSURES -! PRGMMR: JUANG ORG: W/NMC23 DATE: 2005-04-11 -! PRGMMR: Fanglin Yang ORG: W/NMC23 DATE: 2006-11-28 -! PRGMMR: S. Moorthi ORG: NCEP/EMC DATE: 2006-12-12 -! PRGMMR: S. Moorthi ORG: NCEP/EMC DATE: 2007-01-02 -! -! ABSTRACT: COMPUTE MODEL PRESSURES. -! -! PROGRAM HISTORY LOG: -! 2005-04-11 HANN_MING HENRY JUANG hybrid sigma, sigma-p, and sigma- -! -! USAGE: CALL NEWPR1(IM,IX,KM,KMP,IDVC,IDSL,NVCOORD,VCOORD,PP,TP,QP,P -! INPUT ARGUMENT LIST: -! IM INTEGER NUMBER OF POINTS TO COMPUTE -! KM INTEGER NUMBER OF LEVELS -! IDVC INTEGER VERTICAL COORDINATE ID -! (1 FOR SIGMA AND 2 FOR HYBRID) -! IDSL INTEGER TYPE OF SIGMA STRUCTURE -! (1 FOR PHILLIPS OR 2 FOR MEAN) -! NVCOORD INTEGER NUMBER OF VERTICAL COORDINATES -! VCOORD REAL (KM+1,NVCOORD) VERTICAL COORDINATE VALUES -! FOR IDVC=1, NVCOORD=1: SIGMA INTERFACE -! FOR IDVC=2, NVCOORD=2: HYBRID INTERFACE A AND B -! FOR IDVC=3, NVCOORD=3: JUANG GENERAL HYBRID INTERFACE -! AK REAL (KM+1) HYBRID INTERFACE A -! BK REAL (KM+1) HYBRID INTERFACE B -! PS REAL (IX) SURFACE PRESSURE (PA) -! OUTPUT ARGUMENT LIST: -! PM REAL (IX,KM) MID-LAYER PRESSURE (PA) -! DP REAL (IX,KM) LAYER DELTA PRESSURE (PA) -! -! ATTRIBUTES: -! LANGUAGE: FORTRAN -! 
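NEWPS above relocates the surface pressure to the new orography hydrostatically: below the original ground a constant lapse rate Γ = −6.5 K/km is assumed, above it Γ is diagnosed from adjacent levels, and above the model top the column is treated as isothermal. For a new surface height z_new relative to a reference point (z_u, p_u, T_vu), the statement functions FP1/FP0 evaluate (a sketch of the formulas):

```latex
\[
p_s^{\,new} =
\begin{cases}
p_u\left(1 + \dfrac{\Gamma}{T_{vu}}\,(z_{new}-z_u)\right)^{-g/(R_d\Gamma)}, & |\Gamma| > \epsilon,\\[1.2em]
p_u\,\exp\!\left(-\dfrac{g\,(z_{new}-z_u)}{R_d\,T_{vu}}\right), & \Gamma \to 0,
\end{cases}
\qquad T_v = T\,\bigl(1 + (R_v/R_d - 1)\,q\bigr).
\]
```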
-!C$$$ - IMPLICIT NONE - - INTEGER, INTENT(IN) :: IM, KM, NVCOORD, IDVC, IDSL - - REAL, INTENT(IN) :: VCOORD(KM+1,NVCOORD) - REAL, INTENT(IN) :: PS(IM) - - REAL, INTENT(OUT) :: PM(IM,KM) - REAL, OPTIONAL, INTENT(OUT) :: DP(IM,KM) - - REAL, PARAMETER :: RD=287.05, RV=461.50, CP=1004.6, & - ROCP=RD/CP, ROCP1=ROCP+1, ROCPR=1/ROCP, & - FV=RV/RD-1. - - INTEGER :: I, K - - REAL :: AK(KM+1), BK(KM+1), PI(IM,KM+1) - - IF(IDVC.EQ.2) THEN - DO K=1,KM+1 - AK(K) = VCOORD(K,1) - BK(K) = VCOORD(K,2) - PI(1:IM,K) = AK(K) + BK(K)*PS(1:IM) - ENDDO - ELSE - print*,'routine only works for idvc 2' - stop - ENDIF - - IF(IDSL.EQ.2) THEN - DO K=1,KM - PM(1:IM,K) = (PI(1:IM,K)+PI(1:IM,K+1))/2 - ENDDO - ELSE - DO K=1,KM - PM(1:IM,K) = ((PI(1:IM,K)**ROCP1-PI(1:IM,K+1)**ROCP1)/ & - (ROCP1*(PI(1:IM,K)-PI(1:IM,K+1))))**ROCPR - ENDDO - ENDIF - - IF(PRESENT(DP))THEN - DO K=1,KM - DO I=1,IM - DP(I,K) = PI(I,K) - PI(I,K+1) - ENDDO - ENDDO - ENDIF - - END SUBROUTINE NEWPR1 - - SUBROUTINE TERP3(IM,IXZ1,IXQ1,IXZ2,IXQ2,NM,NXQ1,NXQ2, & - KM1,KXZ1,KXQ1,Z1,Q1,KM2,KXZ2,KXQ2,Z2,Q2,J2) -!$$$ SUBPROGRAM DOCUMENTATION BLOCK -! -! SUBPROGRAM: TERP3 CUBICALLY INTERPOLATE IN ONE DIMENSION -! PRGMMR: IREDELL ORG: W/NMC23 DATE: 98-05-01 -! -! ABSTRACT: INTERPOLATE FIELD(S) IN ONE DIMENSION ALONG THE COLUMN(S). -! THE INTERPOLATION IS CUBIC LAGRANGIAN WITH A MONOTONIC CONSTRAINT -! IN THE CENTER OF THE DOMAIN. IN THE OUTER INTERVALS IT IS LINEAR. -! OUTSIDE THE DOMAIN, FIELDS ARE HELD CONSTANT. -! -! PROGRAM HISTORY LOG: -! 98-05-01 MARK IREDELL -! 1999-01-04 IREDELL USE ESSL SEARCH -! -! USAGE: CALL TERP3(IM,IXZ1,IXQ1,IXZ2,IXQ2,NM,NXQ1,NXQ2, -! & KM1,KXZ1,KXQ1,Z1,Q1,KM2,KXZ2,KXQ2,Z2,Q2,J2) -! INPUT ARGUMENT LIST: -! IM INTEGER NUMBER OF COLUMNS -! IXZ1 INTEGER COLUMN SKIP NUMBER FOR Z1 -! IXQ1 INTEGER COLUMN SKIP NUMBER FOR Q1 -! IXZ2 INTEGER COLUMN SKIP NUMBER FOR Z2 -! IXQ2 INTEGER COLUMN SKIP NUMBER FOR Q2 -! NM INTEGER NUMBER OF FIELDS PER COLUMN -! NXQ1 INTEGER FIELD SKIP NUMBER FOR Q1 -! NXQ2 INTEGER FIELD SKIP NUMBER FOR Q2 -! KM1 INTEGER NUMBER OF INPUT POINTS -! KXZ1 INTEGER POINT SKIP NUMBER FOR Z1 -! KXQ1 INTEGER POINT SKIP NUMBER FOR Q1 -! Z1 REAL (1+(IM-1)*IXZ1+(KM1-1)*KXZ1) -! INPUT COORDINATE VALUES IN WHICH TO INTERPOLATE -! (Z1 MUST BE STRICTLY MONOTONIC IN EITHER DIRECTION) -! Q1 REAL (1+(IM-1)*IXQ1+(KM1-1)*KXQ1+(NM-1)*NXQ1) -! INPUT FIELDS TO INTERPOLATE -! KM2 INTEGER NUMBER OF OUTPUT POINTS -! KXZ2 INTEGER POINT SKIP NUMBER FOR Z2 -! KXQ2 INTEGER POINT SKIP NUMBER FOR Q2 -! Z2 REAL (1+(IM-1)*IXZ2+(KM2-1)*KXZ2) -! OUTPUT COORDINATE VALUES TO WHICH TO INTERPOLATE -! (Z2 NEED NOT BE MONOTONIC) -! -! OUTPUT ARGUMENT LIST: -! Q2 REAL (1+(IM-1)*IXQ2+(KM2-1)*KXQ2+(NM-1)*NXQ2) -! OUTPUT INTERPOLATED FIELDS -! J2 REAL (1+(IM-1)*IXQ2+(KM2-1)*KXQ2+(NM-1)*NXQ2) -! OUTPUT INTERPOLATED FIELDS CHANGE WRT Z2 -! -! SUBPROGRAMS CALLED: -! RSEARCH SEARCH FOR A SURROUNDING REAL INTERVAL -! -! ATTRIBUTES: -! LANGUAGE: FORTRAN -! -!C$$$ - IMPLICIT NONE - INTEGER IM,IXZ1,IXQ1,IXZ2,IXQ2,NM,NXQ1,NXQ2 - INTEGER KM1,KXZ1,KXQ1,KM2,KXZ2,KXQ2 - INTEGER I,K1,K2,N - REAL Z1(1+(IM-1)*IXZ1+(KM1-1)*KXZ1) - REAL Q1(1+(IM-1)*IXQ1+(KM1-1)*KXQ1+(NM-1)*NXQ1) - REAL Z2(1+(IM-1)*IXZ2+(KM2-1)*KXZ2) - REAL Q2(1+(IM-1)*IXQ2+(KM2-1)*KXQ2+(NM-1)*NXQ2) - REAL J2(1+(IM-1)*IXQ2+(KM2-1)*KXQ2+(NM-1)*NXQ2) - REAL FFA(IM),FFB(IM),FFC(IM),FFD(IM) - REAL GGA(IM),GGB(IM),GGC(IM),GGD(IM) - INTEGER K1S(IM,KM2) - REAL Z1A,Z1B,Z1C,Z1D,Q1A,Q1B,Q1C,Q1D,Z2S,Q2S,J2S -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -! 
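NEWPR1 above only handles the hybrid coordinate (idvc = 2): interface pressures come from the ak/bk pairs, and the mid-layer pressure is either the plain interface mean (idsl = 2) or the Phillips definition otherwise, with κ = R_d/c_p. In formula form, writing p⁻ and p⁺ for the lower and upper interface pressures of layer k:

```latex
\[
p^{\pm}_k = a^{\pm}_k + b^{\pm}_k\,p_s, \qquad \Delta p_k = p^{-}_k - p^{+}_k,
\]
\[
p^{mid}_k = \tfrac12\bigl(p^{-}_k + p^{+}_k\bigr) \;\;(\mathrm{idsl}=2)
\quad\text{or}\quad
p^{mid}_k = \left[\frac{(p^{-}_k)^{\kappa+1} - (p^{+}_k)^{\kappa+1}}
{(\kappa+1)\bigl(p^{-}_k - p^{+}_k\bigr)}\right]^{1/\kappa}.
\]
```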
FIND THE SURROUNDING INPUT INTERVAL FOR EACH OUTPUT POINT. - CALL RSEARCH(IM,KM1,IXZ1,KXZ1,Z1,KM2,IXZ2,KXZ2,Z2,1,IM,K1S) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -! GENERALLY INTERPOLATE CUBICALLY WITH MONOTONIC CONSTRAINT -! FROM TWO NEAREST INPUT POINTS ON EITHER SIDE OF THE OUTPUT POINT, -! BUT WITHIN THE TWO EDGE INTERVALS INTERPOLATE LINEARLY. -! KEEP THE OUTPUT FIELDS CONSTANT OUTSIDE THE INPUT DOMAIN. - -!!$OMP PARALLEL DO DEFAULT(PRIVATE) SHARED(IM,IXZ1,IXQ1,IXZ2), & -!!$OMP& SHARED(IXQ2,NM,NXQ1,NXQ2,KM1,KXZ1,KXQ1,Z1,Q1,KM2,KXZ2), & -!!$OMP& SHARED(KXQ2,Z2,Q2,J2,K1S) - - DO K2=1,KM2 - DO I=1,IM - K1=K1S(I,K2) - IF(K1.EQ.1.OR.K1.EQ.KM1-1) THEN - Z2S=Z2(1+(I-1)*IXZ2+(K2-1)*KXZ2) - Z1A=Z1(1+(I-1)*IXZ1+(K1-1)*KXZ1) - Z1B=Z1(1+(I-1)*IXZ1+(K1+0)*KXZ1) - FFA(I)=(Z2S-Z1B)/(Z1A-Z1B) - FFB(I)=(Z2S-Z1A)/(Z1B-Z1A) - GGA(I)=1/(Z1A-Z1B) - GGB(I)=1/(Z1B-Z1A) - ELSEIF(K1.GT.1.AND.K1.LT.KM1-1) THEN - Z2S=Z2(1+(I-1)*IXZ2+(K2-1)*KXZ2) - Z1A=Z1(1+(I-1)*IXZ1+(K1-2)*KXZ1) - Z1B=Z1(1+(I-1)*IXZ1+(K1-1)*KXZ1) - Z1C=Z1(1+(I-1)*IXZ1+(K1+0)*KXZ1) - Z1D=Z1(1+(I-1)*IXZ1+(K1+1)*KXZ1) - FFA(I)=(Z2S-Z1B)/(Z1A-Z1B)* & - (Z2S-Z1C)/(Z1A-Z1C)* & - (Z2S-Z1D)/(Z1A-Z1D) - FFB(I)=(Z2S-Z1A)/(Z1B-Z1A)* & - (Z2S-Z1C)/(Z1B-Z1C)* & - (Z2S-Z1D)/(Z1B-Z1D) - FFC(I)=(Z2S-Z1A)/(Z1C-Z1A)* & - (Z2S-Z1B)/(Z1C-Z1B)* & - (Z2S-Z1D)/(Z1C-Z1D) - FFD(I)=(Z2S-Z1A)/(Z1D-Z1A)* & - (Z2S-Z1B)/(Z1D-Z1B)* & - (Z2S-Z1C)/(Z1D-Z1C) - GGA(I)= 1/(Z1A-Z1B)* & - (Z2S-Z1C)/(Z1A-Z1C)* & - (Z2S-Z1D)/(Z1A-Z1D)+ & - (Z2S-Z1B)/(Z1A-Z1B)* & - 1/(Z1A-Z1C)* & - (Z2S-Z1D)/(Z1A-Z1D)+ & - (Z2S-Z1B)/(Z1A-Z1B)* & - (Z2S-Z1C)/(Z1A-Z1C)* & - 1/(Z1A-Z1D) - GGB(I)= 1/(Z1B-Z1A)* & - (Z2S-Z1C)/(Z1B-Z1C)* & - (Z2S-Z1D)/(Z1B-Z1D)+ & - (Z2S-Z1A)/(Z1B-Z1A)* & - 1/(Z1B-Z1C)* & - (Z2S-Z1D)/(Z1B-Z1D)+ & - (Z2S-Z1A)/(Z1B-Z1A)* & - (Z2S-Z1C)/(Z1B-Z1C)* & - 1/(Z1B-Z1D) - GGC(I)= 1/(Z1C-Z1A)* & - (Z2S-Z1B)/(Z1C-Z1B)* & - (Z2S-Z1D)/(Z1C-Z1D)+ & - (Z2S-Z1A)/(Z1C-Z1A)* & - 1/(Z1C-Z1B)* & - (Z2S-Z1D)/(Z1C-Z1D)+ & - (Z2S-Z1A)/(Z1C-Z1A)* & - (Z2S-Z1B)/(Z1C-Z1B)* & - 1/(Z1C-Z1D) - GGD(I)= 1/(Z1D-Z1A)* & - (Z2S-Z1B)/(Z1D-Z1B)* & - (Z2S-Z1C)/(Z1D-Z1C)+ & - (Z2S-Z1A)/(Z1D-Z1A)* & - 1/(Z1D-Z1B)* & - (Z2S-Z1C)/(Z1D-Z1C)+ & - (Z2S-Z1A)/(Z1D-Z1A)* & - (Z2S-Z1B)/(Z1D-Z1B)* & - 1/(Z1D-Z1C) - ENDIF - ENDDO -! INTERPOLATE. - DO N=1,NM - DO I=1,IM - K1=K1S(I,K2) - IF(K1.EQ.0) THEN - Q2S=Q1(1+(I-1)*IXQ1+(N-1)*NXQ1) - J2S=0 - ELSEIF(K1.EQ.KM1) THEN - Q2S=Q1(1+(I-1)*IXQ1+(KM1-1)*KXQ1+(N-1)*NXQ1) - J2S=0 - ELSEIF(K1.EQ.1.OR.K1.EQ.KM1-1) THEN - Q1A=Q1(1+(I-1)*IXQ1+(K1-1)*KXQ1+(N-1)*NXQ1) - Q1B=Q1(1+(I-1)*IXQ1+(K1+0)*KXQ1+(N-1)*NXQ1) - Q2S=FFA(I)*Q1A+FFB(I)*Q1B - J2S=GGA(I)*Q1A+GGB(I)*Q1B - ELSE - Q1A=Q1(1+(I-1)*IXQ1+(K1-2)*KXQ1+(N-1)*NXQ1) - Q1B=Q1(1+(I-1)*IXQ1+(K1-1)*KXQ1+(N-1)*NXQ1) - Q1C=Q1(1+(I-1)*IXQ1+(K1+0)*KXQ1+(N-1)*NXQ1) - Q1D=Q1(1+(I-1)*IXQ1+(K1+1)*KXQ1+(N-1)*NXQ1) - Q2S=FFA(I)*Q1A+FFB(I)*Q1B+FFC(I)*Q1C+FFD(I)*Q1D - J2S=GGA(I)*Q1A+GGB(I)*Q1B+GGC(I)*Q1C+GGD(I)*Q1D - IF(Q2S.LT.MIN(Q1B,Q1C)) THEN - Q2S=MIN(Q1B,Q1C) - J2S=0 - ELSEIF(Q2S.GT.MAX(Q1B,Q1C)) THEN - Q2S=MAX(Q1B,Q1C) - J2S=0 - ENDIF - ENDIF - Q2(1+(I-1)*IXQ2+(K2-1)*KXQ2+(N-1)*NXQ2)=Q2S - J2(1+(I-1)*IXQ2+(K2-1)*KXQ2+(N-1)*NXQ2)=J2S - ENDDO - ENDDO - ENDDO -!!$OMP END PARALLEL DO - - END SUBROUTINE TERP3 - - SUBROUTINE RSEARCH(IM,KM1,IXZ1,KXZ1,Z1,KM2,IXZ2,KXZ2,Z2,IXL2,KXL2,& - L2) -!$$$ SUBPROGRAM DOCUMENTATION BLOCK -! -! SUBPROGRAM: RSEARCH SEARCH FOR A SURROUNDING REAL INTERVAL -! PRGMMR: IREDELL ORG: W/NMC23 DATE: 98-05-01 -! -! ABSTRACT: THIS SUBPROGRAM SEARCHES MONOTONIC SEQUENCES OF REAL NUMBERS -! 
FOR INTERVALS THAT SURROUND A GIVEN SEARCH SET OF REAL NUMBERS. -! THE SEQUENCES MAY BE MONOTONIC IN EITHER DIRECTION; THE REAL NUMBERS -! MAY BE SINGLE OR DOUBLE PRECISION; THE INPUT SEQUENCES AND SETS -! AND THE OUTPUT LOCATIONS MAY BE ARBITRARILY DIMENSIONED. -! -! PROGRAM HISTORY LOG: -! 1999-01-05 MARK IREDELL -! -! USAGE: CALL RSEARCH(IM,KM1,IXZ1,KXZ1,Z1,KM2,IXZ2,KXZ2,Z2,IXL2,KXL2, -! & L2) -! INPUT ARGUMENT LIST: -! IM INTEGER NUMBER OF SEQUENCES TO SEARCH -! KM1 INTEGER NUMBER OF POINTS IN EACH SEQUENCE -! IXZ1 INTEGER SEQUENCE SKIP NUMBER FOR Z1 -! KXZ1 INTEGER POINT SKIP NUMBER FOR Z1 -! Z1 REAL (1+(IM-1)*IXZ1+(KM1-1)*KXZ1) -! SEQUENCE VALUES TO SEARCH -! (Z1 MUST BE MONOTONIC IN EITHER DIRECTION) -! KM2 INTEGER NUMBER OF POINTS TO SEARCH FOR -! IN EACH RESPECTIVE SEQUENCE -! IXZ2 INTEGER SEQUENCE SKIP NUMBER FOR Z2 -! KXZ2 INTEGER POINT SKIP NUMBER FOR Z2 -! Z2 REAL (1+(IM-1)*IXZ2+(KM2-1)*KXZ2) -! SET OF VALUES TO SEARCH FOR -! (Z2 NEED NOT BE MONOTONIC) -! IXL2 INTEGER SEQUENCE SKIP NUMBER FOR L2 -! KXL2 INTEGER POINT SKIP NUMBER FOR L2 -! -! OUTPUT ARGUMENT LIST: -! L2 INTEGER (1+(IM-1)*IXL2+(KM2-1)*KXL2) -! INTERVAL LOCATIONS HAVING VALUES FROM 0 TO KM1 -! (Z2 WILL BE BETWEEN Z1(L2) AND Z1(L2+1)) -! -! SUBPROGRAMS CALLED: -! SBSRCH ESSL BINARY SEARCH -! DBSRCH ESSL BINARY SEARCH -! -! REMARKS: -! IF THE ARRAY Z1 IS DIMENSIONED (IM,KM1), THEN THE SKIP NUMBERS ARE -! IXZ1=1 AND KXZ1=IM; IF IT IS DIMENSIONED (KM1,IM), THEN THE SKIP -! NUMBERS ARE IXZ1=KM1 AND KXZ1=1; IF IT IS DIMENSIONED (IM,JM,KM1), -! THEN THE SKIP NUMBERS ARE IXZ1=1 AND KXZ1=IM*JM; ETCETERA. -! SIMILAR EXAMPLES APPLY TO THE SKIP NUMBERS FOR Z2 AND L2. -! -! RETURNED VALUES OF 0 OR KM1 INDICATE THAT THE GIVEN SEARCH VALUE -! IS OUTSIDE THE RANGE OF THE SEQUENCE. -! -! IF A SEARCH VALUE IS IDENTICAL TO ONE OF THE SEQUENCE VALUES -! THEN THE LOCATION RETURNED POINTS TO THE IDENTICAL VALUE. -! IF THE SEQUENCE IS NOT STRICTLY MONOTONIC AND A SEARCH VALUE IS -! IDENTICAL TO MORE THAN ONE OF THE SEQUENCE VALUES, THEN THE -! LOCATION RETURNED MAY POINT TO ANY OF THE IDENTICAL VALUES. -! -! TO BE EXACT, FOR EACH I FROM 1 TO IM AND FOR EACH K FROM 1 TO KM2, -! Z=Z2(1+(I-1)*IXZ2+(K-1)*KXZ2) IS THE SEARCH VALUE AND -! L=L2(1+(I-1)*IXL2+(K-1)*KXL2) IS THE LOCATION RETURNED. -! IF L=0, THEN Z IS LESS THAN THE START POINT Z1(1+(I-1)*IXZ1) -! FOR ASCENDING SEQUENCES (OR GREATER THAN FOR DESCENDING SEQUENCES). -! IF L=KM1, THEN Z IS GREATER THAN OR EQUAL TO THE END POINT -! Z1(1+(I-1)*IXZ1+(KM1-1)*KXZ1) FOR ASCENDING SEQUENCES -! (OR LESS THAN OR EQUAL TO FOR DESCENDING SEQUENCES). -! OTHERWISE Z IS BETWEEN THE VALUES Z1(1+(I-1)*IXZ1+(L-1)*KXZ1) AND -! Z1(1+(I-1)*IXZ1+(L-0)*KXZ1) AND MAY EQUAL THE FORMER. -! -! ATTRIBUTES: -! LANGUAGE: FORTRAN -! -!C$$$ -! IMPLICIT NONE -! INTEGER,INTENT(IN):: IM,KM1,IXZ1,KXZ1,KM2,IXZ2,KXZ2,IXL2,KXL2 -! REAL,INTENT(IN):: Z1(1+(IM-1)*IXZ1+(KM1-1)*KXZ1) -! REAL,INTENT(IN):: Z2(1+(IM-1)*IXZ2+(KM2-1)*KXZ2) -! INTEGER,INTENT(OUT):: L2(1+(IM-1)*IXL2+(KM2-1)*KXL2) -! INTEGER(4) INCX,N,INCY,M,INDX(KM2),RC(KM2),IOPT -! INTEGER I,K2 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -! FIND THE SURROUNDING INPUT INTERVAL FOR EACH OUTPUT POINT. -! DO I=1,IM -! IF(Z1(1+(I-1)*IXZ1).LE.Z1(1+(I-1)*IXZ1+(KM1-1)*KXZ1)) THEN -! INPUT COORDINATE IS MONOTONICALLY ASCENDING. -! INCX=KXZ2 -! N=KM2 -! INCY=KXZ1 -! M=KM1 -! IOPT=1 -! IF(DIGITS(1.).LT.DIGITS(1._8)) THEN -! CALL SBSRCH(Z2(1+(I-1)*IXZ2),INCX,N, -! & Z1(1+(I-1)*IXZ1),INCY,M,INDX,RC,IOPT) -! ELSE -! 
CALL DBSRCH(Z2(1+(I-1)*IXZ2),INCX,N, -! & Z1(1+(I-1)*IXZ1),INCY,M,INDX,RC,IOPT) -! ENDIF -! DO K2=1,KM2 -! L2(1+(I-1)*IXL2+(K2-1)*KXL2)=INDX(K2)-RC(K2) -! ENDDO -! ELSE -! INPUT COORDINATE IS MONOTONICALLY DESCENDING. -! INCX=KXZ2 -! N=KM2 -! INCY=-KXZ1 -! M=KM1 -! IOPT=0 -! IF(DIGITS(1.).LT.DIGITS(1._8)) THEN -! CALL SBSRCH(Z2(1+(I-1)*IXZ2),INCX,N, -! & Z1(1+(I-1)*IXZ1),INCY,M,INDX,RC,IOPT) -! ELSE -! CALL DBSRCH(Z2(1+(I-1)*IXZ2),INCX,N, -! & Z1(1+(I-1)*IXZ1),INCY,M,INDX,RC,IOPT) -! ENDIF -! DO K2=1,KM2 -! L2(1+(I-1)*IXL2+(K2-1)*KXL2)=KM1+1-INDX(K2) -! ENDDO -! ENDIF -! ENDDO -! - IMPLICIT NONE - INTEGER,INTENT(IN):: IM,KM1,IXZ1,KXZ1,KM2,IXZ2,KXZ2,IXL2,KXL2 - REAL,INTENT(IN):: Z1(1+(IM-1)*IXZ1+(KM1-1)*KXZ1) - REAL,INTENT(IN):: Z2(1+(IM-1)*IXZ2+(KM2-1)*KXZ2) - INTEGER,INTENT(OUT):: L2(1+(IM-1)*IXL2+(KM2-1)*KXL2) - INTEGER I,K2,L - REAL Z -!C - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -!C FIND THE SURROUNDING INPUT INTERVAL FOR EACH OUTPUT POINT. - DO I=1,IM - IF(Z1(1+(I-1)*IXZ1).LE.Z1(1+(I-1)*IXZ1+(KM1-1)*KXZ1)) THEN -!C INPUT COORDINATE IS MONOTONICALLY ASCENDING. - DO K2=1,KM2 - Z=Z2(1+(I-1)*IXZ2+(K2-1)*KXZ2) - L=0 - DO - IF(Z.LT.Z1(1+(I-1)*IXZ1+L*KXZ1)) EXIT - L=L+1 - IF(L.EQ.KM1) EXIT - ENDDO - L2(1+(I-1)*IXL2+(K2-1)*KXL2)=L - ENDDO - ELSE -!C INPUT COORDINATE IS MONOTONICALLY DESCENDING. - DO K2=1,KM2 - Z=Z2(1+(I-1)*IXZ2+(K2-1)*KXZ2) - L=0 - DO - IF(Z.GT.Z1(1+(I-1)*IXZ1+L*KXZ1)) EXIT - L=L+1 - IF(L.EQ.KM1) EXIT - ENDDO - L2(1+(I-1)*IXL2+(K2-1)*KXL2)=L - ENDDO - ENDIF - ENDDO - - END SUBROUTINE RSEARCH - - SUBROUTINE VINTG(IM,KM1,KM2,NT,P1,U1,V1,T1,Q1,W1,P2, & - U2,V2,T2,Q2,W2) -!$$$ SUBPROGRAM DOCUMENTATION BLOCK -! -! SUBPROGRAM: VINTG VERTICALLY INTERPOLATE UPPER-AIR FIELDS -! PRGMMR: IREDELL ORG: W/NMC23 DATE: 92-10-31 -! -! ABSTRACT: VERTICALLY INTERPOLATE UPPER-AIR FIELDS. -! WIND, TEMPERATURE, HUMIDITY AND OTHER TRACERS ARE INTERPOLATED. -! THE INTERPOLATION IS CUBIC LAGRANGIAN IN LOG PRESSURE -! WITH A MONOTONIC CONSTRAINT IN THE CENTER OF THE DOMAIN. -! IN THE OUTER INTERVALS IT IS LINEAR IN LOG PRESSURE. -! OUTSIDE THE DOMAIN, FIELDS ARE GENERALLY HELD CONSTANT, -! EXCEPT FOR TEMPERATURE AND HUMIDITY BELOW THE INPUT DOMAIN, -! WHERE THE TEMPERATURE LAPSE RATE IS HELD FIXED AT -6.5 K/KM AND -! THE RELATIVE HUMIDITY IS HELD CONSTANT. -! -! PROGRAM HISTORY LOG: -! 91-10-31 MARK IREDELL -! -! USAGE: CALL VINTG(IM,KM1,KM2,NT,P1,U1,V1,T1,Q1,P2, -! & U2,V2,T2,Q2) -! INPUT ARGUMENT LIST: -! IM INTEGER NUMBER OF POINTS TO COMPUTE -! KM1 INTEGER NUMBER OF INPUT LEVELS -! KM2 INTEGER NUMBER OF OUTPUT LEVELS -! NT INTEGER NUMBER OF TRACERS -! P1 REAL (IM,KM1) INPUT PRESSURES -! ORDERED FROM BOTTOM TO TOP OF ATMOSPHERE -! U1 REAL (IM,KM1) INPUT ZONAL WIND -! V1 REAL (IM,KM1) INPUT MERIDIONAL WIND -! T1 REAL (IM,KM1) INPUT TEMPERATURE (K) -! Q1 REAL (IM,KM1,NT) INPUT TRACERS (HUMIDITY FIRST) -! P2 REAL (IM,KM2) OUTPUT PRESSURES -! OUTPUT ARGUMENT LIST: -! U2 REAL (IM,KM2) OUTPUT ZONAL WIND -! V2 REAL (IM,KM2) OUTPUT MERIDIONAL WIND -! T2 REAL (IM,KM2) OUTPUT TEMPERATURE (K) -! Q2 REAL (IM,KM2,NT) OUTPUT TRACERS (HUMIDITY FIRST) -! -! SUBPROGRAMS CALLED: -! TERP3 CUBICALLY INTERPOLATE IN ONE DIMENSION -! -! ATTRIBUTES: -! LANGUAGE: FORTRAN -! 
-!C$$$ - IMPLICIT NONE - - INTEGER, INTENT(IN) :: IM, KM1, KM2, NT - - REAL, INTENT(IN) :: P1(IM,KM1),U1(IM,KM1),V1(IM,KM1) - REAL, INTENT(IN) :: T1(IM,KM1),Q1(IM,KM1,NT) - REAL, INTENT(IN) :: W1(IM,KM1),P2(IM,KM2) - REAL, INTENT(OUT) :: U2(IM,KM2),V2(IM,KM2) - REAL, INTENT(OUT) :: T2(IM,KM2),Q2(IM,KM2,NT) - REAL, INTENT(OUT) :: W2(IM,KM2) - - REAL, PARAMETER :: DLTDZ=-6.5E-3*287.05/9.80665 - REAL, PARAMETER :: DLPVDRT=-2.5E6/461.50 - - INTEGER :: I, K, N - - REAL :: DZ - REAL,ALLOCATABLE :: Z1(:,:),Z2(:,:) - REAL,ALLOCATABLE :: C1(:,:,:),C2(:,:,:),J2(:,:,:) - - ALLOCATE (Z1(IM+1,KM1),Z2(IM+1,KM2)) - ALLOCATE (C1(IM+1,KM1,4+NT),C2(IM+1,KM2,4+NT),J2(IM+1,KM2,4+NT)) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -! COMPUTE LOG PRESSURE INTERPOLATING COORDINATE -! AND COPY INPUT WIND, TEMPERATURE, HUMIDITY AND OTHER TRACERS -!$OMP PARALLEL DO PRIVATE(K,I) - DO K=1,KM1 - DO I=1,IM - Z1(I,K) = -LOG(P1(I,K)) - C1(I,K,1) = U1(I,K) - C1(I,K,2) = V1(I,K) - C1(I,K,3) = W1(I,K) - C1(I,K,4) = T1(I,K) - C1(I,K,5) = Q1(I,K,1) - ENDDO - ENDDO -!$OMP END PARALLEL DO - DO N=2,NT - DO K=1,KM1 - DO I=1,IM - C1(I,K,4+N) = Q1(I,K,N) - ENDDO - ENDDO - ENDDO -!$OMP PARALLEL DO PRIVATE(K,I) - DO K=1,KM2 - DO I=1,IM - Z2(I,K) = -LOG(P2(I,K)) - ENDDO - ENDDO -!$OMP END PARALLEL DO -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -! PERFORM LAGRANGIAN ONE-DIMENSIONAL INTERPOLATION -! THAT IS 4TH-ORDER IN INTERIOR, 2ND-ORDER IN OUTSIDE INTERVALS -! AND 1ST-ORDER FOR EXTRAPOLATION. - CALL TERP3(IM,1,1,1,1,4+NT,(IM+1)*KM1,(IM+1)*KM2, & - KM1,IM+1,IM+1,Z1,C1,KM2,IM+1,IM+1,Z2,C2,J2) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -! COPY OUTPUT WIND, TEMPERATURE, HUMIDITY AND OTHER TRACERS -! EXCEPT BELOW THE INPUT DOMAIN, LET TEMPERATURE INCREASE WITH A FIXED -! LAPSE RATE AND LET THE RELATIVE HUMIDITY REMAIN CONSTANT. - DO K=1,KM2 - DO I=1,IM - U2(I,K)=C2(I,K,1) - V2(I,K)=C2(I,K,2) - W2(I,K)=C2(I,K,3) - DZ=Z2(I,K)-Z1(I,1) - IF(DZ.GE.0) THEN - T2(I,K)=C2(I,K,4) - Q2(I,K,1)=C2(I,K,5) - ELSE - T2(I,K)=T1(I,1)*EXP(DLTDZ*DZ) - Q2(I,K,1)=Q1(I,1,1)*EXP(DLPVDRT*(1/T2(I,K)-1/T1(I,1))-DZ) - ENDIF - ENDDO - ENDDO - DO N=2,NT - DO K=1,KM2 - DO I=1,IM - Q2(I,K,N)=C2(I,K,4+N) - ENDDO - ENDDO - ENDDO - DEALLOCATE (Z1,Z2,C1,C2,J2) - END SUBROUTINE VINTG - end module utils diff --git a/sorc/enkf_chgres_recenter_nc.fd/driver.f90 b/sorc/enkf_chgres_recenter_nc.fd/driver.f90 deleted file mode 100644 index 1ec7c70f03..0000000000 --- a/sorc/enkf_chgres_recenter_nc.fd/driver.f90 +++ /dev/null @@ -1,67 +0,0 @@ -!!! based on chgres_recenter -!!! cory.r.martin@noaa.gov 2019-09-27 - program regrid - - use setup, only : program_setup - use interp, only : gaus_to_gaus, adjust_for_terrain - use input_data, only : read_input_data, & - read_vcoord_info - use output_data, only : set_output_grid, write_output_data - - implicit none - - call w3tagb('ENKF_CHGRES_RECENTER_NCIO',2019,0270,0085,'NP20') - - print*,"STARTING PROGRAM" - -!-------------------------------------------------------- -! Read configuration namelist. -!-------------------------------------------------------- - - call program_setup - -!-------------------------------------------------------- -! Read input grid data -!-------------------------------------------------------- - - call read_input_data - -!-------------------------------------------------------- -! 
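VINTG above interpolates every column in ζ = −ln p: cubic Lagrangian with a monotonic limiter in the interior, linear in the two edge intervals, and fields held constant outside the input domain, except below the bottom input level, where temperature follows a fixed lapse rate and the first tracer (specific humidity) is rescaled to hold relative humidity approximately constant. With Δζ = ζ − ζ_1 < 0 and the code's constants DLTDZ and DLPVDRT, the below-ground extrapolation is (a sketch):

```latex
\[
\zeta = -\ln p, \qquad \Delta\zeta = \zeta - \zeta_1 < 0,
\]
\[
T_2 = T_1\,e^{\,c_T\,\Delta\zeta}, \quad c_T = -\frac{6.5\times10^{-3}\,R_d}{g},
\qquad
q_2 = q_1\,\exp\!\left[\,c_q\Bigl(\tfrac{1}{T_2}-\tfrac{1}{T_1}\Bigr) - \Delta\zeta\,\right],
\quad c_q = -\frac{2.5\times10^{6}}{R_v}.
\]
```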
Read vertical coordinate info -!-------------------------------------------------------- - - call read_vcoord_info - -!-------------------------------------------------------- -! Get output grid specs -!-------------------------------------------------------- - - call set_output_grid - -!-------------------------------------------------------- -! Interpolate data to output grid -!-------------------------------------------------------- - - call gaus_to_gaus - -!-------------------------------------------------------- -! Adjust output fields for differences between -! interpolated and external terrain. -!-------------------------------------------------------- - - call adjust_for_terrain - -!-------------------------------------------------------- -! Write output data to file. -!-------------------------------------------------------- - - call write_output_data - - print* - print*,"PROGRAM FINISHED NORMALLY!" - - call w3tage('ENKF_CHGRES_RECENTER_NCIO') - - stop - - end program regrid diff --git a/sorc/enkf_chgres_recenter_nc.fd/input_data.f90 b/sorc/enkf_chgres_recenter_nc.fd/input_data.f90 deleted file mode 100644 index b77fe26b3e..0000000000 --- a/sorc/enkf_chgres_recenter_nc.fd/input_data.f90 +++ /dev/null @@ -1,345 +0,0 @@ - module input_data - - use utils - use setup - use module_ncio - - implicit none - - private - - integer, public :: idvc, idsl, idvm, nvcoord - integer, public :: nvcoord_input, ntrac, ncldt - integer, public :: ij_input, kgds_input(200) - integer, public :: i_input, j_input, lev, lev_output - integer, public :: idate(6) - integer, public :: icldamt, iicmr, & - idelz,idpres,idzdt, & - irwmr,isnmr,igrle - - - real, allocatable, public :: vcoord(:,:) - real, allocatable, public :: vcoord_input(:,:) - real, allocatable, public :: clwmr_input(:,:) - real, allocatable, public :: dzdt_input(:,:) - real, allocatable, public :: grle_input(:,:) - real, allocatable, public :: cldamt_input(:,:) - real, allocatable, public :: hgt_input(:) - real, allocatable, public :: icmr_input(:,:) - real, allocatable, public :: o3mr_input(:,:) - real, allocatable, public :: rwmr_input(:,:) - real, allocatable, public :: sfcp_input(:) - real, allocatable, public :: snmr_input(:,:) - real, allocatable, public :: spfh_input(:,:) - real, allocatable, public :: tmp_input(:,:) - real, allocatable, public :: ugrd_input(:,:) - real, allocatable, public :: vgrd_input(:,:) - real :: missing_value=1.e30 - - public :: read_input_data - public :: read_vcoord_info - - contains - - subroutine read_input_data - -!------------------------------------------------------------------------------------- -! Read input grid data from a netcdf file. -!------------------------------------------------------------------------------------- - - implicit none - - integer :: vlev,rvlev - type(Dataset) :: indset - type(Dimension) :: ncdim - real, allocatable :: work2d(:,:),work3d(:,:,:) - integer iret, k, kk - real, allocatable :: ak(:), bk(:) - - ! 
hard code these values that are the same for GFS - idvc=2 - idsl=1 - idvm=1 - ntrac = 8 - ncldt = 5 - - print* - print*,"OPEN INPUT FILE: ",trim(input_file) - indset = open_dataset(input_file) - - print*,"GET INPUT FILE HEADER" - ncdim = get_dim(indset, 'grid_xt'); i_input = ncdim%len - ncdim = get_dim(indset, 'grid_yt'); j_input = ncdim%len - ncdim = get_dim(indset, 'pfull'); lev = ncdim%len - idate = get_idate_from_time_units(indset) - - print*,'DIMENSIONS OF DATA ARE: ', i_input, j_input, lev - print*,'DATE OF DATA IS: ', idate - - ij_input = i_input * j_input - - call read_attribute(indset, 'ak', ak) - call read_attribute(indset, 'bk', bk) - - nvcoord_input = 2 - allocate(vcoord_input(lev+1,nvcoord_input)) - do k = 1, lev+1 - kk = lev+2-k - vcoord_input(k,1) = ak(kk) - vcoord_input(k,2) = bk(kk) - print*,'VCOORD OF INPUT DATA ',k,vcoord_input(k,:) - enddo - - deallocate(ak, bk) - - print* - print*,"READ SURFACE PRESSURE" - call read_vardata(indset, 'pressfc', work2d) - - allocate(sfcp_input(ij_input)) - sfcp_input = reshape(work2d,(/ij_input/)) - print*,'MAX/MIN SURFACE PRESSURE: ',maxval(sfcp_input), minval(sfcp_input) - - print* - print*,"READ SURFACE HEIGHT" - call read_vardata(indset, 'hgtsfc', work2d) - - allocate(hgt_input(ij_input)) - hgt_input = reshape(work2d,(/ij_input/)) - print*,'MAX/MIN SURFACE HEIGHT: ',maxval(hgt_input), minval(hgt_input) - - print* - print*,"READ U WIND" - allocate(ugrd_input(ij_input,lev)) - call read_vardata(indset, 'ugrd', work3d) - do vlev = 1, lev - rvlev = lev+1-vlev - ugrd_input(:,vlev) = reshape(work3d(:,:,rvlev),(/ij_input/)) - print*,'MAX/MIN U WIND AT LEVEL ',vlev, "IS: ", maxval(ugrd_input(:,vlev)), minval(ugrd_input(:,vlev)) - enddo - - print* - print*,"READ V WIND" - allocate(vgrd_input(ij_input,lev)) - call read_vardata(indset, 'vgrd', work3d) - do vlev = 1, lev - rvlev = lev+1-vlev - vgrd_input(:,vlev) = reshape(work3d(:,:,rvlev),(/ij_input/)) - print*,'MAX/MIN V WIND AT LEVEL ', vlev, "IS: ", maxval(vgrd_input(:,vlev)), minval(vgrd_input(:,vlev)) - enddo - - print* - print*,"READ TEMPERATURE" - allocate(tmp_input(ij_input,lev)) - call read_vardata(indset, 'tmp', work3d) - do vlev = 1, lev - rvlev = lev+1-vlev - tmp_input(:,vlev) = reshape(work3d(:,:,rvlev),(/ij_input/)) - print*,'MAX/MIN TEMPERATURE AT LEVEL ', vlev, 'IS: ', maxval(tmp_input(:,vlev)), minval(tmp_input(:,vlev)) - enddo - - print* - print*,"READ SPECIFIC HUMIDITY" - allocate(spfh_input(ij_input,lev)) - call read_vardata(indset, 'spfh', work3d) - do vlev = 1, lev - rvlev = lev+1-vlev - spfh_input(:,vlev) = reshape(work3d(:,:,rvlev),(/ij_input/)) - print*,'MAX/MIN SPECIFIC HUMIDITY AT LEVEL ', vlev, 'IS: ', maxval(spfh_input(:,vlev)), minval(spfh_input(:,vlev)) - enddo - - print* - print*,"READ CLOUD LIQUID WATER" - allocate(clwmr_input(ij_input,lev)) - call read_vardata(indset, 'clwmr', work3d) - do vlev = 1, lev - rvlev = lev+1-vlev - clwmr_input(:,vlev) = reshape(work3d(:,:,rvlev),(/ij_input/)) - print*,'MAX/MIN CLOUD LIQUID WATER AT LEVEL ', vlev, 'IS: ', maxval(clwmr_input(:,vlev)), minval(clwmr_input(:,vlev)) - enddo - - print* - print*,"READ OZONE" - allocate(o3mr_input(ij_input,lev)) - call read_vardata(indset, 'o3mr', work3d) - do vlev = 1, lev - rvlev = lev+1-vlev - o3mr_input(:,vlev) = reshape(work3d(:,:,rvlev),(/ij_input/)) - print*,'MAX/MIN OZONE AT LEVEL ', vlev, 'IS: ', maxval(o3mr_input(:,vlev)), minval(o3mr_input(:,vlev)) - enddo - - print* - print*,"READ DZDT" - allocate(dzdt_input(ij_input,lev)) - call read_vardata(indset, 'dzdt', work3d, errcode=iret) - 
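The module_ncio reads above come back ordered top-down in the vertical, while the recentering code stores fields bottom-up, hence the rvlev = lev+1-vlev flip on every 3-D variable and the reversed indexing of ak/bk. A small numpy/netCDF4 sketch of the same flip; the file path and the (time, pfull, grid_yt, grid_xt) dimension layout are assumptions for illustration:

```python
import numpy as np
from netCDF4 import Dataset

def read_field_bottom_up(path, name):
    """Read a 3-D field and return it as (ij, lev), with level 1 nearest the surface.

    'path' and the (time, pfull, grid_yt, grid_xt) layout are assumptions for
    illustration; this mirrors the rvlev = lev+1-vlev flip in read_input_data above.
    """
    with Dataset(path) as nc:
        data = nc.variables[name][0, ...]            # (pfull, grid_yt, grid_xt), top-down
    lev, ny, nx = data.shape
    data = data[::-1, :, :]                          # flip so index 0 is the lowest layer
    return np.asarray(data).reshape(lev, ny * nx).T  # (ij_input, lev)
```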
if (iret == 0) then - do vlev = 1, lev - rvlev = lev+1-vlev - dzdt_input(:,vlev) = reshape(work3d(:,:,rvlev),(/ij_input/)) - print*,'MAX/MIN DZDT AT LEVEL ', vlev, 'IS: ', maxval(dzdt_input(:,vlev)), minval(dzdt_input(:,vlev)) - enddo - idzdt = 1 - else - dzdt_input = missing_value - print*,'DZDT NOT IN INPUT FILE' - idzdt = 0 - endif - - - print* - print*,"READ RWMR" - allocate(rwmr_input(ij_input,lev)) - call read_vardata(indset, 'rwmr', work3d, errcode=iret) - if (iret == 0) then - do vlev = 1, lev - rvlev = lev+1-vlev - rwmr_input(:,vlev) = reshape(work3d(:,:,rvlev),(/ij_input/)) - print*,'MAX/MIN RWMR AT LEVEL ', vlev, 'IS: ', maxval(rwmr_input(:,vlev)), minval(rwmr_input(:,vlev)) - enddo - irwmr = 1 - else - rwmr_input = missing_value - print*,'RWMR NOT IN INPUT FILE' - irwmr = 0 - endif - - print* - print*,"READ ICMR" - allocate(icmr_input(ij_input,lev)) - call read_vardata(indset, 'icmr', work3d, errcode=iret) - if (iret == 0) then - do vlev = 1, lev - rvlev = lev+1-vlev - icmr_input(:,vlev) = reshape(work3d(:,:,rvlev),(/ij_input/)) - print*,'MAX/MIN ICMR AT LEVEL ', vlev, 'IS: ', maxval(icmr_input(:,vlev)), minval(icmr_input(:,vlev)) - enddo - iicmr = 1 - else - icmr_input = missing_value - print*,'ICMR NOT IN INPUT FILE' - iicmr = 0 - endif - - print* - print*,"READ SNMR" - allocate(snmr_input(ij_input,lev)) - call read_vardata(indset, 'snmr', work3d, errcode=iret) - if (iret == 0) then - do vlev = 1, lev - rvlev = lev+1-vlev - snmr_input(:,vlev) = reshape(work3d(:,:,rvlev),(/ij_input/)) - print*,'MAX/MIN SNMR AT LEVEL ', vlev, 'IS: ', maxval(snmr_input(:,vlev)), minval(snmr_input(:,vlev)) - enddo - isnmr = 1 - else - snmr_input = missing_value - print*,'SNMR NOT IN INPUT FILE' - isnmr = 0 - endif - - print* - print*,"READ GRLE" - allocate(grle_input(ij_input,lev)) - call read_vardata(indset, 'grle', work3d, errcode=iret) - if (iret == 0) then - do vlev = 1, lev - rvlev = lev+1-vlev - grle_input(:,vlev) = reshape(work3d(:,:,rvlev),(/ij_input/)) - print*,'MAX/MIN GRLE AT LEVEL ', vlev, 'IS: ', maxval(grle_input(:,vlev)), minval(grle_input(:,vlev)) - enddo - igrle = 1 - else - grle_input = missing_value - print*,'GRLE NOT IN INPUT FILE' - igrle = 0 - endif - - print* - print*,"READ CLD_AMT" - allocate(cldamt_input(ij_input,lev)) - if (cld_amt) then - call read_vardata(indset, 'cld_amt', work3d, errcode=iret) - if (iret == 0) then - do vlev = 1, lev - rvlev = lev+1-vlev - cldamt_input(:,vlev) = reshape(work3d(:,:,rvlev),(/ij_input/)) - print*,'MAX/MIN CLD_AMT AT LEVEL ', vlev, 'IS: ', maxval(cldamt_input(:,vlev)), minval(cldamt_input(:,vlev)) - enddo - icldamt = 1 - else - cldamt_input = missing_value - print*,'CLDAMT NOT IN INPUT FILE' - icldamt = 0 - endif - else - cldamt_input = missing_value - print*,'CLDAMT NOT READ - CLD_AMT NAMELIST OPTION NOT SET TO TRUE' - icldamt = 0 - end if - - call read_vardata(indset, 'dpres', work3d, errcode=iret) - if (iret == 0) then - idpres = 1 - else - idpres = 0 - endif - call read_vardata(indset, 'delz', work3d, errcode=iret) - if (iret == 0) then - idelz = 1 - else - idelz = 0 - endif - - print*,"CLOSE FILE" - call close_dataset(indset) - deallocate(work2d,work3d) - -!--------------------------------------------------------------------------------------- -! Set the grib 1 grid description array need by the NCEP IPOLATES library. 
-!--------------------------------------------------------------------------------------- - - call calc_kgds(i_input, j_input, kgds_input) - - return - - end subroutine read_input_data - - subroutine read_vcoord_info - -!--------------------------------------------------------------------------------- -! Read vertical coordinate information. -!--------------------------------------------------------------------------------- - - implicit none - - integer :: istat, n, k, k2 - - real, allocatable :: ak(:), bk(:) - - type(Dataset) :: refdset - - print* - print*,"READ OUTPUT VERT COORDINATE INFO FROM REFERENCE FILE: ",trim(ref_file) - - refdset = open_dataset(ref_file) - call read_attribute(refdset, 'ak', ak) - call read_attribute(refdset, 'bk', bk) - call close_dataset(refdset) - - lev_output = size(bk) - 1 - - nvcoord=2 - allocate(vcoord(lev_output+1, nvcoord)) - - do k = 1, (lev_output+1) - k2 = lev_output+2 - k - vcoord(k,1) = ak(k2) - vcoord(k,2) = bk(k2) - print*,'VCOORD OF OUTPUT GRID ',k,vcoord(k,:) - enddo - - deallocate (ak, bk) - - end subroutine read_vcoord_info - - end module input_data diff --git a/sorc/enkf_chgres_recenter_nc.fd/interp.f90 b/sorc/enkf_chgres_recenter_nc.fd/interp.f90 deleted file mode 100644 index 291e8ef0d3..0000000000 --- a/sorc/enkf_chgres_recenter_nc.fd/interp.f90 +++ /dev/null @@ -1,582 +0,0 @@ - module interp - - implicit none - - private - - real, allocatable :: sfcp_b4_adj_output(:) - real, allocatable :: clwmr_b4_adj_output(:,:) - real, allocatable :: dzdt_b4_adj_output(:,:) - real, allocatable :: grle_b4_adj_output(:,:) - real, allocatable :: cldamt_b4_adj_output(:,:) - real, allocatable :: icmr_b4_adj_output(:,:) - real, allocatable :: o3mr_b4_adj_output(:,:) - real, allocatable :: rwmr_b4_adj_output(:,:) - real, allocatable :: snmr_b4_adj_output(:,:) - real, allocatable :: spfh_b4_adj_output(:,:) - real, allocatable :: tmp_b4_adj_output(:,:) - real, allocatable :: ugrd_b4_adj_output(:,:) - real, allocatable :: vgrd_b4_adj_output(:,:) - - public :: adjust_for_terrain - public :: gaus_to_gaus - - contains - - subroutine adjust_for_terrain - -!--------------------------------------------------------------------------------- -! Adjust fields based on differences between the interpolated and external -! terrain. -!--------------------------------------------------------------------------------- - - use input_data - use output_data - use utils - use setup - - implicit none - - integer :: k - - real, allocatable :: pres_b4_adj_output(:,:) - real, allocatable :: pres_output(:,:) - real, allocatable :: q_b4_adj_output(:,:,:), q_output(:,:,:) - -!--------------------------------------------------------------------------------- -! First, compute the mid-layer pressure using the interpolated surface pressure. -!--------------------------------------------------------------------------------- - - allocate(pres_b4_adj_output(ij_output,lev)) - pres_b4_adj_output = 0.0 - - print*,'before newpr1, sfcp b4 adj: ', sfcp_b4_adj_output(ij_output/2) - - print* - print*,"COMPUTE MID-LAYER PRESSURE FROM INTERPOLATED SURFACE PRESSURE." - call newpr1(ij_output, lev, idvc, idsl, nvcoord_input, vcoord_input, & - sfcp_b4_adj_output, pres_b4_adj_output) - - print*,'after newpr1, pres b4 adj: ', pres_b4_adj_output(ij_output/2,:) - -!--------------------------------------------------------------------------------- -! Adjust surface pressure based on differences between interpolated and -! grid terrain. 
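! (Editorial note: newps performs this adjustment hydrostatically, assuming a
!  constant -6.5 K/km temperature lapse rate below ground; see NEWPS in
!  utils.f90 later in this change set.)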
-!--------------------------------------------------------------------------------- - - allocate(sfcp_output(ij_output)) - sfcp_output = 0.0 - - print*,"ADJUST SURFACE PRESSURE BASED ON TERRAIN DIFFERENCES" - call newps(hgt_output, sfcp_b4_adj_output, ij_output, & - lev, pres_b4_adj_output, tmp_b4_adj_output, & - spfh_b4_adj_output, hgt_external_output, sfcp_output) - - print*,'after newps ',sfcp_b4_adj_output(ij_output/2),sfcp_output(ij_output/2) - - deallocate(sfcp_b4_adj_output) - -!--------------------------------------------------------------------------------- -! Recompute mid-layer pressure based on the adjusted surface pressure. -!--------------------------------------------------------------------------------- - - allocate(pres_output(ij_output, lev_output)) - pres_output = 0.0 - - allocate(dpres_output(ij_output, lev_output)) - dpres_output = 0.0 - - print*,'before newpr1 ',sfcp_output(ij_output/2) - print*,'before newpr1 ',idvc,idsl,nvcoord,vcoord - - print*,"RECOMPUTE MID-LAYER PRESSURE." - call newpr1(ij_output, lev_output, idvc, idsl, nvcoord, vcoord, & - sfcp_output, pres_output, dpres_output) - - do k = 1, lev_output - print*,'after newpr1 ',pres_output(ij_output/2,k), dpres_output(ij_output/2,k) - enddo - -!--------------------------------------------------------------------------------- -! Vertically interpolate from the pre-adjusted to the adjusted mid-layer -! pressures. -!--------------------------------------------------------------------------------- - - allocate(q_b4_adj_output(ij_output,lev,ntrac)) - q_b4_adj_output(:,:,1) = spfh_b4_adj_output(:,:) - q_b4_adj_output(:,:,2) = o3mr_b4_adj_output(:,:) - q_b4_adj_output(:,:,3) = clwmr_b4_adj_output(:,:) - q_b4_adj_output(:,:,4) = rwmr_b4_adj_output(:,:) - q_b4_adj_output(:,:,5) = icmr_b4_adj_output(:,:) - q_b4_adj_output(:,:,6) = snmr_b4_adj_output(:,:) - q_b4_adj_output(:,:,7) = grle_b4_adj_output(:,:) - q_b4_adj_output(:,:,8) = cldamt_b4_adj_output(:,:) - - allocate(q_output(ij_output,lev_output,ntrac)) - q_output = 0.0 - - allocate(dzdt_output(ij_output,lev_output)) - dzdt_output = 0.0 - - allocate(ugrd_output(ij_output,lev_output)) - ugrd_output=0.0 - - allocate(vgrd_output(ij_output,lev_output)) - vgrd_output=0.0 - - allocate(tmp_output(ij_output,lev_output)) - tmp_output=0.0 - - print*,"VERTICALLY INTERPOLATE TO NEW PRESSURE LEVELS" - call vintg(ij_output, lev, lev_output, ntrac, pres_b4_adj_output, & - ugrd_b4_adj_output, vgrd_b4_adj_output, tmp_b4_adj_output, q_b4_adj_output, & - dzdt_b4_adj_output, pres_output, ugrd_output, vgrd_output, tmp_output, & - q_output, dzdt_output) - - deallocate (dzdt_b4_adj_output, q_b4_adj_output) -!deallocate (pres_b4_adj_output, pres_output) - - allocate(spfh_output(ij_output,lev_output)) - spfh_output = q_output(:,:,1) - allocate(o3mr_output(ij_output,lev_output)) - o3mr_output = q_output(:,:,2) - allocate(clwmr_output(ij_output,lev_output)) - clwmr_output = q_output(:,:,3) - allocate(rwmr_output(ij_output,lev_output)) - rwmr_output = q_output(:,:,4) - allocate(icmr_output(ij_output,lev_output)) - icmr_output = q_output(:,:,5) - allocate(snmr_output(ij_output,lev_output)) - snmr_output = q_output(:,:,6) - allocate(grle_output(ij_output,lev_output)) - grle_output = q_output(:,:,7) - allocate(cldamt_output(ij_output,lev_output)) - cldamt_output = q_output(:,:,8) - - deallocate(q_output) - - do k = 1, lev - print*,'after vintg tmp b4 ',tmp_b4_adj_output(ij_output/2,k), pres_b4_adj_output(ij_output/2,k) - enddo - do k = 1, lev_output - print*,'after vintg tmp 
',tmp_output(ij_output/2,k),pres_output(ij_output/2,k) - enddo - - deallocate(tmp_b4_adj_output) - - deallocate(ugrd_b4_adj_output) - - deallocate(vgrd_b4_adj_output) - - deallocate(spfh_b4_adj_output) - - deallocate(o3mr_b4_adj_output) - - deallocate(clwmr_b4_adj_output) - - deallocate(rwmr_b4_adj_output) - - deallocate(icmr_b4_adj_output) - - deallocate(snmr_b4_adj_output) - - deallocate(grle_b4_adj_output) - - deallocate(cldamt_b4_adj_output) - - allocate(delz_output(ij_output, lev_output)) - delz_output = 0.0 - - call compute_delz(ij_output, lev_output, vcoord(:,1), vcoord(:,2), sfcp_output, hgt_output, & - tmp_output, spfh_output, delz_output) - - do k = 1, lev_output - print*,'after compute_delz ',delz_output(ij_output/2,k) - enddo - - deallocate(hgt_output) - - end subroutine adjust_for_terrain - - subroutine gaus_to_gaus - -!---------------------------------------------------------------------------------- -! Interpolate data from the input to output grid using IPOLATES library. -!---------------------------------------------------------------------------------- - - use output_data - use input_data - use setup - - implicit none - - integer :: ip, ipopt(20), i - integer :: num_fields - integer :: iret, numpts - integer, allocatable :: ibi(:), ibo(:) - - logical*1, allocatable :: bitmap_input(:,:), bitmap_output(:,:) - logical :: same_grid - - real, allocatable :: data_input(:,:) - real, allocatable :: data_output(:,:), crot(:), srot(:) - - same_grid=.true. - do i = 1, 11 - if (kgds_input(i) /= kgds_output(i)) then - same_grid=.false. - exit - endif - enddo - - if (same_grid) then - - print* - print*,'INPUT AND OUTPUT GRIDS ARE THE SAME.' - print*,'NO HORIZ INTERPOLATION REQUIRED.' - - allocate(hgt_output(ij_output)) - hgt_output = hgt_input - deallocate(hgt_input) - - allocate(sfcp_b4_adj_output(ij_output)) - sfcp_b4_adj_output = sfcp_input - deallocate(sfcp_input) - - allocate(tmp_b4_adj_output(ij_output,lev)) - tmp_b4_adj_output = tmp_input - deallocate(tmp_input) - - allocate(clwmr_b4_adj_output(ij_output,lev)) - clwmr_b4_adj_output = clwmr_input - deallocate(clwmr_input) - - allocate(spfh_b4_adj_output(ij_output,lev)) - spfh_b4_adj_output = spfh_input - deallocate(spfh_input) - - allocate(o3mr_b4_adj_output(ij_output,lev)) - o3mr_b4_adj_output = o3mr_input - deallocate(o3mr_input) - - allocate(dzdt_b4_adj_output(ij_output,lev)) - dzdt_b4_adj_output = dzdt_input - deallocate(dzdt_input) - - allocate(rwmr_b4_adj_output(ij_output,lev)) - rwmr_b4_adj_output = rwmr_input - deallocate(rwmr_input) - - allocate(snmr_b4_adj_output(ij_output,lev)) - snmr_b4_adj_output = snmr_input - deallocate(snmr_input) - - allocate(icmr_b4_adj_output(ij_output,lev)) - icmr_b4_adj_output = icmr_input - deallocate(icmr_input) - - allocate(grle_b4_adj_output(ij_output,lev)) - grle_b4_adj_output = grle_input - deallocate(grle_input) - - allocate(cldamt_b4_adj_output(ij_output,lev)) - cldamt_b4_adj_output = cldamt_input - deallocate(cldamt_input) - - allocate(ugrd_b4_adj_output(ij_output,lev)) - ugrd_b4_adj_output = ugrd_input - deallocate(ugrd_input) - - allocate(vgrd_b4_adj_output(ij_output,lev)) - vgrd_b4_adj_output = vgrd_input - deallocate(vgrd_input) - - else - - print* - print*,'INTERPOLATE DATA TO OUTPUT GRID' - - - ip = 0 ! bilinear - ipopt = 0 - -!---------------------------------------------------------------------------------- -! Do 2-D fields first -!---------------------------------------------------------------------------------- - - num_fields = 1 - - allocate(ibi(num_fields)) - ibi = 0 ! 
no bitmap - allocate(ibo(num_fields)) - ibo = 0 ! no bitmap - - allocate(bitmap_input(ij_input,num_fields)) - bitmap_input = .true. - allocate(bitmap_output(ij_output,num_fields)) - bitmap_output = .true. - - allocate(rlat_output(ij_output)) - rlat_output = 0.0 - allocate(rlon_output(ij_output)) - rlon_output = 0.0 - -!---------------- -! Surface height -!---------------- - - allocate(data_input(ij_input,num_fields)) - data_input(:,num_fields) = hgt_input(:) - deallocate(hgt_input) - - allocate(data_output(ij_output,num_fields)) - data_output = 0 - - print*,"INTERPOLATE SURFACE HEIGHT" - call ipolates(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, data_input, & - numpts, rlat_output, rlon_output, ibo, bitmap_output, & - data_output, iret) - if (iret /= 0) goto 89 - - allocate(hgt_output(ij_output)) - hgt_output = data_output(:,num_fields) - -!------------------ -! surface pressure -!------------------ - - data_input(:,num_fields) = sfcp_input(:) - deallocate(sfcp_input) - - print*,"INTERPOLATE SURFACE PRESSURE" - call ipolates(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, data_input, & - numpts, rlat_output, rlon_output, ibo, bitmap_output, & - data_output, iret) - if (iret /= 0) goto 89 - - allocate(sfcp_b4_adj_output(ij_output)) - sfcp_b4_adj_output = data_output(:,num_fields) - - deallocate(ibi, ibo, bitmap_input, bitmap_output, data_input, data_output) - -!---------------------------------------------------------------------------------- -! 3d scalars -!---------------------------------------------------------------------------------- - - num_fields = lev - - allocate(ibi(num_fields)) - ibi = 0 ! no bitmap - allocate(ibo(num_fields)) - ibo = 0 ! no bitmap - - allocate(bitmap_input(ij_input,num_fields)) - bitmap_input = .true. - allocate(bitmap_output(ij_output,num_fields)) - bitmap_output = .true. - -!------------- -! Temperature -!------------- - - allocate(tmp_b4_adj_output(ij_output,num_fields)) - tmp_b4_adj_output = 0 - - print*,'INTERPOLATE TEMPERATURE' - call ipolates(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, tmp_input, & - numpts, rlat_output, rlon_output, ibo, bitmap_output, & - tmp_b4_adj_output, iret) - if (iret /= 0) goto 89 - - deallocate(tmp_input) - -!-------------------- -! Cloud liquid water -!-------------------- - - allocate(clwmr_b4_adj_output(ij_output,num_fields)) - clwmr_b4_adj_output = 0 - - print*,'INTERPOLATE CLOUD LIQUID WATER' - call ipolates(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, clwmr_input, & - numpts, rlat_output, rlon_output, ibo, bitmap_output, & - clwmr_b4_adj_output, iret) - if (iret /= 0) goto 89 - - deallocate(clwmr_input) - -!-------------------- -! Specific humidity -!-------------------- - - allocate(spfh_b4_adj_output(ij_output,num_fields)) - spfh_b4_adj_output = 0 - - print*,'INTERPOLATE SPECIFIC HUMIDITY' - call ipolates(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, spfh_input, & - numpts, rlat_output, rlon_output, ibo, bitmap_output, & - spfh_b4_adj_output, iret) - if (iret /= 0) goto 89 - - deallocate(spfh_input) - -!----------- -! 
Ozone -!----------- - - allocate(o3mr_b4_adj_output(ij_output,num_fields)) - o3mr_b4_adj_output = 0 - - print*,'INTERPOLATE OZONE' - call ipolates(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, o3mr_input, & - numpts, rlat_output, rlon_output, ibo, bitmap_output, & - o3mr_b4_adj_output, iret) - if (iret /= 0) goto 89 - - deallocate(o3mr_input) - -!----------- -! DZDT -!----------- - - allocate(dzdt_b4_adj_output(ij_output,num_fields)) - dzdt_b4_adj_output = 0 - - print*,'INTERPOLATE DZDT' - call ipolates(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, dzdt_input, & - numpts, rlat_output, rlon_output, ibo, bitmap_output, & - dzdt_b4_adj_output, iret) - if (iret /= 0) goto 89 - - deallocate(dzdt_input) - -!---------------------------------------------------------------------------------- -! Interpolate additional 3-d scalars for GFDL microphysics. -!---------------------------------------------------------------------------------- - - -!------------- -! Rain water -!------------- - - allocate(rwmr_b4_adj_output(ij_output,num_fields)) - rwmr_b4_adj_output = 0 - - print*,'INTERPOLATE RWMR' - call ipolates(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, rwmr_input, & - numpts, rlat_output, rlon_output, ibo, bitmap_output, & - rwmr_b4_adj_output, iret) - if (iret /= 0) goto 89 - - deallocate(rwmr_input) - -!------------- -! Snow water -!------------- - - allocate(snmr_b4_adj_output(ij_output,num_fields)) - snmr_b4_adj_output = 0 - - print*,'INTERPOLATE SNMR' - call ipolates(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, snmr_input, & - numpts, rlat_output, rlon_output, ibo, bitmap_output, & - snmr_b4_adj_output, iret) - if (iret /= 0) goto 89 - - deallocate(snmr_input) - -!------------- -! Ice water -!------------- - - allocate(icmr_b4_adj_output(ij_output,num_fields)) - icmr_b4_adj_output = 0 - - print*,'INTERPOLATE ICMR' - call ipolates(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, icmr_input, & - numpts, rlat_output, rlon_output, ibo, bitmap_output, & - icmr_b4_adj_output, iret) - if (iret /= 0) goto 89 - - deallocate(icmr_input) - -!------------- -! Graupel -!------------- - - allocate(grle_b4_adj_output(ij_output,num_fields)) - grle_b4_adj_output = 0 - - print*,'INTERPOLATE GRLE' - call ipolates(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, grle_input, & - numpts, rlat_output, rlon_output, ibo, bitmap_output, & - grle_b4_adj_output, iret) - if (iret /= 0) goto 89 - - deallocate(grle_input) - - -!--------------------------- -! Cloud amount -!--------------------------- - - allocate(cldamt_b4_adj_output(ij_output,num_fields)) - cldamt_b4_adj_output = 0 - - print*,'INTERPOLATE CLD_AMT' - call ipolates(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, cldamt_input, & - numpts, rlat_output, rlon_output, ibo, bitmap_output, & - cldamt_b4_adj_output, iret) - if (iret /= 0) goto 89 - - deallocate(cldamt_input) - - - -!---------------------------------------------------------------------------------- -! 3d u/v winds -!---------------------------------------------------------------------------------- - - allocate(crot(ij_output), srot(ij_output)) - crot = 0. - srot = 0. 
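! Editorial note: unlike the scalar fields above, u and v are interpolated
! together as a vector pair by ipolatev below; crot/srot receive the vector
! rotation cosines and sines for the output grid and are released right after
! the call.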
- - allocate(ugrd_b4_adj_output(ij_output,num_fields)) - ugrd_b4_adj_output = 0 - allocate(vgrd_b4_adj_output(ij_output,num_fields)) - vgrd_b4_adj_output = 0 - - print*,'INTERPOLATE WINDS' - call ipolatev(ip, ipopt, kgds_input, kgds_output, ij_input, ij_output,& - num_fields, ibi, bitmap_input, ugrd_input, vgrd_input, & - numpts, rlat_output, rlon_output, crot, srot, ibo, bitmap_output, & - ugrd_b4_adj_output, vgrd_b4_adj_output, iret) - if (iret /= 0) goto 89 - - deallocate (ugrd_input, vgrd_input) - deallocate (crot, srot) - deallocate (ibi, ibo, bitmap_input, bitmap_output) - - endif - - return - - 89 continue - print*,"FATAL ERROR IN IPOLATES. IRET IS: ", iret - call errexit(23) - - end subroutine gaus_to_gaus - - end module interp diff --git a/sorc/enkf_chgres_recenter_nc.fd/makefile b/sorc/enkf_chgres_recenter_nc.fd/makefile deleted file mode 100644 index c9f4c7be37..0000000000 --- a/sorc/enkf_chgres_recenter_nc.fd/makefile +++ /dev/null @@ -1,28 +0,0 @@ -SHELL= /bin/sh - -LIBS= $(FV3GFS_NCIO_LIB) $(BACIO_LIB4) $(W3NCO_LIB4) $(IP_LIB4) $(SP_LIB4) -L$(NETCDF)/lib -lnetcdff -lnetcdf -lhdf5_hl -lhdf5 -lz - -CMD= enkf_chgres_recenter_nc.x - -OBJS = driver.o input_data.o interp.o output_data.o utils.o setup.o - -$(CMD): $(OBJS) - $(FC) $(FFLAGS) -o $(CMD) $(OBJS) $(LIBS) - -driver.o: setup.o output_data.o interp.o input_data.o driver.f90 - $(FC) $(FFLAGS) -I$(FV3GFS_NCIO_INC) -I$(NETCDF)/include -c driver.f90 -interp.o: setup.o utils.o output_data.o input_data.o interp.f90 - $(FC) $(FFLAGS) -I$(FV3GFS_NCIO_INC) -I$(NETCDF)/include -c interp.f90 -input_data.o: setup.o utils.o input_data.f90 - $(FC) $(FFLAGS) -I$(FV3GFS_NCIO_INC) -I$(NETCDF)/include -c input_data.f90 -output_data.o: setup.o utils.o input_data.o output_data.f90 - $(FC) $(FFLAGS) -I$(FV3GFS_NCIO_INC) -I$(NETCDF)/include -c output_data.f90 -setup.o: setup.f90 - $(FC) $(FFLAGS) -I$(FV3GFS_NCIO_INC) -I$(NETCDF)/include -c setup.f90 -utils.o: utils.f90 - $(FC) $(FFLAGS) -I$(FV3GFS_NCIO_INC) -I$(NETCDF)/include -c utils.f90 -clean: - rm -f *.o *.mod ${CMD} -install: - -cp $(CMD) ../../exec/. - diff --git a/sorc/enkf_chgres_recenter_nc.fd/output_data.f90 b/sorc/enkf_chgres_recenter_nc.fd/output_data.f90 deleted file mode 100644 index 00b39fc7c8..0000000000 --- a/sorc/enkf_chgres_recenter_nc.fd/output_data.f90 +++ /dev/null @@ -1,288 +0,0 @@ - module output_data - - use module_ncio - - implicit none - - private - - integer, public :: kgds_output(200) - -! data on the output grid. - real, allocatable, public :: hgt_output(:) ! 
interpolated from input grid - real, allocatable, public :: hgt_external_output(:) - real, allocatable, public :: sfcp_output(:) - real, allocatable, public :: tmp_output(:,:) - real, allocatable, public :: clwmr_output(:,:) - real, allocatable, public :: delz_output(:,:) - real, allocatable, public :: dpres_output(:,:) - real, allocatable, public :: dzdt_output(:,:) - real, allocatable, public :: o3mr_output(:,:) - real, allocatable, public :: spfh_output(:,:) - real, allocatable, public :: ugrd_output(:,:) - real, allocatable, public :: vgrd_output(:,:) - real, allocatable, public :: rwmr_output(:,:) - real, allocatable, public :: icmr_output(:,:) - real, allocatable, public :: snmr_output(:,:) - real, allocatable, public :: grle_output(:,:) - real, allocatable, public :: cldamt_output(:,:) - real, allocatable, public :: rlat_output(:) - real, allocatable, public :: rlon_output(:) - - public :: set_output_grid - public :: write_output_data - type(Dataset) :: indset, outdset - - - contains - - subroutine set_output_grid - -!------------------------------------------------------------------- -! Set grid specs on the output grid. -!------------------------------------------------------------------- - - use setup - use input_data - use utils - - implicit none - - - type(Dataset) :: indset - real, allocatable :: work2d(:,:) - - - - print* - print*,"OUTPUT GRID I/J DIMENSIONS: ", i_output, j_output - -!------------------------------------------------------------------- -! Set the grib 1 grid description section, which is needed -! by the IPOLATES library. -!------------------------------------------------------------------- - - kgds_output = 0 - - call calc_kgds(i_output, j_output, kgds_output) - -!------------------------------------------------------------------- -! Read the terrain on the output grid. To ensure exact match, -! read it from an existing netcdf file. -!------------------------------------------------------------------- - - print* - print*,"OPEN OUTPUT GRID TERRAIN FILE: ", trim(terrain_file) - indset = open_dataset(terrain_file) - - allocate(hgt_external_output(ij_output)) - - print* - print*,"READ SURFACE HEIGHT" - call read_vardata(indset, 'hgtsfc', work2d) - - hgt_external_output = reshape(work2d,(/ij_output/)) - - call close_dataset(indset) - - end subroutine set_output_grid - - subroutine write_output_data - -!------------------------------------------------------------------- -! Write output grid data to a netcdf file. -!------------------------------------------------------------------- - - use input_data - use setup - - implicit none - - integer :: n,nrev - real, allocatable, dimension (:,:) :: out2d - real, allocatable, dimension (:,:,:) :: out3d - -!------------------------------------------------------------------- -! Set up some header info. -!------------------------------------------------------------------- - - call header_set - -!------------------------------------------------------------------- -! Open and write file. -!------------------------------------------------------------------- -! TODO: note there can be compression applied to this output file if necessary -! 
see how it's done in the GSI EnKF for example - - - print* - print*,'OPEN OUTPUT FILE: ',trim(output_file) - allocate(out2d(i_output,j_output)) - allocate(out3d(i_output,j_output,lev_output)) - - print*,"WRITE SURFACE HEIGHT" - out2d = reshape(hgt_external_output, (/i_output,j_output/)) - call write_vardata(outdset, 'hgtsfc', out2d) - deallocate(hgt_external_output) - - print*,"WRITE SURFACE PRESSURE" - out2d = reshape(sfcp_output, (/i_output,j_output/)) - call write_vardata(outdset, 'pressfc', out2d) - deallocate(sfcp_output) - - print*,"WRITE TEMPERATURE" - do n=1,lev_output - nrev = lev_output+1-n - out3d(:,:,n) = reshape(tmp_output(:,nrev), (/i_output,j_output/)) - end do - call write_vardata(outdset, 'tmp', out3d) - deallocate(tmp_output) - - print*,"WRITE CLOUD LIQUID WATER" - do n=1,lev_output - nrev = lev_output+1-n - out3d(:,:,n) = reshape(clwmr_output(:,nrev), (/i_output,j_output/)) - end do - call write_vardata(outdset, 'clwmr', out3d) - deallocate(clwmr_output) - - print*,"WRITE SPECIFIC HUMIDITY" - do n=1,lev_output - nrev = lev_output+1-n - out3d(:,:,n) = reshape(spfh_output(:,nrev), (/i_output,j_output/)) - end do - call write_vardata(outdset, 'spfh', out3d) - deallocate(spfh_output) - - print*,"WRITE OZONE" - do n=1,lev_output - nrev = lev_output+1-n - out3d(:,:,n) = reshape(o3mr_output(:,nrev), (/i_output,j_output/)) - end do - call write_vardata(outdset, 'o3mr', out3d) - deallocate(o3mr_output) - - print*,"WRITE U-WINDS" - do n=1,lev_output - nrev = lev_output+1-n - out3d(:,:,n) = reshape(ugrd_output(:,nrev), (/i_output,j_output/)) - end do - call write_vardata(outdset, 'ugrd', out3d) - deallocate(ugrd_output) - - print*,"WRITE V-WINDS" - do n=1,lev_output - nrev = lev_output+1-n - out3d(:,:,n) = reshape(vgrd_output(:,nrev), (/i_output,j_output/)) - end do - call write_vardata(outdset, 'vgrd', out3d) - deallocate(vgrd_output) - - if (idzdt == 1) then - print*,"WRITE DZDT" - do n=1,lev_output - nrev = lev_output+1-n - out3d(:,:,n) = reshape(dzdt_output(:,nrev), (/i_output,j_output/)) - end do - call write_vardata(outdset, 'dzdt', out3d) - deallocate(dzdt_output) - endif - - if (idpres == 1) then - print*,"WRITE DPRES" - do n=1,lev_output - nrev = lev_output+1-n - out3d(:,:,n) = reshape(dpres_output(:,nrev), (/i_output,j_output/)) - end do - call write_vardata(outdset, 'dpres', out3d) - endif - deallocate(dpres_output) - - if (idelz == 1) then - print*,"WRITE DELZ" - do n=1,lev_output - nrev = lev_output+1-n - out3d(:,:,n) = reshape(delz_output(:,nrev), (/i_output,j_output/)) - end do - call write_vardata(outdset, 'delz', out3d) - endif - deallocate(delz_output) - - if (irwmr == 1) then - print*,"WRITE RAIN WATER" - do n=1,lev_output - nrev = lev_output+1-n - out3d(:,:,n) = reshape(rwmr_output(:,nrev), (/i_output,j_output/)) - end do - call write_vardata(outdset, 'rwmr', out3d) - deallocate(rwmr_output) - endif - - if (isnmr == 1) then - print*,"WRITE SNOW WATER" - do n=1,lev_output - nrev = lev_output+1-n - out3d(:,:,n) = reshape(snmr_output(:,nrev), (/i_output,j_output/)) - end do - call write_vardata(outdset, 'snmr', out3d) - deallocate(snmr_output) - endif - - if (iicmr == 1) then - print*,"WRITE ICE WATER" - do n=1,lev_output - nrev = lev_output+1-n - out3d(:,:,n) = reshape(icmr_output(:,nrev), (/i_output,j_output/)) - end do - call write_vardata(outdset, 'icmr', out3d) - deallocate(icmr_output) - endif - - if (igrle == 1) then - print*,"WRITE GRAUPEL" - do n=1,lev_output - nrev = lev_output+1-n - out3d(:,:,n) = reshape(grle_output(:,nrev), (/i_output,j_output/)) - end 
do - call write_vardata(outdset, 'grle', out3d) - deallocate(grle_output) - endif - - if (icldamt == 1) then - print*,"WRITE CLD_AMT" - do n = 1, lev_output - nrev = lev_output+1-n - out3d(:,:,n) = reshape(cldamt_output(:,nrev), (/i_output,j_output/)) - end do - call write_vardata(outdset, 'cld_amt', out3d) - deallocate(cldamt_output) - endif - - - deallocate(out2d,out3d) - - return - - end subroutine write_output_data - - subroutine header_set - -!------------------------------------------------------------------- -! copy dimensions and metadata to the output file from the -! input terrain (output res) file -!------------------------------------------------------------------- - - use input_data - use setup - - implicit none - - print* - print*,"SET HEADER INFO FOR OUTPUT FILE." - - indset = open_dataset(ref_file) - outdset = create_dataset(output_file, indset, nocompress=.true.) - - end subroutine header_set - - end module output_data diff --git a/sorc/enkf_chgres_recenter_nc.fd/setup.f90 b/sorc/enkf_chgres_recenter_nc.fd/setup.f90 deleted file mode 100644 index ee9956ae03..0000000000 --- a/sorc/enkf_chgres_recenter_nc.fd/setup.f90 +++ /dev/null @@ -1,55 +0,0 @@ - module setup - - implicit none - - private - - character(len=300), public :: input_file - character(len=300), public :: output_file - character(len=300), public :: terrain_file - character(len=300), public :: ref_file - - integer, public :: i_output - integer, public :: j_output - integer , public :: ij_output - logical, public :: cld_amt - - public :: program_setup - - contains - - subroutine program_setup - - implicit none - - integer :: istat - character(len=500) :: filenamelist - - namelist /chgres_setup/ i_output, j_output, input_file, output_file, & - terrain_file, cld_amt, ref_file - - cld_amt = .false. ! default option - - print* - call getarg(1,filenamelist) - print*,"OPEN SETUP NAMELIST ",trim(filenamelist) - open(43, file=filenamelist, iostat=istat) - if (istat /= 0) then - print*,"FATAL ERROR OPENING NAMELIST FILE. ISTAT IS: ",istat - stop - endif - - print*,"READ SETUP NAMELIST." - read(43, nml=chgres_setup, iostat=istat) - if (istat /= 0) then - print*,"FATAL ERROR READING NAMELIST FILE. ISTAT IS: ",istat - stop - endif - - ij_output = i_output * j_output - - close(43) - - end subroutine program_setup - - end module setup diff --git a/sorc/enkf_chgres_recenter_nc.fd/utils.f90 b/sorc/enkf_chgres_recenter_nc.fd/utils.f90 deleted file mode 100644 index 786c3644b5..0000000000 --- a/sorc/enkf_chgres_recenter_nc.fd/utils.f90 +++ /dev/null @@ -1,776 +0,0 @@ - module utils - - private - - public :: calc_kgds - public :: newps - public :: newpr1 - public :: vintg - public :: compute_delz - - contains - - subroutine compute_delz(ijm, levp, ak_in, bk_in, ps, zs, t, sphum, delz) - - implicit none - integer, intent(in):: levp, ijm - real, intent(in), dimension(levp+1):: ak_in, bk_in - real, intent(in), dimension(ijm):: ps, zs - real, intent(in), dimension(ijm,levp):: t - real, intent(in), dimension(ijm,levp):: sphum - real, intent(out), dimension(ijm,levp):: delz -! Local: - real, dimension(ijm,levp+1):: zh - real, dimension(ijm,levp+1):: pe0, pn0 - real, dimension(levp+1) :: ak, bk - integer i,k - real, parameter :: GRAV = 9.80665 - real, parameter :: RDGAS = 287.05 - real, parameter :: RVGAS = 461.50 - real :: zvir - real:: grd - - print*,"COMPUTE LAYER THICKNESS." - - grd = grav/rdgas - zvir = rvgas/rdgas - 1. 
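! Editorial note on the computation below: with pn = log(ak + bk*ps), each
! interface height is built hydrostatically as
!   zh(k) = zh(k-1) + (Rd/g) * T*(1 + zvir*q) * (pn(k-1) - pn(k)),
! starting from zh(1) = zs, and the layer thickness is delz(k) = zh(k) - zh(k+1).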
- ak = ak_in - bk = bk_in - ak(levp+1) = max(1.e-9, ak(levp+1)) - - do i=1, ijm - pe0(i,levp+1) = ak(levp+1) - pn0(i,levp+1) = log(pe0(i,levp+1)) - enddo - - do k=levp,1, -1 - do i=1,ijm - pe0(i,k) = ak(k) + bk(k)*ps(i) - pn0(i,k) = log(pe0(i,k)) - enddo - enddo - - do i = 1, ijm - zh(i,1) = zs(i) - enddo - - do k = 2, levp+1 - do i = 1, ijm - zh(i,k) = zh(i,k-1)+t(i,k-1)*(1.+zvir*sphum(i,k-1))* & - (pn0(i,k-1)-pn0(i,k))/grd - enddo - enddo - - do k = 1, levp - do i = 1, ijm - delz(i,k) = zh(i,k) - zh(i,k+1) - enddo - enddo - - end subroutine compute_delz - - subroutine calc_kgds(idim, jdim, kgds) - - implicit none - - integer, intent(in) :: idim, jdim - - integer, intent(out) :: kgds(200) - - kgds = 0 - kgds(1) = 4 ! OCT 6 - TYPE OF GRID (GAUSSIAN) - kgds(2) = idim ! OCT 7-8 - # PTS ON LATITUDE CIRCLE - kgds(3) = jdim ! OCT 9-10 - # PTS ON LONGITUDE CIRCLE - kgds(4) = 90000 ! OCT 11-13 - LAT OF ORIGIN - kgds(5) = 0 ! OCT 14-16 - LON OF ORIGIN - kgds(6) = 128 ! OCT 17 - RESOLUTION FLAG - kgds(7) = -90000 ! OCT 18-20 - LAT OF EXTREME POINT - kgds(8) = nint(-360000./idim) ! OCT 21-23 - LON OF EXTREME POINT - kgds(9) = nint((360.0 / float(idim))*1000.0) - ! OCT 24-25 - LONGITUDE DIRECTION INCR. - kgds(10) = jdim/2 ! OCT 26-27 - NUMBER OF CIRCLES POLE TO EQUATOR - kgds(12) = 255 ! OCT 29 - RESERVED - kgds(20) = 255 ! OCT 5 - NOT USED, SET TO 255 - - end subroutine calc_kgds - - SUBROUTINE NEWPS(ZS,PS,IM,KM,P,T,Q,ZSNEW,PSNEW) -!$$$ SUBPROGRAM DOCUMENTATION BLOCK -! -! SUBPROGRAM: NEWPS COMPUTE NEW SURFACE PRESSURE -! PRGMMR: IREDELL ORG: W/NMC23 DATE: 92-10-31 -! -! ABSTRACT: COMPUTES A NEW SURFACE PRESSURE GIVEN A NEW OROGRAPHY. -! THE NEW PRESSURE IS COMPUTED ASSUMING A HYDROSTATIC BALANCE -! AND A CONSTANT TEMPERATURE LAPSE RATE. BELOW GROUND, THE -! LAPSE RATE IS ASSUMED TO BE -6.5 K/KM. -! -! PROGRAM HISTORY LOG: -! 91-10-31 MARK IREDELL -! -! USAGE: CALL NEWPS(ZS,PS,IM,KM,P,T,Q,ZSNEW,PSNEW) -! INPUT ARGUMENT LIST: -! IM INTEGER NUMBER OF POINTS TO COMPUTE -! ZS REAL (IM) OLD OROGRAPHY (M) -! PS REAL (IM) OLD SURFACE PRESSURE (PA) -! KM INTEGER NUMBER OF LEVELS -! P REAL (IM,KM) PRESSURES (PA) -! T REAL (IM,KM) TEMPERATURES (K) -! Q REAL (IM,KM) SPECIFIC HUMIDITIES (KG/KG) -! ZSNEW REAL (IM) NEW OROGRAPHY (M) -! OUTPUT ARGUMENT LIST: -! PSNEW REAL (IM) NEW SURFACE PRESSURE (PA) -! -! ATTRIBUTES: -! LANGUAGE: FORTRAN -! -!C$$$ - REAL ZS(IM),PS(IM),P(IM,KM),T(IM,KM),Q(IM,KM) - REAL ZSNEW(IM),PSNEW(IM) - PARAMETER(BETA=-6.5E-3,EPSILON=1.E-9) - PARAMETER(G=9.80665,RD=287.05,RV=461.50) - PARAMETER(GOR=G/RD,FV=RV/RD-1.) - REAL ZU(IM) - FTV(AT,AQ)=AT*(1+FV*AQ) - FGAM(APU,ATVU,APD,ATVD)=-GOR*LOG(ATVD/ATVU)/LOG(APD/APU) - FZ0(AP,ATV,AZD,APD)=AZD+ATV/GOR*LOG(APD/AP) - FZ1(AP,ATV,AZD,APD,AGAM)=AZD-ATV/AGAM*((APD/AP)**(-AGAM/GOR)-1) - FP0(AZ,AZU,APU,ATVU)=APU*EXP(-GOR/ATVU*(AZ-AZU)) - FP1(AZ,AZU,APU,ATVU,AGAM)=APU*(1+AGAM/ATVU*(AZ-AZU))**(-GOR/AGAM) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -! COMPUTE SURFACE PRESSURE BELOW THE ORIGINAL GROUND - LS=0 - K=1 - GAMMA=BETA - DO I=1,IM - PU=P(I,K) - TVU=FTV(T(I,K),Q(I,K)) - ZU(I)=FZ1(PU,TVU,ZS(I),PS(I),GAMMA) - IF(ZSNEW(I).LE.ZU(I)) THEN - PU=P(I,K) - TVU=FTV(T(I,K),Q(I,K)) - IF(ABS(GAMMA).GT.EPSILON) THEN - PSNEW(I)=FP1(ZSNEW(I),ZU(I),PU,TVU,GAMMA) - ELSE - PSNEW(I)=FP0(ZSNEW(I),ZU(I),PU,TVU) - ENDIF - ELSE - PSNEW(I)=0 - LS=LS+1 - ENDIF -! endif - ENDDO -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -! 
COMPUTE SURFACE PRESSURE ABOVE THE ORIGINAL GROUND - DO K=2,KM - IF(LS.GT.0) THEN - DO I=1,IM - IF(PSNEW(I).EQ.0) THEN - PU=P(I,K) - TVU=FTV(T(I,K),Q(I,K)) - PD=P(I,K-1) - TVD=FTV(T(I,K-1),Q(I,K-1)) - GAMMA=FGAM(PU,TVU,PD,TVD) - IF(ABS(GAMMA).GT.EPSILON) THEN - ZU(I)=FZ1(PU,TVU,ZU(I),PD,GAMMA) - ELSE - ZU(I)=FZ0(PU,TVU,ZU(I),PD) - ENDIF - IF(ZSNEW(I).LE.ZU(I)) THEN - IF(ABS(GAMMA).GT.EPSILON) THEN - PSNEW(I)=FP1(ZSNEW(I),ZU(I),PU,TVU,GAMMA) - ELSE - PSNEW(I)=FP0(ZSNEW(I),ZU(I),PU,TVU) - ENDIF - LS=LS-1 - ENDIF - ENDIF - ENDDO - ENDIF - ENDDO -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -! COMPUTE SURFACE PRESSURE OVER THE TOP - IF(LS.GT.0) THEN - K=KM - GAMMA=0 - DO I=1,IM - IF(PSNEW(I).EQ.0) THEN - PU=P(I,K) - TVU=FTV(T(I,K),Q(I,K)) - PSNEW(I)=FP0(ZSNEW(I),ZU(I),PU,TVU) - ENDIF - ENDDO - ENDIF - END SUBROUTINE NEWPS - - SUBROUTINE NEWPR1(IM,KM,IDVC,IDSL,NVCOORD,VCOORD, & - PS,PM,DP) -!$$$ SUBPROGRAM DOCUMENTATION BLOCK -! -! SUBPROGRAM: NEWPR1 COMPUTE MODEL PRESSURES -! PRGMMR: JUANG ORG: W/NMC23 DATE: 2005-04-11 -! PRGMMR: Fanglin Yang ORG: W/NMC23 DATE: 2006-11-28 -! PRGMMR: S. Moorthi ORG: NCEP/EMC DATE: 2006-12-12 -! PRGMMR: S. Moorthi ORG: NCEP/EMC DATE: 2007-01-02 -! -! ABSTRACT: COMPUTE MODEL PRESSURES. -! -! PROGRAM HISTORY LOG: -! 2005-04-11 HANN_MING HENRY JUANG hybrid sigma, sigma-p, and sigma- -! -! USAGE: CALL NEWPR1(IM,IX,KM,KMP,IDVC,IDSL,NVCOORD,VCOORD,PP,TP,QP,P -! INPUT ARGUMENT LIST: -! IM INTEGER NUMBER OF POINTS TO COMPUTE -! KM INTEGER NUMBER OF LEVELS -! IDVC INTEGER VERTICAL COORDINATE ID -! (1 FOR SIGMA AND 2 FOR HYBRID) -! IDSL INTEGER TYPE OF SIGMA STRUCTURE -! (1 FOR PHILLIPS OR 2 FOR MEAN) -! NVCOORD INTEGER NUMBER OF VERTICAL COORDINATES -! VCOORD REAL (KM+1,NVCOORD) VERTICAL COORDINATE VALUES -! FOR IDVC=1, NVCOORD=1: SIGMA INTERFACE -! FOR IDVC=2, NVCOORD=2: HYBRID INTERFACE A AND B -! FOR IDVC=3, NVCOORD=3: JUANG GENERAL HYBRID INTERFACE -! AK REAL (KM+1) HYBRID INTERFACE A -! BK REAL (KM+1) HYBRID INTERFACE B -! PS REAL (IX) SURFACE PRESSURE (PA) -! OUTPUT ARGUMENT LIST: -! PM REAL (IX,KM) MID-LAYER PRESSURE (PA) -! DP REAL (IX,KM) LAYER DELTA PRESSURE (PA) -! -! ATTRIBUTES: -! LANGUAGE: FORTRAN -! -!C$$$ - IMPLICIT NONE - - INTEGER, INTENT(IN) :: IM, KM, NVCOORD, IDVC, IDSL - - REAL, INTENT(IN) :: VCOORD(KM+1,NVCOORD) - REAL, INTENT(IN) :: PS(IM) - - REAL, INTENT(OUT) :: PM(IM,KM) - REAL, OPTIONAL, INTENT(OUT) :: DP(IM,KM) - - REAL, PARAMETER :: RD=287.05, RV=461.50, CP=1004.6, & - ROCP=RD/CP, ROCP1=ROCP+1, ROCPR=1/ROCP, & - FV=RV/RD-1. - - INTEGER :: I, K - - REAL :: AK(KM+1), BK(KM+1), PI(IM,KM+1) - - IF(IDVC.EQ.2) THEN - DO K=1,KM+1 - AK(K) = VCOORD(K,1) - BK(K) = VCOORD(K,2) - PI(:,K) = AK(K) + BK(K)*PS(:) - ENDDO - ELSE - print*,'routine only works for idvc 2' - stop - ENDIF - - IF(IDSL.EQ.2) THEN - DO K=1,KM - PM(1:IM,K) = (PI(1:IM,K)+PI(1:IM,K+1))/2 - ENDDO - ELSE - DO K=1,KM - PM(1:IM,K) = ((PI(1:IM,K)**ROCP1-PI(1:IM,K+1)**ROCP1)/ & - (ROCP1*(PI(1:IM,K)-PI(1:IM,K+1))))**ROCPR - ENDDO - ENDIF - - IF(PRESENT(DP))THEN - DO K=1,KM - DO I=1,IM - DP(I,K) = PI(I,K) - PI(I,K+1) - ENDDO - ENDDO - ENDIF - - END SUBROUTINE NEWPR1 - - SUBROUTINE TERP3(IM,IXZ1,IXQ1,IXZ2,IXQ2,NM,NXQ1,NXQ2, & - KM1,KXZ1,KXQ1,Z1,Q1,KM2,KXZ2,KXQ2,Z2,Q2,J2) -!$$$ SUBPROGRAM DOCUMENTATION BLOCK -! -! SUBPROGRAM: TERP3 CUBICALLY INTERPOLATE IN ONE DIMENSION -! PRGMMR: IREDELL ORG: W/NMC23 DATE: 98-05-01 -! -! ABSTRACT: INTERPOLATE FIELD(S) IN ONE DIMENSION ALONG THE COLUMN(S). -! THE INTERPOLATION IS CUBIC LAGRANGIAN WITH A MONOTONIC CONSTRAINT -! 
IN THE CENTER OF THE DOMAIN. IN THE OUTER INTERVALS IT IS LINEAR. -! OUTSIDE THE DOMAIN, FIELDS ARE HELD CONSTANT. -! -! PROGRAM HISTORY LOG: -! 98-05-01 MARK IREDELL -! 1999-01-04 IREDELL USE ESSL SEARCH -! -! USAGE: CALL TERP3(IM,IXZ1,IXQ1,IXZ2,IXQ2,NM,NXQ1,NXQ2, -! & KM1,KXZ1,KXQ1,Z1,Q1,KM2,KXZ2,KXQ2,Z2,Q2,J2) -! INPUT ARGUMENT LIST: -! IM INTEGER NUMBER OF COLUMNS -! IXZ1 INTEGER COLUMN SKIP NUMBER FOR Z1 -! IXQ1 INTEGER COLUMN SKIP NUMBER FOR Q1 -! IXZ2 INTEGER COLUMN SKIP NUMBER FOR Z2 -! IXQ2 INTEGER COLUMN SKIP NUMBER FOR Q2 -! NM INTEGER NUMBER OF FIELDS PER COLUMN -! NXQ1 INTEGER FIELD SKIP NUMBER FOR Q1 -! NXQ2 INTEGER FIELD SKIP NUMBER FOR Q2 -! KM1 INTEGER NUMBER OF INPUT POINTS -! KXZ1 INTEGER POINT SKIP NUMBER FOR Z1 -! KXQ1 INTEGER POINT SKIP NUMBER FOR Q1 -! Z1 REAL (1+(IM-1)*IXZ1+(KM1-1)*KXZ1) -! INPUT COORDINATE VALUES IN WHICH TO INTERPOLATE -! (Z1 MUST BE STRICTLY MONOTONIC IN EITHER DIRECTION) -! Q1 REAL (1+(IM-1)*IXQ1+(KM1-1)*KXQ1+(NM-1)*NXQ1) -! INPUT FIELDS TO INTERPOLATE -! KM2 INTEGER NUMBER OF OUTPUT POINTS -! KXZ2 INTEGER POINT SKIP NUMBER FOR Z2 -! KXQ2 INTEGER POINT SKIP NUMBER FOR Q2 -! Z2 REAL (1+(IM-1)*IXZ2+(KM2-1)*KXZ2) -! OUTPUT COORDINATE VALUES TO WHICH TO INTERPOLATE -! (Z2 NEED NOT BE MONOTONIC) -! -! OUTPUT ARGUMENT LIST: -! Q2 REAL (1+(IM-1)*IXQ2+(KM2-1)*KXQ2+(NM-1)*NXQ2) -! OUTPUT INTERPOLATED FIELDS -! J2 REAL (1+(IM-1)*IXQ2+(KM2-1)*KXQ2+(NM-1)*NXQ2) -! OUTPUT INTERPOLATED FIELDS CHANGE WRT Z2 -! -! SUBPROGRAMS CALLED: -! RSEARCH SEARCH FOR A SURROUNDING REAL INTERVAL -! -! ATTRIBUTES: -! LANGUAGE: FORTRAN -! -!C$$$ - IMPLICIT NONE - INTEGER IM,IXZ1,IXQ1,IXZ2,IXQ2,NM,NXQ1,NXQ2 - INTEGER KM1,KXZ1,KXQ1,KM2,KXZ2,KXQ2 - INTEGER I,K1,K2,N - REAL Z1(1+(IM-1)*IXZ1+(KM1-1)*KXZ1) - REAL Q1(1+(IM-1)*IXQ1+(KM1-1)*KXQ1+(NM-1)*NXQ1) - REAL Z2(1+(IM-1)*IXZ2+(KM2-1)*KXZ2) - REAL Q2(1+(IM-1)*IXQ2+(KM2-1)*KXQ2+(NM-1)*NXQ2) - REAL J2(1+(IM-1)*IXQ2+(KM2-1)*KXQ2+(NM-1)*NXQ2) - REAL FFA(IM),FFB(IM),FFC(IM),FFD(IM) - REAL GGA(IM),GGB(IM),GGC(IM),GGD(IM) - INTEGER K1S(IM,KM2) - REAL Z1A,Z1B,Z1C,Z1D,Q1A,Q1B,Q1C,Q1D,Z2S,Q2S,J2S -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -! FIND THE SURROUNDING INPUT INTERVAL FOR EACH OUTPUT POINT. - CALL RSEARCH(IM,KM1,IXZ1,KXZ1,Z1,KM2,IXZ2,KXZ2,Z2,1,IM,K1S) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -! GENERALLY INTERPOLATE CUBICALLY WITH MONOTONIC CONSTRAINT -! FROM TWO NEAREST INPUT POINTS ON EITHER SIDE OF THE OUTPUT POINT, -! BUT WITHIN THE TWO EDGE INTERVALS INTERPOLATE LINEARLY. -! KEEP THE OUTPUT FIELDS CONSTANT OUTSIDE THE INPUT DOMAIN. 
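! Editorial aside: a minimal single-column sketch of the scheme described above
! (illustrative only, not part of TERP3 and without its skip-index bookkeeping;
! assumes z is strictly increasing and n >= 4): cubic Lagrange from the four
! points surrounding the target, clamped to the two bracketing values, linear
! in the two edge intervals, constant outside the domain.
! Example: terp1(4, (/1.,2.,3.,4./), (/10.,20.,30.,40./), 2.5) returns 25.0.
      real function terp1(n, z, q, zt)
        implicit none
        integer, intent(in) :: n
        real, intent(in)    :: z(n), q(n), zt
        integer :: k, j
        real :: la, lb, lc, ld
        k = 0
        do j = 1, n
          if (zt >= z(j)) k = j          ! k = index of lower bracket (0 if below z(1))
        enddo
        if (k == 0) then
          terp1 = q(1)                   ! below the domain: hold constant
        elseif (k == n) then
          terp1 = q(n)                   ! at or above the domain top: hold constant
        elseif (k == 1 .or. k == n-1) then
          terp1 = (q(k)*(z(k+1)-zt) + q(k+1)*(zt-z(k))) / (z(k+1)-z(k))   ! edge intervals: linear
        else                             ! interior: 4-point Lagrange, then monotonic clamp
          la = (zt-z(k))*(zt-z(k+1))*(zt-z(k+2)) / ((z(k-1)-z(k))*(z(k-1)-z(k+1))*(z(k-1)-z(k+2)))
          lb = (zt-z(k-1))*(zt-z(k+1))*(zt-z(k+2)) / ((z(k)-z(k-1))*(z(k)-z(k+1))*(z(k)-z(k+2)))
          lc = (zt-z(k-1))*(zt-z(k))*(zt-z(k+2)) / ((z(k+1)-z(k-1))*(z(k+1)-z(k))*(z(k+1)-z(k+2)))
          ld = (zt-z(k-1))*(zt-z(k))*(zt-z(k+1)) / ((z(k+2)-z(k-1))*(z(k+2)-z(k))*(z(k+2)-z(k+1)))
          terp1 = la*q(k-1) + lb*q(k) + lc*q(k+1) + ld*q(k+2)
          terp1 = min(max(terp1, min(q(k), q(k+1))), max(q(k), q(k+1)))
        endif
      end function terp1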
- -!!$OMP PARALLEL DO DEFAULT(PRIVATE) SHARED(IM,IXZ1,IXQ1,IXZ2), & -!!$OMP& SHARED(IXQ2,NM,NXQ1,NXQ2,KM1,KXZ1,KXQ1,Z1,Q1,KM2,KXZ2), & -!!$OMP& SHARED(KXQ2,Z2,Q2,J2,K1S) - - DO K2=1,KM2 - DO I=1,IM - K1=K1S(I,K2) - IF(K1.EQ.1.OR.K1.EQ.KM1-1) THEN - Z2S=Z2(1+(I-1)*IXZ2+(K2-1)*KXZ2) - Z1A=Z1(1+(I-1)*IXZ1+(K1-1)*KXZ1) - Z1B=Z1(1+(I-1)*IXZ1+(K1+0)*KXZ1) - FFA(I)=(Z2S-Z1B)/(Z1A-Z1B) - FFB(I)=(Z2S-Z1A)/(Z1B-Z1A) - GGA(I)=1/(Z1A-Z1B) - GGB(I)=1/(Z1B-Z1A) - ELSEIF(K1.GT.1.AND.K1.LT.KM1-1) THEN - Z2S=Z2(1+(I-1)*IXZ2+(K2-1)*KXZ2) - Z1A=Z1(1+(I-1)*IXZ1+(K1-2)*KXZ1) - Z1B=Z1(1+(I-1)*IXZ1+(K1-1)*KXZ1) - Z1C=Z1(1+(I-1)*IXZ1+(K1+0)*KXZ1) - Z1D=Z1(1+(I-1)*IXZ1+(K1+1)*KXZ1) - FFA(I)=(Z2S-Z1B)/(Z1A-Z1B)* & - (Z2S-Z1C)/(Z1A-Z1C)* & - (Z2S-Z1D)/(Z1A-Z1D) - FFB(I)=(Z2S-Z1A)/(Z1B-Z1A)* & - (Z2S-Z1C)/(Z1B-Z1C)* & - (Z2S-Z1D)/(Z1B-Z1D) - FFC(I)=(Z2S-Z1A)/(Z1C-Z1A)* & - (Z2S-Z1B)/(Z1C-Z1B)* & - (Z2S-Z1D)/(Z1C-Z1D) - FFD(I)=(Z2S-Z1A)/(Z1D-Z1A)* & - (Z2S-Z1B)/(Z1D-Z1B)* & - (Z2S-Z1C)/(Z1D-Z1C) - GGA(I)= 1/(Z1A-Z1B)* & - (Z2S-Z1C)/(Z1A-Z1C)* & - (Z2S-Z1D)/(Z1A-Z1D)+ & - (Z2S-Z1B)/(Z1A-Z1B)* & - 1/(Z1A-Z1C)* & - (Z2S-Z1D)/(Z1A-Z1D)+ & - (Z2S-Z1B)/(Z1A-Z1B)* & - (Z2S-Z1C)/(Z1A-Z1C)* & - 1/(Z1A-Z1D) - GGB(I)= 1/(Z1B-Z1A)* & - (Z2S-Z1C)/(Z1B-Z1C)* & - (Z2S-Z1D)/(Z1B-Z1D)+ & - (Z2S-Z1A)/(Z1B-Z1A)* & - 1/(Z1B-Z1C)* & - (Z2S-Z1D)/(Z1B-Z1D)+ & - (Z2S-Z1A)/(Z1B-Z1A)* & - (Z2S-Z1C)/(Z1B-Z1C)* & - 1/(Z1B-Z1D) - GGC(I)= 1/(Z1C-Z1A)* & - (Z2S-Z1B)/(Z1C-Z1B)* & - (Z2S-Z1D)/(Z1C-Z1D)+ & - (Z2S-Z1A)/(Z1C-Z1A)* & - 1/(Z1C-Z1B)* & - (Z2S-Z1D)/(Z1C-Z1D)+ & - (Z2S-Z1A)/(Z1C-Z1A)* & - (Z2S-Z1B)/(Z1C-Z1B)* & - 1/(Z1C-Z1D) - GGD(I)= 1/(Z1D-Z1A)* & - (Z2S-Z1B)/(Z1D-Z1B)* & - (Z2S-Z1C)/(Z1D-Z1C)+ & - (Z2S-Z1A)/(Z1D-Z1A)* & - 1/(Z1D-Z1B)* & - (Z2S-Z1C)/(Z1D-Z1C)+ & - (Z2S-Z1A)/(Z1D-Z1A)* & - (Z2S-Z1B)/(Z1D-Z1B)* & - 1/(Z1D-Z1C) - ENDIF - ENDDO -! INTERPOLATE. - DO N=1,NM - DO I=1,IM - K1=K1S(I,K2) - IF(K1.EQ.0) THEN - Q2S=Q1(1+(I-1)*IXQ1+(N-1)*NXQ1) - J2S=0 - ELSEIF(K1.EQ.KM1) THEN - Q2S=Q1(1+(I-1)*IXQ1+(KM1-1)*KXQ1+(N-1)*NXQ1) - J2S=0 - ELSEIF(K1.EQ.1.OR.K1.EQ.KM1-1) THEN - Q1A=Q1(1+(I-1)*IXQ1+(K1-1)*KXQ1+(N-1)*NXQ1) - Q1B=Q1(1+(I-1)*IXQ1+(K1+0)*KXQ1+(N-1)*NXQ1) - Q2S=FFA(I)*Q1A+FFB(I)*Q1B - J2S=GGA(I)*Q1A+GGB(I)*Q1B - ELSE - Q1A=Q1(1+(I-1)*IXQ1+(K1-2)*KXQ1+(N-1)*NXQ1) - Q1B=Q1(1+(I-1)*IXQ1+(K1-1)*KXQ1+(N-1)*NXQ1) - Q1C=Q1(1+(I-1)*IXQ1+(K1+0)*KXQ1+(N-1)*NXQ1) - Q1D=Q1(1+(I-1)*IXQ1+(K1+1)*KXQ1+(N-1)*NXQ1) - Q2S=FFA(I)*Q1A+FFB(I)*Q1B+FFC(I)*Q1C+FFD(I)*Q1D - J2S=GGA(I)*Q1A+GGB(I)*Q1B+GGC(I)*Q1C+GGD(I)*Q1D - IF(Q2S.LT.MIN(Q1B,Q1C)) THEN - Q2S=MIN(Q1B,Q1C) - J2S=0 - ELSEIF(Q2S.GT.MAX(Q1B,Q1C)) THEN - Q2S=MAX(Q1B,Q1C) - J2S=0 - ENDIF - ENDIF - Q2(1+(I-1)*IXQ2+(K2-1)*KXQ2+(N-1)*NXQ2)=Q2S - J2(1+(I-1)*IXQ2+(K2-1)*KXQ2+(N-1)*NXQ2)=J2S - ENDDO - ENDDO - ENDDO -!!$OMP END PARALLEL DO - - END SUBROUTINE TERP3 - - SUBROUTINE RSEARCH(IM,KM1,IXZ1,KXZ1,Z1,KM2,IXZ2,KXZ2,Z2,IXL2,KXL2,& - L2) -!$$$ SUBPROGRAM DOCUMENTATION BLOCK -! -! SUBPROGRAM: RSEARCH SEARCH FOR A SURROUNDING REAL INTERVAL -! PRGMMR: IREDELL ORG: W/NMC23 DATE: 98-05-01 -! -! ABSTRACT: THIS SUBPROGRAM SEARCHES MONOTONIC SEQUENCES OF REAL NUMBERS -! FOR INTERVALS THAT SURROUND A GIVEN SEARCH SET OF REAL NUMBERS. -! THE SEQUENCES MAY BE MONOTONIC IN EITHER DIRECTION; THE REAL NUMBERS -! MAY BE SINGLE OR DOUBLE PRECISION; THE INPUT SEQUENCES AND SETS -! AND THE OUTPUT LOCATIONS MAY BE ARBITRARILY DIMENSIONED. -! -! PROGRAM HISTORY LOG: -! 1999-01-05 MARK IREDELL -! -! USAGE: CALL RSEARCH(IM,KM1,IXZ1,KXZ1,Z1,KM2,IXZ2,KXZ2,Z2,IXL2,KXL2, -! & L2) -! INPUT ARGUMENT LIST: -! 
IM INTEGER NUMBER OF SEQUENCES TO SEARCH -! KM1 INTEGER NUMBER OF POINTS IN EACH SEQUENCE -! IXZ1 INTEGER SEQUENCE SKIP NUMBER FOR Z1 -! KXZ1 INTEGER POINT SKIP NUMBER FOR Z1 -! Z1 REAL (1+(IM-1)*IXZ1+(KM1-1)*KXZ1) -! SEQUENCE VALUES TO SEARCH -! (Z1 MUST BE MONOTONIC IN EITHER DIRECTION) -! KM2 INTEGER NUMBER OF POINTS TO SEARCH FOR -! IN EACH RESPECTIVE SEQUENCE -! IXZ2 INTEGER SEQUENCE SKIP NUMBER FOR Z2 -! KXZ2 INTEGER POINT SKIP NUMBER FOR Z2 -! Z2 REAL (1+(IM-1)*IXZ2+(KM2-1)*KXZ2) -! SET OF VALUES TO SEARCH FOR -! (Z2 NEED NOT BE MONOTONIC) -! IXL2 INTEGER SEQUENCE SKIP NUMBER FOR L2 -! KXL2 INTEGER POINT SKIP NUMBER FOR L2 -! -! OUTPUT ARGUMENT LIST: -! L2 INTEGER (1+(IM-1)*IXL2+(KM2-1)*KXL2) -! INTERVAL LOCATIONS HAVING VALUES FROM 0 TO KM1 -! (Z2 WILL BE BETWEEN Z1(L2) AND Z1(L2+1)) -! -! SUBPROGRAMS CALLED: -! SBSRCH ESSL BINARY SEARCH -! DBSRCH ESSL BINARY SEARCH -! -! REMARKS: -! IF THE ARRAY Z1 IS DIMENSIONED (IM,KM1), THEN THE SKIP NUMBERS ARE -! IXZ1=1 AND KXZ1=IM; IF IT IS DIMENSIONED (KM1,IM), THEN THE SKIP -! NUMBERS ARE IXZ1=KM1 AND KXZ1=1; IF IT IS DIMENSIONED (IM,JM,KM1), -! THEN THE SKIP NUMBERS ARE IXZ1=1 AND KXZ1=IM*JM; ETCETERA. -! SIMILAR EXAMPLES APPLY TO THE SKIP NUMBERS FOR Z2 AND L2. -! -! RETURNED VALUES OF 0 OR KM1 INDICATE THAT THE GIVEN SEARCH VALUE -! IS OUTSIDE THE RANGE OF THE SEQUENCE. -! -! IF A SEARCH VALUE IS IDENTICAL TO ONE OF THE SEQUENCE VALUES -! THEN THE LOCATION RETURNED POINTS TO THE IDENTICAL VALUE. -! IF THE SEQUENCE IS NOT STRICTLY MONOTONIC AND A SEARCH VALUE IS -! IDENTICAL TO MORE THAN ONE OF THE SEQUENCE VALUES, THEN THE -! LOCATION RETURNED MAY POINT TO ANY OF THE IDENTICAL VALUES. -! -! TO BE EXACT, FOR EACH I FROM 1 TO IM AND FOR EACH K FROM 1 TO KM2, -! Z=Z2(1+(I-1)*IXZ2+(K-1)*KXZ2) IS THE SEARCH VALUE AND -! L=L2(1+(I-1)*IXL2+(K-1)*KXL2) IS THE LOCATION RETURNED. -! IF L=0, THEN Z IS LESS THAN THE START POINT Z1(1+(I-1)*IXZ1) -! FOR ASCENDING SEQUENCES (OR GREATER THAN FOR DESCENDING SEQUENCES). -! IF L=KM1, THEN Z IS GREATER THAN OR EQUAL TO THE END POINT -! Z1(1+(I-1)*IXZ1+(KM1-1)*KXZ1) FOR ASCENDING SEQUENCES -! (OR LESS THAN OR EQUAL TO FOR DESCENDING SEQUENCES). -! OTHERWISE Z IS BETWEEN THE VALUES Z1(1+(I-1)*IXZ1+(L-1)*KXZ1) AND -! Z1(1+(I-1)*IXZ1+(L-0)*KXZ1) AND MAY EQUAL THE FORMER. -! -! ATTRIBUTES: -! LANGUAGE: FORTRAN -! -!C$$$ -! IMPLICIT NONE -! INTEGER,INTENT(IN):: IM,KM1,IXZ1,KXZ1,KM2,IXZ2,KXZ2,IXL2,KXL2 -! REAL,INTENT(IN):: Z1(1+(IM-1)*IXZ1+(KM1-1)*KXZ1) -! REAL,INTENT(IN):: Z2(1+(IM-1)*IXZ2+(KM2-1)*KXZ2) -! INTEGER,INTENT(OUT):: L2(1+(IM-1)*IXL2+(KM2-1)*KXL2) -! INTEGER(4) INCX,N,INCY,M,INDX(KM2),RC(KM2),IOPT -! INTEGER I,K2 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -! FIND THE SURROUNDING INPUT INTERVAL FOR EACH OUTPUT POINT. -! DO I=1,IM -! IF(Z1(1+(I-1)*IXZ1).LE.Z1(1+(I-1)*IXZ1+(KM1-1)*KXZ1)) THEN -! INPUT COORDINATE IS MONOTONICALLY ASCENDING. -! INCX=KXZ2 -! N=KM2 -! INCY=KXZ1 -! M=KM1 -! IOPT=1 -! IF(DIGITS(1.).LT.DIGITS(1._8)) THEN -! CALL SBSRCH(Z2(1+(I-1)*IXZ2),INCX,N, -! & Z1(1+(I-1)*IXZ1),INCY,M,INDX,RC,IOPT) -! ELSE -! CALL DBSRCH(Z2(1+(I-1)*IXZ2),INCX,N, -! & Z1(1+(I-1)*IXZ1),INCY,M,INDX,RC,IOPT) -! ENDIF -! DO K2=1,KM2 -! L2(1+(I-1)*IXL2+(K2-1)*KXL2)=INDX(K2)-RC(K2) -! ENDDO -! ELSE -! INPUT COORDINATE IS MONOTONICALLY DESCENDING. -! INCX=KXZ2 -! N=KM2 -! INCY=-KXZ1 -! M=KM1 -! IOPT=0 -! IF(DIGITS(1.).LT.DIGITS(1._8)) THEN -! CALL SBSRCH(Z2(1+(I-1)*IXZ2),INCX,N, -! & Z1(1+(I-1)*IXZ1),INCY,M,INDX,RC,IOPT) -! ELSE -! CALL DBSRCH(Z2(1+(I-1)*IXZ2),INCX,N, -! 
& Z1(1+(I-1)*IXZ1),INCY,M,INDX,RC,IOPT) -! ENDIF -! DO K2=1,KM2 -! L2(1+(I-1)*IXL2+(K2-1)*KXL2)=KM1+1-INDX(K2) -! ENDDO -! ENDIF -! ENDDO -! - IMPLICIT NONE - INTEGER,INTENT(IN):: IM,KM1,IXZ1,KXZ1,KM2,IXZ2,KXZ2,IXL2,KXL2 - REAL,INTENT(IN):: Z1(1+(IM-1)*IXZ1+(KM1-1)*KXZ1) - REAL,INTENT(IN):: Z2(1+(IM-1)*IXZ2+(KM2-1)*KXZ2) - INTEGER,INTENT(OUT):: L2(1+(IM-1)*IXL2+(KM2-1)*KXL2) - INTEGER I,K2,L - REAL Z -!C - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -!C FIND THE SURROUNDING INPUT INTERVAL FOR EACH OUTPUT POINT. - DO I=1,IM - IF(Z1(1+(I-1)*IXZ1).LE.Z1(1+(I-1)*IXZ1+(KM1-1)*KXZ1)) THEN -!C INPUT COORDINATE IS MONOTONICALLY ASCENDING. - DO K2=1,KM2 - Z=Z2(1+(I-1)*IXZ2+(K2-1)*KXZ2) - L=0 - DO - IF(Z.LT.Z1(1+(I-1)*IXZ1+L*KXZ1)) EXIT - L=L+1 - IF(L.EQ.KM1) EXIT - ENDDO - L2(1+(I-1)*IXL2+(K2-1)*KXL2)=L - ENDDO - ELSE -!C INPUT COORDINATE IS MONOTONICALLY DESCENDING. - DO K2=1,KM2 - Z=Z2(1+(I-1)*IXZ2+(K2-1)*KXZ2) - L=0 - DO - IF(Z.GT.Z1(1+(I-1)*IXZ1+L*KXZ1)) EXIT - L=L+1 - IF(L.EQ.KM1) EXIT - ENDDO - L2(1+(I-1)*IXL2+(K2-1)*KXL2)=L - ENDDO - ENDIF - ENDDO - - END SUBROUTINE RSEARCH - - SUBROUTINE VINTG(IM,KM1,KM2,NT,P1,U1,V1,T1,Q1,W1,P2, & - U2,V2,T2,Q2,W2) -!$$$ SUBPROGRAM DOCUMENTATION BLOCK -! -! SUBPROGRAM: VINTG VERTICALLY INTERPOLATE UPPER-AIR FIELDS -! PRGMMR: IREDELL ORG: W/NMC23 DATE: 92-10-31 -! -! ABSTRACT: VERTICALLY INTERPOLATE UPPER-AIR FIELDS. -! WIND, TEMPERATURE, HUMIDITY AND OTHER TRACERS ARE INTERPOLATED. -! THE INTERPOLATION IS CUBIC LAGRANGIAN IN LOG PRESSURE -! WITH A MONOTONIC CONSTRAINT IN THE CENTER OF THE DOMAIN. -! IN THE OUTER INTERVALS IT IS LINEAR IN LOG PRESSURE. -! OUTSIDE THE DOMAIN, FIELDS ARE GENERALLY HELD CONSTANT, -! EXCEPT FOR TEMPERATURE AND HUMIDITY BELOW THE INPUT DOMAIN, -! WHERE THE TEMPERATURE LAPSE RATE IS HELD FIXED AT -6.5 K/KM AND -! THE RELATIVE HUMIDITY IS HELD CONSTANT. -! -! PROGRAM HISTORY LOG: -! 91-10-31 MARK IREDELL -! -! USAGE: CALL VINTG(IM,KM1,KM2,NT,P1,U1,V1,T1,Q1,P2, -! & U2,V2,T2,Q2) -! INPUT ARGUMENT LIST: -! IM INTEGER NUMBER OF POINTS TO COMPUTE -! KM1 INTEGER NUMBER OF INPUT LEVELS -! KM2 INTEGER NUMBER OF OUTPUT LEVELS -! NT INTEGER NUMBER OF TRACERS -! P1 REAL (IM,KM1) INPUT PRESSURES -! ORDERED FROM BOTTOM TO TOP OF ATMOSPHERE -! U1 REAL (IM,KM1) INPUT ZONAL WIND -! V1 REAL (IM,KM1) INPUT MERIDIONAL WIND -! T1 REAL (IM,KM1) INPUT TEMPERATURE (K) -! Q1 REAL (IM,KM1,NT) INPUT TRACERS (HUMIDITY FIRST) -! P2 REAL (IM,KM2) OUTPUT PRESSURES -! OUTPUT ARGUMENT LIST: -! U2 REAL (IM,KM2) OUTPUT ZONAL WIND -! V2 REAL (IM,KM2) OUTPUT MERIDIONAL WIND -! T2 REAL (IM,KM2) OUTPUT TEMPERATURE (K) -! Q2 REAL (IM,KM2,NT) OUTPUT TRACERS (HUMIDITY FIRST) -! -! SUBPROGRAMS CALLED: -! TERP3 CUBICALLY INTERPOLATE IN ONE DIMENSION -! -! ATTRIBUTES: -! LANGUAGE: FORTRAN -! -!C$$$ - IMPLICIT NONE - - INTEGER, INTENT(IN) :: IM, KM1, KM2, NT - - REAL, INTENT(IN) :: P1(IM,KM1),U1(IM,KM1),V1(IM,KM1) - REAL, INTENT(IN) :: T1(IM,KM1),Q1(IM,KM1,NT) - REAL, INTENT(IN) :: W1(IM,KM1),P2(IM,KM2) - REAL, INTENT(OUT) :: U2(IM,KM2),V2(IM,KM2) - REAL, INTENT(OUT) :: T2(IM,KM2),Q2(IM,KM2,NT) - REAL, INTENT(OUT) :: W2(IM,KM2) - - REAL, PARAMETER :: DLTDZ=-6.5E-3*287.05/9.80665 - REAL, PARAMETER :: DLPVDRT=-2.5E6/461.50 - - INTEGER :: I, K, N - - REAL :: DZ - REAL,ALLOCATABLE :: Z1(:,:),Z2(:,:) - REAL,ALLOCATABLE :: C1(:,:,:),C2(:,:,:),J2(:,:,:) - - ALLOCATE (Z1(IM+1,KM1),Z2(IM+1,KM2)) - ALLOCATE (C1(IM+1,KM1,4+NT),C2(IM+1,KM2,4+NT),J2(IM+1,KM2,4+NT)) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -! 
COMPUTE LOG PRESSURE INTERPOLATING COORDINATE -! AND COPY INPUT WIND, TEMPERATURE, HUMIDITY AND OTHER TRACERS -!$OMP PARALLEL DO PRIVATE(K,I) - DO K=1,KM1 - DO I=1,IM - Z1(I,K) = -LOG(P1(I,K)) - C1(I,K,1) = U1(I,K) - C1(I,K,2) = V1(I,K) - C1(I,K,3) = W1(I,K) - C1(I,K,4) = T1(I,K) - C1(I,K,5) = Q1(I,K,1) - ENDDO - ENDDO -!$OMP END PARALLEL DO - DO N=2,NT - DO K=1,KM1 - DO I=1,IM - C1(I,K,4+N) = Q1(I,K,N) - ENDDO - ENDDO - ENDDO -!$OMP PARALLEL DO PRIVATE(K,I) - DO K=1,KM2 - DO I=1,IM - Z2(I,K) = -LOG(P2(I,K)) - ENDDO - ENDDO -!$OMP END PARALLEL DO -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -! PERFORM LAGRANGIAN ONE-DIMENSIONAL INTERPOLATION -! THAT IS 4TH-ORDER IN INTERIOR, 2ND-ORDER IN OUTSIDE INTERVALS -! AND 1ST-ORDER FOR EXTRAPOLATION. - CALL TERP3(IM,1,1,1,1,4+NT,(IM+1)*KM1,(IM+1)*KM2, & - KM1,IM+1,IM+1,Z1,C1,KM2,IM+1,IM+1,Z2,C2,J2) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -! COPY OUTPUT WIND, TEMPERATURE, HUMIDITY AND OTHER TRACERS -! EXCEPT BELOW THE INPUT DOMAIN, LET TEMPERATURE INCREASE WITH A FIXED -! LAPSE RATE AND LET THE RELATIVE HUMIDITY REMAIN CONSTANT. - DO K=1,KM2 - DO I=1,IM - U2(I,K)=C2(I,K,1) - V2(I,K)=C2(I,K,2) - W2(I,K)=C2(I,K,3) - DZ=Z2(I,K)-Z1(I,1) - IF(DZ.GE.0) THEN - T2(I,K)=C2(I,K,4) - Q2(I,K,1)=C2(I,K,5) - ELSE - T2(I,K)=T1(I,1)*EXP(DLTDZ*DZ) - Q2(I,K,1)=Q1(I,1,1)*EXP(DLPVDRT*(1/T2(I,K)-1/T1(I,1))-DZ) - ENDIF - ENDDO - ENDDO - DO N=2,NT - DO K=1,KM2 - DO I=1,IM - Q2(I,K,N)=C2(I,K,4+N) - ENDDO - ENDDO - ENDDO - DEALLOCATE (Z1,Z2,C1,C2,J2) - END SUBROUTINE VINTG - end module utils diff --git a/sorc/fbwndgfs.fd/fbwndgfs.f b/sorc/fbwndgfs.fd/fbwndgfs.f deleted file mode 100755 index ce7505fd1b..0000000000 --- a/sorc/fbwndgfs.fd/fbwndgfs.f +++ /dev/null @@ -1,969 +0,0 @@ -C$$$ MAIN PROGRAM DOCUMENTATION BLOCK -C . . . . -C MAIN PROGRAM: FBWNDGFS -C PRGMMR: VUONG ORG: NP11 DATE: 2005-08-03 -C -C ABSTRACT: THIS PROGRAM CREATES BULLETINS OF FORECAST WINDS AND -C TEMPS FOR UP TO 15 LEVELS FOR PACIFIC REGION. -C THE PRIMARY (RMP) IS RUN. THE PROGRAM SETUPS TO RUN 4 TIMES PER -C DAY (T00Z, T06Z, T12Z AND T18Z). -C EACH BULLETIN OF A SET REPRESENTS A 6, 12 OR 24 HR FCST. -C THE PROGRAM GENERATED ARE THE FOLLOWING BULLETINS; -C FBOC31, FBOC33, FBOC35, FBOC37, FBOC38, FBOC39 -C THE STATION FILE (FBWNDGFS.STNLIST) IS KEYED TO INDICATE WHICH BULLETIN -C EACH STATION BELONGS IN. THE WIND SPEED (TEN OF DEGREES), WIND DIRECTION -C (KNOTS) & TEMPERATURE(CELSIUS) IN THE FORM (DDff+TT) FOR EACH STATION -C AND LEVELS APPEAR IN THE BULLETIN. WHERE DD IS THE WIND DIRECTION, -C ff IS THE WIND SPEED, AND TT IS THE TEMPERATURE -C THE FORECAST INPUT DATA IS GFS GLOBAL LAT/LON GRID 128 (0.313 DEGREE) -C FORECAST FILES U,V,& T FIELDS, 15 LEVELS: 1000', 1500', 2000', 3000', -C 6000', 9000', 12000', 15000' + 500, 400, 300, 250, 200, 150 AND 100MB -C -C THE INPUT STATION RECORD FOR EACH STATION CONTAINS STN ELEVATION -C AND LATITUDE/LONGITUDE POSITION. -C -C PROGRAM HISTORY LOG: -C 1986-01-03 CAVANAUGH -C 2004-06-29 VUONG MODIFIED THE PROGRAM TO WORK WITH GFS DATA AND -C RUN 4 TIMES PER DAY (T00Z,T06Z,T12Z AND T18Z). -C 2005-08-03 VUONG CHANGED THE FOR USE TIMES SPECIFIED ON WIND AND -C TEMPERATURE ALOFT 6 AND 12 HOUR FORECAST BULLETINS -C 2007-07-03 VUONG CHANGED NO. OF POINTS FOR GFS GLOBAL GAUSSIAN -C LAT/LON GRID 128 -C 2010-05-26 VUONG CHANGED NO. 
OF POINTS FOR GFS (T574) GAUSSIAN -C LAT/LON GRID -C 2012-08-16 VUONG MODIFIED VARIABLES NNPOS AND CHANGED -C VARIABLE ENVVAR TO CHARACTER*6 -C 2016-05-16 VUONG MODIFIED CODE TO USE MODULE GDSWZD_MOD IN IP.v3.0.0 -C -C USAGE: -C INPUT FILES: -C FORT.05 FBWNDGFS.STNLIST STATION DIRECTORY -C -C - GFS (T574) GLOBAL GAUSSIAN LAT/LON GRID (0.205 DEGREE) -C DIMENSIONS 1760 x 880 = 1548800 -C FORT.11 /COM/GFS/PROD/GFS.${PDY}/GFS.${CYCLE}.MASTER.GRBF06 -C FORT.12 /COM/GFS/PROD/GFS.${PDY}/GFS.${CYCLE}.MASTER.GRBF12 -C FORT.13 /COM/GFS/PROD/GFS.${PDY}/GFS.${CYCLE}.MASTER.GRBF24 -C - GFS INDEX FILES FOR GRIB GRID 128: -C FORT.31 /COM/GFS/PROD/GFS.${PDY}/GFS.${CYCLE}.MASTER.GRBIF06 -C FORT.32 /COM/GFS/PROD/GFS.${PDY}/GFS.${CYCLE}.MASTER.GRBIF12 -C FORT.33 /COM/GFS/PROD/GFS.${PDY}/GFS.${CYCLE}.MASTER.GRBIF24 -C -C WHERE PDY = YYYYMMDD, YYYY IS THE YEAR, MM IS THE MONTH, -C DD IS THE DAY OF THE MONTH -C AND -C CYCLE = T00Z, T06Z, T12Z, T18Z -C -C OUTPUT FILES: -C FORT.06 ERROR MESSAGES -C FORT.51 BULLETIN RECORDS FOR TRANSMISSION -C -C SUBPROGRAMS CALLED: (LIST ALL CALLED FROM ANYWHERE IN CODES) -C LIBRARY: -C W3AI15 WXAI19 W3FC05 W3FI01 -C GETGB (FOR GRIB FILES) -C W3FT01 W3TAGE XMOVEX XSTORE W3UTCDAT -C -C EXIT STATES: -C COND = 110 STN DIRECTORY READ ERR (CONSOLE MSG) -C 1050 NO DATA (FIELD ID IS PRINTED)(FT06 + CONSOLE) -C 1060 NO DATA (FIELD ID IS PRINTED)(FT06 + CONSOLE) -C 1070 NO DATA (FIELD ID IS PRINTED)(FT06 + CONSOLE) -C 1080 NO DATA (FIELD ID IS PRINTED)(FT06 + CONSOLE) -C 1090 NO DATA (FIELD ID IS PRINTED)(FT06 + CONSOLE) -C ALL ARE FATAL -C PLUS W3LIB SUB-RTN RETURN CODES -C -C ATTRIBUTES: -C LANGUAGE: F90 FORTRAN -C MACHINE: IBM WCOSS -C -C$$$ -C - use gdswzd_mod - PARAMETER (NPTS=1548800) - PARAMETER (MAXSTN=800) - PARAMETER (IMAX=1760,JMAX=880) -C - REAL ALAT(MAXSTN),ALON(MAXSTN) - REAL ISTN(MAXSTN),JSTN(MAXSTN) - REAL ERAS(3),FHOUR,FILL - REAL RFLD(NPTS),RINTRP(IMAX,JMAX) - REAL XPTS(NPTS),YPTS(NPTS),RLON(NPTS),RLAT(NPTS) -C -C...MAX NR STNS FOR READ-END SHOULD BE GT ACTUAL NR OF STNS ON STN FILE - INTEGER IELEV(MAXSTN),IRAS(3),KTAU(3) - INTEGER JTIME(8),NDATE(8),MDATE(8) - INTEGER JGDS(100),KGDS(200),JREW,KBYTES - INTEGER KPDS(27),MPDS(27),KREW - INTEGER KSTNU(MAXSTN,15) - INTEGER LMTLWR(2),LMTUPR(2),NTTT -C...NPOS(ITIVE) IS TRANSMISSION SIGN 7C MASK FOR TEMP - INTEGER ICKYR,ICKMO,ICKDAY,ICKHR - INTEGER KSTNV(MAXSTN,15),KSTNT(MAXSTN,15) - INTEGER IDWD1H(3),IDWD2H(3) - INTEGER IDWD1P(3),IDWD2P(3) - INTEGER IDWD2(15),NHGTP(15) -C -C...S,L,T,B ARE SUBSCRIPTS FOR SEQ NR OF STATION, LEVEL, TAU, BULLETIN -C... 
B IS COUNT OF BULTNS WITHIN TAU, BB IS COUNT WITHIN RUN -C - INTEGER S,L,T,B, BB -C - CHARACTER*6 NHGT6(15), AWIPSID(6) - CHARACTER*1 BSTART,BEND - CHARACTER*1 BULTN(1280) - CHARACTER*1 SPACE(1280) - CHARACTER*1 ETBETX,ETB,ETX,ICK,ICKX - CHARACTER*1 INDIC(MAXSTN),LF,MINUS - CHARACTER*1 MUSES(MAXSTN) - CHARACTER*1 SPC80(80),TSRCE,TMODE,TFLAG - CHARACTER*3 CRCRLF - CHARACTER*4 ITRTIM,STNID(MAXSTN),IVALDA - CHARACTER*1 NNPOS - CHARACTER*4 NFDHDG(6),NCATNR(6),NVALTM(12) - CHARACTER*9 NUSETM(12) -C - CHARACTER*8 IBLANK,IBSDA,IBSTI,ITRDA - CHARACTER*8 ITEMP(MAXSTN,15),IWIND(MAXSTN,15) - CHARACTER*8 NFILE,NTTT4,RF06,RF12,RF24 - CHARACTER*6 ENVVAR - CHARACTER*80 FILEB,FILEI,SPCS,FILEO -C - CHARACTER*86 LINE73 - CHARACTER*40 LN73A,NBUL1 - CHARACTER*46 LN73B - CHARACTER*84 NBULHD - CHARACTER*34 NBUL2 - CHARACTER*32 NBASHD - CHARACTER*60 NVALHD -C - LOGICAL ENDBUL,KBMS(NPTS) -C - EQUIVALENCE (ICK,ICKX) - EQUIVALENCE (RFLD(1),RINTRP(1,1)) - EQUIVALENCE (NBULHD(1:1),NBUL1(1:1)) - EQUIVALENCE (NBULHD(41:41),NBUL2(1:1)) - EQUIVALENCE (LINE73(1:1),LN73A(1:1)) - EQUIVALENCE (LINE73(41:41),LN73B(1:1)) - EQUIVALENCE (SPCS,SPC80) - EQUIVALENCE (NTTT,NTTT4(1:1)) -C - DATA INDEX /1/ - DATA NCYCLK/ 0 / - DATA LIN / 0 / - DATA FHOUR /24.0/ - DATA KTAU /06,12,24/ - DATA LMTLWR/1,11/ - DATA LMTUPR/10,15/ - DATA IDWD1H/ 33, 34, 11/ - DATA IDWD2H/ 103, 103, 103/ - - DATA IDWD1P/ 33, 34, 11/ - DATA IDWD2P/ 100, 100, 100/ - - DATA IDWD2 / 305, 457, 610, 914, - 1 1829, 2743, 3658, 4572, - 2 500, 400, 300, 250, - 3 200, 150, 100/ - - DATA NHGT6 /'1000 ','1500 ','2000 ','3000 ', - 1 '6000 ','9000 ','12000 ','15000 ', - 2 '18000 ','24000 ','30000 ','34000 ', - 3 '39000 ','45000 ','53000'/ - DATA NHGTP /5,5,6,6,6,6,6,6,6,6,5,5,5,5,5/ - DATA BSTART/'B'/ - DATA BEND /'E'/ - DATA ETB /'>'/ - DATA ETX /'%'/ - DATA MINUS /'-'/ - DATA SPC80 /80*' '/ - DATA CRCRLF/'<<@'/ - DATA IBLANK/' '/ - DATA AWIPSID / 'FD1OC1','FD8OC7','FD3OC3', - 1 'FD9OC8','FD5OC5','FD0OC9'/ - DATA NFDHDG/ - 1 'OC31','OC37','OC33','OC38','OC35','OC39'/ - DATA NCATNR/ - 1 '1377','5980','1378','5981','1379','5982'/ - DATA NVALTM/ - 1 '0600','1200','0000','1200','1800','0600', - 2 '1800','0000','1200','0000','0600','1800'/ - DATA NUSETM/ - 1 '0200-0900','0900-1800','1800-0600', - 2 '0800-1500','1500-0000','0000-1200', - 3 '1400-2100','2100-0600','0600-1800', - 4 '2000-0300','0300-1200','1200-0000'/ -C - DATA RF06 /'6 HOURS '/ - DATA RF12 /'12 HOURS'/ - DATA RF24 /'24 HOURS'/ - DATA LN73A /' '/ - DATA LN73B /' <<@^^^'/ - DATA NBUL1 / - 1 '''10 PFB '/ - DATA NBUL2/ - 1 'FB KWNO <<@^^^ <<@$'/ - DATA NBASHD/'DATA BASED ON Z <<@@^^^'/ - DATA NVALHD/ - 1 'VALID Z FOR USE - Z. 
TEMPS NEG ABV 24000<<@@^'/ -C -C - NNPOS = CHAR(124) - LUGO = 51 - CALL W3TAGB('FBWNDGFS',2012,0184,0184,'NP11') - ENVVAR='FORT ' - WRITE(ENVVAR(5:6),FMT='(I2)') LUGO - CALL GETENV(ENVVAR,FILEO) -C - OPEN(LUGO,FILE=FILEO,ACCESS='DIRECT',RECL=1281) - IREC=1 -C...GET COMPUTER DATE-TIME & SAVE FOR DATA DATE VERIFICATION - CALL W3UTCDAT(JTIME) -C -C...READ AND STORE STATION LIST FROM UNIT 5 -C...INDIC = INDICATOR BEGIN, OR END, BULTN ('B' OR 'E') -C...MUSES = USED IN MULTIPLE BULTNS (FOR SAME TAU) IF '+' -C - DO 25 I = 1, MAXSTN - READ(5,10,ERR=109,END=130) INDIC(I),MUSES(I),STNID(I), - & IELEV(I),ALAT(I),ALON(I) - 25 CONTINUE -C -C/////////////////////////////////////////////////////////////////// - 10 FORMAT(A1,A1,A4,1X,I5,1X,F6.2,1X,F7.2) -C -C...ERROR - 109 CONTINUE - CALL W3TAGE('FBWNDGFS') - PRINT *,'STATION LIST READ ERROR' - CALL ERREXIT (110) -C//////////////////////////////////////////////////////////////////// -C - 130 CONTINUE -C -C CONVERT THE LAT/LONG COORDINATES OF STATION TO LAMBERT -C CONFORMAL PROJECTION I,J COORDINATES FOR GRID 221 -C - NRSTNS = I-1 - WRITE(6,'(A19,1X,I0)') ' NO. OF STATIONS = ',NRSTNS -C -C...END READ. COUNT OF STATIONS STORED -C -C...GET EXEC PARMS -C...PARM FIELD TAKEN OUT, NEXT 4 VALUES HARD WIRED - TMODE = 'M' - TSRCE = 'R' - TFLAG = 'P' - PRINT *,'SOURCE=',TSRCE,' MODE=',TMODE,' FLAG=',TFLAG -C -C********************************************************************** -C -C...READ PACKED DATA, UNPACK, INTERPOLATE, STORE IN STATION ARRAYS, -C... CREATE BULTN HDGS, INSERT STATION IN BULTNS, & WRITE BULTNS. -C - BB = 0 -C -C...BEGIN TAU -C - DO 7000 ITAU=1, 3 -C - WRITE(6,'(A6,1X,I0)') ' ITAU=',ITAU - T = ITAU -C -C SELECT FILE FOR TAU PERIOD (PRIMARY RUN) -C - IF (KTAU(ITAU).EQ.6) THEN - NFILE = RF06 - LUGB = 11 - LUGI = 31 - ELSE IF (KTAU(ITAU).EQ.12) THEN - NFILE = RF12 - LUGB = 12 - LUGI = 32 - ELSE - NFILE = RF24 - LUGB = 13 - LUGI = 33 - ENDIF -C - WRITE(ENVVAR(5:6),FMT='(I2)') LUGB - CALL GETENV(ENVVAR,FILEB) - CALL BAOPENR(LUGB,FILEB,IRET) - WRITE(ENVVAR(5:6),FMT='(I2)') LUGI - CALL GETENV(ENVVAR,FILEI) - CALL BAOPENR(LUGI,FILEI,IRET) - PRINT 1025,NFILE, FILEB, FILEI - 1025 FORMAT('NFILE= ',A8,2X,'GRIB FILE= ',A55,'INDEX FILE= ',A55) -C -C.................................. - DO 2450 ITYP=1,3 -C -C... SEE O.N. 388 FOR FILE ID COMPOSITION -C - DO 2400 L=1,15 -C -C...USE SOME OF THE VALUES IN THE PDS TO GET RECORD -C -C MPDS = -1 SETS ARRAY MPDS TO -1 -C MPDS(3) = GRID IDENTIFICATION (PDS BYTE 7) -C MPDS(5) = INDICATOR OF PARAMETER (PDS BYTE 9) -C MPDS(6) = INDICATOR OF TYPE OF LEVEL OR LAYER (PDS BYTE 10) -C MPDS(7) = HGT,PRES,ETC. OF LEVEL OR LAYER (PDS BYTE 11,12) -C MPDS(14) = P1 - PERIOD OF TIME (PDS BYTE 19) -C VALUES NOT SET TO -1 ARE USED TO FIND RECORD -C - JREW = 0 - KREW = 0 - MPDS = -1 -C -C MPDS(3) = -1 - IF (L.LE.8) THEN - MPDS(5) = IDWD1H(ITYP) -C... HEIGHT ABOVE MEAN SEA LEVEL GPML - MPDS(6) = IDWD2H(ITYP) - ELSE - MPDS(5) = IDWD1P(ITYP) -C... PRESSURE IN HectoPascals (hPa) ISBL - MPDS(6) = IDWD2P(ITYP) - ENDIF - MPDS(7) = IDWD2(L) - MPDS(14) = KTAU(ITAU) -C -C... THE FILE ID COMPLETED. -C PRINT *,MPDS -C... GET THE DATA FIELD. 
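A minimal sketch (not part of the deleted source) of how the GETGB search keys assembled just above translate into a concrete record request. The parameter and level-type codes come from the DATA statements and comments earlier in the program (33=UGRD, 34=VGRD, 11=TMP; 103=height above mean sea level, 100=isobaric level); the helper name mpds_keys is illustrative only.

    # Illustrative sketch of the MPDS search keys set before the GETGB call.
    IDWD1 = {"ugrd": 33, "vgrd": 34, "tmp": 11}                 # GRIB1 parameter numbers
    HEIGHTS_M = [305, 457, 610, 914, 1829, 2743, 3658, 4572]    # levels 1-8 (GPML)
    PRESSURES_HPA = [500, 400, 300, 250, 200, 150, 100]         # levels 9-15 (ISBL)

    def mpds_keys(var, level_index, tau):
        """Subset of MPDS(...) values used to locate one record (illustrative)."""
        keys = {"mpds5": IDWD1[var], "mpds14": tau}
        if level_index <= 8:                    # height above mean sea level
            keys["mpds6"] = 103
            keys["mpds7"] = HEIGHTS_M[level_index - 1]
        else:                                   # pressure level in hPa
            keys["mpds6"] = 100
            keys["mpds7"] = PRESSURES_HPA[level_index - 9]
        return keys

    # e.g. u-wind at 500 hPa from the 24-h forecast file:
    print(mpds_keys("ugrd", 9, 24))  # {'mpds5': 33, 'mpds14': 24, 'mpds6': 100, 'mpds7': 500}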
-C - CALL GETGB(LUGB,LUGI,NPTS,JREW,MPDS,JGDS, - & KBYTES,KREW,KPDS,KGDS,KBMS,RFLD,IRET) -C WRITE(*,119) KPDS -119 FORMAT( 1X, 'MAIN: KPDS:', 3(/1X,10(I5,2X) ) ) - -C -C/////////////////////////////////////////////////////////////////////// -C...ERROR - IF (IRET.NE.0) THEN - write(*,120) (MPDS(I),I=3,14) -120 format(1x,' MPDS = ',12(I0,1x)) - WRITE(6,'(A9,1X,I0)') ' IRET = ',IRET - IF (IRET.EQ.96) THEN - PRINT *,'ERROR READING INDEX FILE' - CALL W3TAGE('FBWNDGFS') - CALL ERREXIT (1050) - ELSE IF (IRET.EQ.97) THEN - PRINT *,'ERROR READING GRIB FILE' - CALL W3TAGE('FBWNDGFS') - CALL ERREXIT (1060) - ELSE IF (IRET.EQ.98) THEN - PRINT *,'NUMBER OF DATA POINT GREATER', - * ' THAN NPTS' - CALL W3TAGE('FBWNDGFS') - CALL ERREXIT (1070) - ELSE IF (IRET.EQ.99) THEN - PRINT *,'RECORD REQUESTED NOT FOUND' - CALL W3TAGE('FBWNDGFS') - CALL ERREXIT (1080) - ELSE - PRINT *,'GETGB-W3FI63 GRIB UNPACKER', - * ' RETURN CODE' - CALL W3TAGE('FBWNDGFS') - CALL ERREXIT (1090) - END IF - ENDIF -C -C...GET DATE-TIME FOR LATER BULTN HDG PROCESSING -C - ICKYR = KPDS(8) + 2000 - ICKMO = KPDS(9) - ICKDAY= KPDS(10) - ICKHR = KPDS(11) * 100 - IF (ICKHR.EQ.0000) ICYC=1 - IF (ICKHR.EQ.0600) ICYC=2 - IF (ICKHR.EQ.1200) ICYC=3 - IF (ICKHR.EQ.1800) ICYC=4 - IBSTIM=ICKHR -C -C...GET NEXT DAY - FOR VALID DAY AND 12Z AND 18Z BACKUP TRAN DAY -C...UPDATE TO NEXT DAY - NHOUR=ICKHR*.01 - CALL W3MOVDAT((/0.,FHOUR,0.,0.,0./), - & (/ICKYR,ICKMO,ICKDAY,0,NHOUR,0,0,0/),NDATE) - CALL W3MOVDAT((/0.,FHOUR,0.,0.,0./),NDATE,MDATE) -C -C...12Z, 18Z CYCLE,BACKUP RUN,24HR FCST: VALID DAY IS DAY-AFTER-NEXT -C...NEXT DAY-OF-MONTH NOW STORED IN 'NDATE(3)' -C...NEXT DAY PLUS 1 IN 'MDATE(3)' -C -C CONVERT EARTH COORDINATES OF STATION TO GRID COORDINATES - DO 110 J = 1,NRSTNS -C CALL GDSWIZ(KGDS,-1,1,FILL,XPTS(J),YPTS(J), -C & ALON(J),ALAT(J),IRET,0,DUM,DUM) - CALL GDSWZD(KGDS,-1,1,FILL,XPTS(J),YPTS(J), - & ALON(J),ALAT(J),IRET) - ISTN(J) = XPTS(J) - JSTN(J) = YPTS(J) -C PRINT 111,STNID(J),ALAT(J),ALON(J),ISTN(J),JSTN(J) - 111 FORMAT (3X,A3,2(2X,F8.2),2(2X,F8.3)) - 110 CONTINUE -C -C...CONVERT DATA TO CONVENTIONAL UNITS: -C... WIND FROM METERS/SEC TO KNOTS (2 DIGITS), -C WIND DIRECTION IN TENS OF DEGREES (2 DIGITS), -C AND TEMP FROM K TO CELSIUS (2 DIGITS) -C - DO 1500 I=1,NPTS -C - IF (ITYP.EQ.3) THEN - RFLD(I)=RFLD(I)-273.15 - ELSE - RFLD(I)=RFLD(I)*1.94254 - ENDIF -C - 1500 CONTINUE -C - DO 2300 S=1,NRSTNS -C -C INTERPOLATE GRIDPOINT DATA TO STATION. -C - CALL W3FT01(ISTN(S),JSTN(S),RINTRP,X,IMAX,JMAX,NCYCLK,LIN) -C WRITE(6,830) STNID(S),ISTN(S),JSTN(S),X -830 FORMAT(1X,'STN-ID = ', A4,3X,'SI,SJ = ', 2(F5.1,2X), 1X, - A 'X = ', F10.0) -C -C...INTERPOLATION COMPLETE FOR THIS STATION -C -C...CONVERT WIND, U AND V TO INTEGER -C - IF (ITYP.EQ.1) THEN - KSTNU(S,L)=X*100.0 - ELSE IF (ITYP.EQ.2) THEN - KSTNV(S,L)=X*100.0 -C...CONVERT TEMP TO I*2 - ELSE IF (ITYP.EQ.3) THEN - KSTNT(S,L)=X*100.0 - ENDIF -C - 2300 CONTINUE -C...END OF STATION LOOP -C................................... -C - 2400 CONTINUE -C...END OF LEVEL LOOP -C................................... -C - 2450 CONTINUE -C...END OF DATA TYPE LOOP -C................................... -C -C...INTERPOLATED DATA FOR ALL STATIONS,1 TAU, NOW ARRAYED IN KSTNU-V-T. -C*********************************************************************** -C -C...CONVERT WIND COMPONENTS TO DIRECTION AND SPEED -C -C................................. -C...BEGIN STATION -C - DO 3900 S=1,NRSTNS -C................................. 
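Before the winds are encoded below, the DO 1500 loop above converted the unpacked fields in place: temperature from kelvin to degrees Celsius and wind components from m/s to knots. A quick numeric check of those conversions, using the same constants as the code above; purely illustrative.

    # Same conversion constants as the DO 1500 loop above.
    def to_knots(wind_ms):
        return wind_ms * 1.94254

    def to_celsius(temp_k):
        return temp_k - 273.15

    print(round(to_knots(10.0), 2))     # 19.43  (10 m/s is roughly 19.4 kt)
    print(round(to_celsius(300.0), 2))  # 26.85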
- DO 3750 L=1,15 -C -C...PUT U & V WIND COMPONENTS IN I*4 WORK AREA - IRAS(1)=KSTNU(S,L) - IRAS(2)=KSTNV(S,L) -C...FLOAT U & V - ERAS(1)=FLOAT(IRAS(1))*.01 - ERAS(2)=FLOAT(IRAS(2))*.01 -C -C...CONVERT TO WIND DIRECTION & SPEED -C - CALL W3FC05(ERAS(1),ERAS(2),DD,SS) -C -C...WITH DIR & SPEED IN WORK AREA, PLACE TEMPERATURE -TT- IN WORK - IRAS(3)=KSTNT(S,L) - TT=FLOAT(IRAS(3))*.01 -C -C...DIRECTION, SPEED & TEMP ALL REQUIRE ADDITIONAL TREATMENT TO -C MEET REQUIREMENTS OF BULLETIN FORMAT -C - NDDD=(DD+5.0)/10.0 -C...WIND DIRECTION ROUNDED TO NEAREST 10 DEGREEES -C -C...THERE IS A POSSIBILITY WIND DIRECTION NOT IN RANGE 1-36 - - IF ((NDDD.GT.36).OR.(NDDD.LE.0)) THEN - NDDD = MOD(NDDD, 36) - IF (NDDD.LE.0) NDDD = NDDD + 36 - ENDIF - NSSS=SS+0.5 -C -C...WIND SPEED ROUNDED TO NEAREST KNOT -C...FOR SPEED, KEEP UNITS AND TENS ONLY, WIND SPEEDS OF 100 -C THROUGH 199 KNOTS ARE INDICATED BY SUBTRACTING 100 FROM -C THE SPEED AND ADDING 50 TO DIRECTION. -C -C...WIND SPEEDS GREATER THAN 199 KNOTS ARE INDICATED AS A -C FORECAST SPEED OF 199 KNOTS AND ADDING 50 TO DIRECTION. -C - IF (NSSS.GT.199) THEN - NSSS=99 - NDDD=NDDD+50 -C...SPEED GT 99 AND LE 199 KNOTS - ELSE IF (NSSS.GT.99) THEN - NSSS=NSSS-100 - NDDD=NDDD+50 -C -C...SPEED LT 5 KNOTS (CONSIDERED CALM) AND EXPRESSED BY "9900" - ELSE IF (NSSS.LT.5) THEN - NSSS=0 - NDDD=99 - ENDIF -C -C...COMBINE DIR & SPEED IN ONE WORD I*4 - NDDSS=(NDDD*100)+NSSS -C -C...STORE IN ASCII IN LEVEL ARRAY, WIND FOR ONE STATION - CALL W3AI15(NDDSS,IWIND(S,L),1,4,MINUS) -C -C...TEMP NEXT. IF POSITIVE ROUND TO NEAREST DEGREE, CONV TO ASCII - NTTT = TT - IF (TT.LE.-0.5) NTTT = TT - 0.5 - IF (TT.GE.0.5) NTTT = TT + 0.5 - CALL W3AI15(NTTT,NTTT,1,3,MINUS) - IF (TT.GT.-0.5) NTTT4(1:1) = NNPOS(1:1) - -C...SIGN & 2 DIGITS OF TEMP NOW IN ASCII IN LEFT 3 BYTES OF NTTT -C - ITEMP(S,L)(1:3) = NTTT4(1:3) -C - 3750 CONTINUE -C...END LEVEL (WIND CONVERSION) -C................................. -C -C...AT END OF LVL LOOP FOR ONE STATION, ALL WIND & TEMP DATA IS ARRAYED, -C... IN ASCII, IN IWIND (4 CHARACTER DIR & SPEED) AND ITEMP (3 CHAR -C... INCL SIGN FOR 1ST 10 LVLS, 2 CHAR WITH NO SIGN FOR 5 UPPER LVLS) -C ABOVE 24,000 FEET, THE SIGN IS OMITTED SINCE TEMPERATURES ARE NEGATIVE. -C -C...BEFORE INSERTING INTO BULTN, TEMPS FOR LVLS OTHER THAN 3000' -C... WHICH ARE LESS THAN 2500' ABOVE STATION MUST BE ELIMINATED. -C... (TEMPS FOR 3000' ARE NOT TRANSMITTED) -C...WINDS ARE BLANKED FOR LVLS LESS THAN 1500' ABOVE STATION. -C - IF (IELEV(S).GT.9500) ITEMP(S,7) = IBLANK - IF (IELEV(S).GT.6500) ITEMP(S,6) = IBLANK - IF (IELEV(S).GT.3500) ITEMP(S,5) = IBLANK - ITEMP(S,4)=IBLANK - ITEMP(S,3)=IBLANK - ITEMP(S,2)=IBLANK - ITEMP(S,1)=IBLANK -C - IF (IELEV(S).GT.10500) IWIND(S,7) = IBLANK - IF (IELEV(S).GT.7500) IWIND(S,6) = IBLANK - IF (IELEV(S).GT.4500) IWIND(S,5) = IBLANK - IF (IELEV(S).GT.1500) IWIND(S,4) = IBLANK - -C...DATA FOR 1 STATION, 15 LVLS, 1 TAU NOW READY FOR BULTN LINE -C - 3900 CONTINUE -C...END STATION (WIND CONVERSION) -C -C...DATA FOR ALL STATIONS, ONE TAU, NOW READY FOR BULTN INSERTION -C********************************************************************** -C********************************************************************* -C -C...BULLETIN CREATION -C...REACH THIS POINT ONCE PER TAU -C...B IS BULTN CNT FOR TAU, BB CUMULATIVE BULTN CNT FOR RUN, -C... S IS SEQ NR OF STN. -C... (NOT NEEDED FOR U.S. WHICH IS SET AT #1.) - B = 0 - S = 0 - ENDBUL = .FALSE. -C - DO 6900 J = 1,2 -C....................................................................... 
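The level loop above (statements 3750 through 3900) applies the standard winds-aloft encoding before each station line is built: direction is rounded to tens of degrees, speeds of 100-199 kt are shown by subtracting 100 from the speed and adding 50 to the direction code, and speeds under 5 kt are reported as 9900. Below is a sketch of the wind-group rules only (temperature rounding and sign handling omitted); the helper encode_wind is illustrative, not part of the deleted source.

    # Mirror of the dir/speed rules in the DO 3750 loop above (illustrative).
    def encode_wind(dd_deg, ss_kt):
        nddd = int((dd_deg + 5.0) / 10.0)     # direction to nearest 10 degrees
        if nddd > 36 or nddd <= 0:
            nddd = nddd % 36 or 36
        nsss = int(ss_kt + 0.5)               # speed to nearest knot
        if nsss > 199:                        # capped at 199 kt, flagged by +50
            nsss, nddd = 99, nddd + 50
        elif nsss > 99:                       # 100-199 kt: subtract 100, add 50
            nsss, nddd = nsss - 100, nddd + 50
        elif nsss < 5:                        # light and variable, coded 9900
            nsss, nddd = 0, 99
        return f"{nddd * 100 + nsss:04d}"

    print(encode_wind(250.0, 145.0))  # 7545  (250 deg at 145 kt)
    print(encode_wind(90.0, 3.0))     # 9900  (calm)
    print(encode_wind(320.0, 45.0))   # 3245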
-C -C...UPDATE STATION COUNTER -C - 4150 S = S + 1 -C - ICKX=INDIC(S) - IF (ICK(1:1).EQ.BSTART(1:1)) THEN - -C...GO TO START, OR CONTINUE, BULTN -C -C...BEGIN BULLETIN -C -C - B = B + 1 - BB = BB + 1 -C*********************************************************************** -C -C...PROCESS DATE-TIME FOR HEADINGS -C - IF (BB.EQ.1) THEN -C............................... -C...ONE TIME ENTRIES -C -C...TRAN HDGS - ITRDAY=JTIME(3) - IBSDAY=ICKDAY - WRITE(ITRTIM(1:4),'(2(I2.2))') JTIME(5), JTIME(6) -C - IF (TMODE.EQ.'T') THEN -C...BACKUP - IF (ICYC.EQ.3.OR.ICYC.EQ.4) THEN -C...TRAN DAY WILL BE NEXT DAY FOR 12Z, 18Z CYCLE BACKUP - ITRDAY=NDATE(3) - IF (ICYC.EQ.4.AND.T.EQ.3) IVLDAY=MDATE(3) - ENDIF - ENDIF -C...END TRAN BACKUP DAY-HOUR -C -C...PLACE TRAN & BASE DAY-HOUR IN HDGS - CALL W3AI15(ITRDAY,ITRDA,1,2,MINUS) - CALL W3AI15(IBSDAY,IBSDA,1,2,MINUS) - CALL W3AI15(IBSTIM,IBSTI,1,4,MINUS) -C - NBUL2(13:14) = ITRDA(1:2) - NBUL2(15:18) = ITRTIM(1:4) -C - NBASHD(15:16) = IBSDA(1:2) - NBASHD(17:20) = IBSTI(1:4) - ENDIF - -C **************************************************************** -C **************************************************************** -C IF REQUIRED TO INDICATE THE SOURCE FOR THESE FD BULLETINS -C REMOVE THE COMMENT STATUS FROM THE NEXT TWO LINES -C **************************************************************** -C **************************************************************** -C -C...END ONE-TIME ENTRIES -C............................ -C -C...BLANK OUT CONTROL DATE AFTER 1ST BULTN - IF (BB.EQ.2) NBULHD(13:20) = SPCS(1:8) -C -C...CATALOG NUMBER (AND 'P' OR 'B' FOR PRIMARY OR BACKUP RUN) - NBULHD(8:8) = TFLAG - NBULHD(4:7) = NCATNR(BB)(1:4) - NBULHD(43:46) = NFDHDG(BB)(1:4) -C -C INSERT AWIPS ID INTO BULLETIN HEADER -C - NBUL2(25:30) = AWIPSID(BB)(1:6) - - -C...END CATALOG NR -C -C...END TRAN HDGS -C..................................................................... -C -C...VALID-USE HDGS - IF (TMODE.EQ.'T') THEN - -C...BACKUP DAY-HOURS WILL BE SAME AS PRIMARY RUN OF OPPOSITE CYCLE - IVLDAY=NDATE(3) - IF (ICYC.EQ.1.AND.T.EQ.1) IVLDAY=IBSDAY - IF (ICYC.EQ.4.AND.T.EQ.3) IVLDAY=MDATE(3) -C -C...SET POINTER OPPOSITE (USE WITH T -RELATIVE TAU- TO SET HOURS) - IF (ICYC.EQ.1) KCYC=2 - IF (ICYC.EQ.3) KCYC=1 - ELSE - IVLDAY=IBSDAY - IF (T.EQ.3) IVLDAY=NDATE(3) - IF (ICYC.EQ.3.AND.T.EQ.2) IVLDAY=NDATE(3) - IF (ICYC.EQ.4) IVLDAY=NDATE(3) - ENDIF - -C...END BACKUP DAY-HOUR. 
-C -C...CONVERT TO ASCII AND PLACE IN HDGS - CALL W3AI15(IVLDAY,IVALDA,1,2,MINUS) - NVALHD(7:8) = IVALDA(1:2) - IITAU = ITAU - IF (ICYC.EQ.2) IITAU = ITAU + 3 - IF (ICYC.EQ.3) IITAU = ITAU + 6 - IF (ICYC.EQ.4) IITAU = ITAU + 9 - NVALHD(9:12) = NVALTM(IITAU)(1:4) - NVALHD(25:33) = NUSETM(IITAU)(1:9) -C -C...END VALID-USE HDGS -C -C...MOVE WORK HDGS TO BULTN O/P (TRAN, BASE, VALID, HEIGHT HDGS) - NEXT=0 - CALL WXAI19(NBULHD,74,BULTN,1280,NEXT) -C PRINT *,(NBULHD(L:L),L=41,70) - CALL WXAI19(NBASHD,28,BULTN,1280,NEXT) -C PRINT *,(NBASHD(L:L),L=1,25) - CALL WXAI19(NVALHD,60,BULTN,1280,NEXT) -C PRINT *, (NVALHD(L:L),L=1,55) - LINE73(1:73) = SPCS(1:73) - LINE73(1:2) = 'FT' - NPOS1 = 5 - DO 4500 N = LMTLWR(J), LMTUPR(J) - IF (N.LE.3) THEN - NPOS1 = NPOS1 - ELSE IF (N.EQ.4) THEN - NPOS1 = NPOS1 - 1 - ELSE IF ((N.GE.5).AND.(N.LE.6)) THEN - NPOS1 = NPOS1 + 2 - ELSE IF ((N.EQ.7).OR.(N.EQ.11)) THEN - NPOS1 = NPOS1 + 1 - ELSE IF (N.GT.7) THEN - NPOS1 = NPOS1 + 2 - ENDIF - NPOS2 = NPOS1 + 4 - LINE73(NPOS1:NPOS2) = NHGT6(N)(1:5) - NPOS1 = NPOS1 + NHGTP(N) - 4500 CONTINUE - -C PRINT *,(LINE73(II:II),II=1,NPOS2) - CALL WXAI19(LINE73,NPOS2,BULTN,1280,NEXT) - CALL WXAI19(CRCRLF,3,BULTN,1280,NEXT) - ENDIF -C -C...BULLETIN HDGS FOR ONE BULTN COMPLETE IN O/P AREA -C -C*********************************************************************** -C -C...CONTINUE BULTN, INSERTING DATA LINES. -C - NPOS1 = 5 - LINE73(1:73) = SPCS(1:73) - LINE73(1:1) = '$' - LINE73( 2: 5) = STNID(S)(1:4) - DO 5300 M = LMTLWR(J), LMTUPR(J) - NPOS1 = NPOS1 + 1 - NPOS2 = NPOS1 + 4 - LINE73(NPOS1:NPOS2) = IWIND(S,M)(1:4) - NPOS1 = NPOS1 + 4 - IF ((M.GT.4).AND.(M.LE.10))THEN - NPOS2 = NPOS1 + 2 - LINE73(NPOS1:NPOS2) = ITEMP(S,M)(1:3) - NPOS1 = NPOS1 + 3 - END IF - IF (M.GT.10) THEN - NPOS2 = NPOS1 + 1 - LINE73(NPOS1:NPOS2) = ITEMP(S,M)(2:3) - NPOS1 = NPOS1 + 2 - END IF - 5300 CONTINUE -C PRINT *,(LINE73(II:II),II=2,NPOS2) -C...NXTSAV HOLDS BYTE COUNT IN O/P BULTN FOR RESTORING WXAI19 'NEXT' -C... FIELD SO THAT WHEN 'NEXT' IS RETURNED AS -1, AN ADDITIONAL -C... LINEFEED AND/OR ETB OR ETX CAN BE INSERTED -C - IF (NEXT.GE.1207) THEN - CALL WXAI19 (ETB,1,BULTN,1280,NEXT) - LF = CHAR(10) - do ii=1,next - space(index) = bultn(ii) - if (index .eq. 1280) then - WRITE(51,REC=IREC) space, LF - IREC=IREC + 1 - index = 0 - do kk = 1,1280 - space(kk) = ' ' - enddo - endif - index = index + 1 - enddo -C WRITE(51) BULTN, LF - NEXT = 0 - ENDIF - CALL WXAI19(LINE73,NPOS2,BULTN,1280,NEXT) - CALL WXAI19(CRCRLF,3,BULTN,1280,NEXT) -C -C...AFTER LINE STORED IN O/P, GO TO CHECK BULTN END -C -C................................... -C -C...CHECK FOR LAST STN OF BULTN - IF (ICK(1:1).NE.BEND(1:1)) GO TO 4150 -C -C...END BULLETIN. SET UP RETURN FOR NEXT STN AFTER WRITE O/P. -C...SAVE SEQ NR OF LAST STN FOR SUBSEQUENT SEARCH FOR STNS -C - NXTSAV = NEXT - ENDBUL = .TRUE. -C*********************************************************************** -C -C...OUTPUT SECTION -C - NEXT = NXTSAV - ETBETX = ETB - IF (ENDBUL) ETBETX=ETX -C...END OF TRANSMIT BLOCK, OR END OF TRANSMISSION -C - CALL WXAI19(ETBETX,1,BULTN,1280,NEXT) -C -C...OUTPUT TO HOLD FILES - LF = CHAR(10) - do ii = 1,next - space(index) = bultn(ii) - if (index .eq. 1280) then - WRITE(51,REC=IREC) space, LF - IREC=IREC + 1 - index = 0 - do kk = 1,1280 - space(kk) = ' ' - enddo - endif - index = index + 1 - enddo -C -C...TRAN. -C -C NEXT=0 - ENDBUL=.FALSE. -C -C...RETURN TO START NEW BULTN, OR CONTINUE LINE FOR WHICH THERE WAS -C... 
INSUFFICIENT SPACE IN BLOCK JUST WRITTEN -C - 6900 CONTINUE -C -C*********************************************************************** - 7000 CONTINUE -C...END TAU LOOP -C -C...FT51 IS TRANSMISSION FILE -C END FILE 51 -C REWIND 51 - if (index .gt. 0) then - WRITE(51,REC=IREC) space, LF - IREC=IREC+1 - endif - KRET = 0 - - CALL W3TAGE('FBWNDGFS') - STOP - END - - SUBROUTINE WXAI19(LINE, L, NBLK, N, NEXT) -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: WXAI19 LINE BLOCKER SUBROUTINE -C AUTHOR: ALLARD, R. ORG: W342 DATE: 01 FEB 74 -C -C ABSTRACT: FILLS A RECORD BLOCK WITH LOGICAL RECORDS OR LINES -C OF INFORMATION. -C -C PROGRAM HISTORY LOG: -C 74-02-01 BOB ALLARD -C 90-09-15 R.E.JONES CONVERT FROM IBM370 ASSEMBLER TO MICROSOFT -C FORTRAN 5.0 -C 90-10-07 R.E.JONES CONVERT TO SUN FORTRAN 1.3 -C 91-07-20 R.E.JONES CONVERT TO SiliconGraphics 3.3 FORTRAN 77 -C 93-03-29 R.E.JONES ADD SAVE STATEMENT -C 94-04-22 R.E.JONES ADD XMOVEX AND XSTORE TO MOVE AND -C STORE CHARACTER DATA FASTER ON THE CRAY -C 96-07-18 R.E.JONES CHANGE EBCDIC FILL TO ASCII FILL -C 96-11-18 R.E.JONES CHANGE NAME W3AI19 TO WXAI19 -C -C USAGE: CALL WXAI19 (LINE, L, NBLK, N, NEXT) -C INPUT ARGUMENT LIST: -C LINE - ARRAY ADDRESS OF LOGICAL RECORD TO BE BLOCKED -C L - NUMBER OF CHARACTERS IN LINE TO BE BLOCKED -C N - MAXIMUM CHARACTER SIZE OF NBLK -C NEXT - FLAG, INITIALIZED TO 0 -C -C OUTPUT ARGUMENT LIST: -C NBLK - BLOCK FILLED WITH LOGICAL RECORDS -C NEXT - CHARACTER COUNT, ERROR INDICATOR -C -C EXIT STATES: -C NEXT = -1 LINE WILL NOT FIT INTO REMAINDER OF BLOCK; -C OTHERWISE, NEXT IS SET TO (NEXT + L) -C NEXT = -2 N IS ZERO OR LESS -C NEXT = -3 L IS ZERO OR LESS -C -C EXTERNAL REFERENCES: XMOVEX XSTORE -C -C ATTRIBUTES: -C LANGUAGE: FORTRAN 90 -C -C$$$ -C -C METHOD: -C -C THE USER MUST SET NEXT = 0 EACH TIME NBLK IS TO BE FILLED WITH -C LOGICAL RECORDS. -C -C WXAI19 WILL THEN MOVE THE LINE OF INFORMATION INTO NBLK, STORE -C BLANK CHARACTERS IN THE REMAINDER OF THE BLOCK, AND SET NEXT = NEXT -C + L. -C -C EACH TIME WXAI19 IS ENTERED, ONE LINE IS BLOCKED AND NEXT INCRE- -C MENTED UNTIL A LINE WILL NOT FIT THE REMAINDER OF THE BLOCK. THEN -C WXAI19 WILL SET NEXT = -1 AS A FLAG FOR THE USER TO DISPOSE OF THE -C BLOCK. THE USER SHOULD BE AWARE THAT THE LAST LOGICAL RECORD WAS NOT -C BLOCKED. -C - INTEGER L - INTEGER N - INTEGER NEXT - INTEGER WBLANK -C - CHARACTER * 1 LINE(*) - CHARACTER * 1 NBLK(*) - CHARACTER * 1 BLANK -C - SAVE -C - DATA WBLANK/Z'2020202020202020'/ -C DATA WBLANK/Z''/ -C -C TEST VALUE OF NEXT. -C - IF (NEXT.LT.0) THEN - RETURN -C -C TEST N FOR ZERO OR LESS -C - ELSE IF (N.LE.0) THEN - NEXT = -2 - RETURN -C -C TEST L FOR ZERO OR LESS -C - ELSE IF (L.LE.0) THEN - NEXT = -3 - RETURN -C -C TEST TO SEE IF LINE WILL FIT IN BLOCK. -C - ELSE IF ((L + NEXT).GT.N) THEN - NEXT = -1 - RETURN -C -C FILL BLOCK WITH BLANK CHARACTERS IF NEXT EQUAL ZERO. -C BLANK IS ASCII BLANK, 20 HEX, OR 32 DECIMAL -C - ELSE IF (NEXT.EQ.0) THEN - CALL W3FI01(LW) - IWORDS = N / LW - CALL XSTORE(NBLK,WBLANK,IWORDS) - IF (MOD(N,LW).NE.0) THEN - NWORDS = IWORDS * LW - IBYTES = N - NWORDS - DO I = 1,IBYTES - NBLK(NWORDS+I) = CHAR(32) - END DO - END IF - END IF -C -C MOVE LINE INTO BLOCK. -C - CALL XMOVEX(NBLK(NEXT+1),LINE,L) -C -C ADJUST VALUE OF NEXT. 
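The documentation block above describes a fill-and-flush pattern: the caller sets NEXT to 0 for a fresh block, appends lines until the routine reports that the next line will not fit (NEXT = -1), then disposes of the block and continues. A hedged Python sketch of that calling pattern only, not of the library routine itself; BLOCK_SIZE, pack_lines, and flush are illustrative names.

    # Fill-and-flush pattern described in the WXAI19 documentation above.
    BLOCK_SIZE = 1280

    def pack_lines(lines, flush):
        block = bytearray(b" " * BLOCK_SIZE)   # NEXT = 0: block starts blank-filled
        used = 0
        for line in lines:
            data = line.encode("ascii")
            if used + len(data) > BLOCK_SIZE:  # the point where WXAI19 returns -1
                flush(bytes(block))            # caller writes the full block ...
                block = bytearray(b" " * BLOCK_SIZE)
                used = 0                       # ... and re-submits the same line
            block[used:used + len(data)] = data
            used += len(data)
        if used:
            flush(bytes(block))                # partial block at end of run

    pack_lines(["FT 6000 9000", "2715+05 2820+02"], lambda blk: print(len(blk)))  # 1280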
-C - NEXT = NEXT + L -C - RETURN -C - END diff --git a/sorc/fbwndgfs.fd/makefile.GENERIC b/sorc/fbwndgfs.fd/makefile.GENERIC deleted file mode 100755 index f38539916f..0000000000 --- a/sorc/fbwndgfs.fd/makefile.GENERIC +++ /dev/null @@ -1,81 +0,0 @@ -SHELL=/bin/sh -# -# This makefile was produced by /usr/bin/fmgen at 12:00:38 PM on 11/27/96 -# If it is invoked by the command line -# make -f makefile -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable named a.out. -# -# If it is invoked by the command line -# make -f makefile a.out.prof -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable which profiles -# named a.out.prof. -# -# To remove all the objects but leave the executables use the command line -# make -f makefile clean -# -# To remove everything but the source files use the command line -# make -f makefile clobber -# -# To remove the source files created by /usr/bin/fmgen and this makefile -# use the command line -# make -f makefile void -# -# The parameters SRCS and OBJS should not need to be changed. If, however, -# you need to add a new module add the name of the source module to the -# SRCS parameter and add the name of the resulting object file to the OBJS -# parameter. The new modules are not limited to fortran, but may be C, YACC, -# LEX, or CAL. An explicit rule will need to be added for PASCAL modules. -# -SRCS= fbwndgfs.f - -OBJS= fbwndgfs.o - -# Tunable parameters -# -# FC Name of the fortran compiling system to use -# LDFLAGS Flags to the loader -# LIBS List of libraries -# CMD Name of the executable -# PROFLIB Library needed for profiling -# -FC = ifort -LDFLAGS = -LIBS = ${W3NCO_LIB8} ${W3EMC_LIB8} ${BACIO_LIB8} ${IP_LIB8} ${SP_LIB8} -CMD = fbwndgfs -PROFLIB = -lprof - -# To perform the default compilation, use the first line -# To compile with flowtracing turned on, use the second line -# To compile giving profile additonal information, use the third line -# WARNING: SIMULTANEOUSLY PROFILING AND FLOWTRACING IS NOT RECOMMENDED -FFLAGS = -O3 -g -I ${IP_INC8} -assume byterecl -convert big_endian -r8 -i8 -#FFLAGS = -F -#FFLAGS = -Wf"-ez" - -# Lines from here on down should not need to be changed. They are the -# actual rules which make uses to build a.out. -# -all: $(CMD) - -$(CMD): $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(LIBS) - -# Make the profiled version of the command and call it a.out.prof -# -$(CMD).prof: $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(PROFLIB) $(LIBS) - -install: - mkdir -p ../../exec - cp -p $(CMD) ../../exec - -clean: - -rm -f $(OBJS) - -clobber: clean - -rm -f $(CMD) $(CMD).prof - -void: clobber - -rm -f $(SRCS) makefile diff --git a/sorc/fbwndgfs.fd/makefile.theia b/sorc/fbwndgfs.fd/makefile.theia deleted file mode 100755 index f38539916f..0000000000 --- a/sorc/fbwndgfs.fd/makefile.theia +++ /dev/null @@ -1,81 +0,0 @@ -SHELL=/bin/sh -# -# This makefile was produced by /usr/bin/fmgen at 12:00:38 PM on 11/27/96 -# If it is invoked by the command line -# make -f makefile -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable named a.out. -# -# If it is invoked by the command line -# make -f makefile a.out.prof -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable which profiles -# named a.out.prof. 
-# -# To remove all the objects but leave the executables use the command line -# make -f makefile clean -# -# To remove everything but the source files use the command line -# make -f makefile clobber -# -# To remove the source files created by /usr/bin/fmgen and this makefile -# use the command line -# make -f makefile void -# -# The parameters SRCS and OBJS should not need to be changed. If, however, -# you need to add a new module add the name of the source module to the -# SRCS parameter and add the name of the resulting object file to the OBJS -# parameter. The new modules are not limited to fortran, but may be C, YACC, -# LEX, or CAL. An explicit rule will need to be added for PASCAL modules. -# -SRCS= fbwndgfs.f - -OBJS= fbwndgfs.o - -# Tunable parameters -# -# FC Name of the fortran compiling system to use -# LDFLAGS Flags to the loader -# LIBS List of libraries -# CMD Name of the executable -# PROFLIB Library needed for profiling -# -FC = ifort -LDFLAGS = -LIBS = ${W3NCO_LIB8} ${W3EMC_LIB8} ${BACIO_LIB8} ${IP_LIB8} ${SP_LIB8} -CMD = fbwndgfs -PROFLIB = -lprof - -# To perform the default compilation, use the first line -# To compile with flowtracing turned on, use the second line -# To compile giving profile additonal information, use the third line -# WARNING: SIMULTANEOUSLY PROFILING AND FLOWTRACING IS NOT RECOMMENDED -FFLAGS = -O3 -g -I ${IP_INC8} -assume byterecl -convert big_endian -r8 -i8 -#FFLAGS = -F -#FFLAGS = -Wf"-ez" - -# Lines from here on down should not need to be changed. They are the -# actual rules which make uses to build a.out. -# -all: $(CMD) - -$(CMD): $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(LIBS) - -# Make the profiled version of the command and call it a.out.prof -# -$(CMD).prof: $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(PROFLIB) $(LIBS) - -install: - mkdir -p ../../exec - cp -p $(CMD) ../../exec - -clean: - -rm -f $(OBJS) - -clobber: clean - -rm -f $(CMD) $(CMD).prof - -void: clobber - -rm -f $(SRCS) makefile diff --git a/sorc/fbwndgfs.fd/makefile.wcoss b/sorc/fbwndgfs.fd/makefile.wcoss deleted file mode 100755 index f38539916f..0000000000 --- a/sorc/fbwndgfs.fd/makefile.wcoss +++ /dev/null @@ -1,81 +0,0 @@ -SHELL=/bin/sh -# -# This makefile was produced by /usr/bin/fmgen at 12:00:38 PM on 11/27/96 -# If it is invoked by the command line -# make -f makefile -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable named a.out. -# -# If it is invoked by the command line -# make -f makefile a.out.prof -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable which profiles -# named a.out.prof. -# -# To remove all the objects but leave the executables use the command line -# make -f makefile clean -# -# To remove everything but the source files use the command line -# make -f makefile clobber -# -# To remove the source files created by /usr/bin/fmgen and this makefile -# use the command line -# make -f makefile void -# -# The parameters SRCS and OBJS should not need to be changed. If, however, -# you need to add a new module add the name of the source module to the -# SRCS parameter and add the name of the resulting object file to the OBJS -# parameter. The new modules are not limited to fortran, but may be C, YACC, -# LEX, or CAL. An explicit rule will need to be added for PASCAL modules. 
-# -SRCS= fbwndgfs.f - -OBJS= fbwndgfs.o - -# Tunable parameters -# -# FC Name of the fortran compiling system to use -# LDFLAGS Flags to the loader -# LIBS List of libraries -# CMD Name of the executable -# PROFLIB Library needed for profiling -# -FC = ifort -LDFLAGS = -LIBS = ${W3NCO_LIB8} ${W3EMC_LIB8} ${BACIO_LIB8} ${IP_LIB8} ${SP_LIB8} -CMD = fbwndgfs -PROFLIB = -lprof - -# To perform the default compilation, use the first line -# To compile with flowtracing turned on, use the second line -# To compile giving profile additonal information, use the third line -# WARNING: SIMULTANEOUSLY PROFILING AND FLOWTRACING IS NOT RECOMMENDED -FFLAGS = -O3 -g -I ${IP_INC8} -assume byterecl -convert big_endian -r8 -i8 -#FFLAGS = -F -#FFLAGS = -Wf"-ez" - -# Lines from here on down should not need to be changed. They are the -# actual rules which make uses to build a.out. -# -all: $(CMD) - -$(CMD): $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(LIBS) - -# Make the profiled version of the command and call it a.out.prof -# -$(CMD).prof: $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(PROFLIB) $(LIBS) - -install: - mkdir -p ../../exec - cp -p $(CMD) ../../exec - -clean: - -rm -f $(OBJS) - -clobber: clean - -rm -f $(CMD) $(CMD).prof - -void: clobber - -rm -f $(SRCS) makefile diff --git a/sorc/fbwndgfs.fd/makefile.wcoss_cray b/sorc/fbwndgfs.fd/makefile.wcoss_cray deleted file mode 100755 index 0ebe267cb9..0000000000 --- a/sorc/fbwndgfs.fd/makefile.wcoss_cray +++ /dev/null @@ -1,81 +0,0 @@ -SHELL=/bin/sh -# -# This makefile was produced by /usr/bin/fmgen at 12:00:38 PM on 11/27/96 -# If it is invoked by the command line -# make -f makefile -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable named a.out. -# -# If it is invoked by the command line -# make -f makefile a.out.prof -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable which profiles -# named a.out.prof. -# -# To remove all the objects but leave the executables use the command line -# make -f makefile clean -# -# To remove everything but the source files use the command line -# make -f makefile clobber -# -# To remove the source files created by /usr/bin/fmgen and this makefile -# use the command line -# make -f makefile void -# -# The parameters SRCS and OBJS should not need to be changed. If, however, -# you need to add a new module add the name of the source module to the -# SRCS parameter and add the name of the resulting object file to the OBJS -# parameter. The new modules are not limited to fortran, but may be C, YACC, -# LEX, or CAL. An explicit rule will need to be added for PASCAL modules. 
-# -SRCS= fbwndgfs.f - -OBJS= fbwndgfs.o - -# Tunable parameters -# -# FC Name of the fortran compiling system to use -# LDFLAGS Flags to the loader -# LIBS List of libraries -# CMD Name of the executable -# PROFLIB Library needed for profiling -# -FC = ftn -LDFLAGS = -LIBS = ${W3NCO_LIB8} ${W3EMC_LIB8} ${BACIO_LIB8} ${IP_LIB8} ${SP_LIB8} -CMD = fbwndgfs -PROFLIB = -lprof - -# To perform the default compilation, use the first line -# To compile with flowtracing turned on, use the second line -# To compile giving profile additonal information, use the third line -# WARNING: SIMULTANEOUSLY PROFILING AND FLOWTRACING IS NOT RECOMMENDED -FFLAGS = -O3 -g -I ${IP_INC8} -assume byterecl -convert big_endian -r8 -i8 -axCORE-AVX2 -#FFLAGS = -F -#FFLAGS = -Wf"-ez" - -# Lines from here on down should not need to be changed. They are the -# actual rules which make uses to build a.out. -# -all: $(CMD) - -$(CMD): $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(LIBS) - -# Make the profiled version of the command and call it a.out.prof -# -$(CMD).prof: $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(PROFLIB) $(LIBS) - -install: - mkdir -p ../../exec - cp -p $(CMD) ../../exec - -clean: - -rm -f $(OBJS) - -clobber: clean - -rm -f $(CMD) $(CMD).prof - -void: clobber - -rm -f $(SRCS) makefile diff --git a/sorc/fbwndgfs.fd/makefile.wcoss_dell_p3 b/sorc/fbwndgfs.fd/makefile.wcoss_dell_p3 deleted file mode 100755 index f38539916f..0000000000 --- a/sorc/fbwndgfs.fd/makefile.wcoss_dell_p3 +++ /dev/null @@ -1,81 +0,0 @@ -SHELL=/bin/sh -# -# This makefile was produced by /usr/bin/fmgen at 12:00:38 PM on 11/27/96 -# If it is invoked by the command line -# make -f makefile -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable named a.out. -# -# If it is invoked by the command line -# make -f makefile a.out.prof -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable which profiles -# named a.out.prof. -# -# To remove all the objects but leave the executables use the command line -# make -f makefile clean -# -# To remove everything but the source files use the command line -# make -f makefile clobber -# -# To remove the source files created by /usr/bin/fmgen and this makefile -# use the command line -# make -f makefile void -# -# The parameters SRCS and OBJS should not need to be changed. If, however, -# you need to add a new module add the name of the source module to the -# SRCS parameter and add the name of the resulting object file to the OBJS -# parameter. The new modules are not limited to fortran, but may be C, YACC, -# LEX, or CAL. An explicit rule will need to be added for PASCAL modules. 
-# -SRCS= fbwndgfs.f - -OBJS= fbwndgfs.o - -# Tunable parameters -# -# FC Name of the fortran compiling system to use -# LDFLAGS Flags to the loader -# LIBS List of libraries -# CMD Name of the executable -# PROFLIB Library needed for profiling -# -FC = ifort -LDFLAGS = -LIBS = ${W3NCO_LIB8} ${W3EMC_LIB8} ${BACIO_LIB8} ${IP_LIB8} ${SP_LIB8} -CMD = fbwndgfs -PROFLIB = -lprof - -# To perform the default compilation, use the first line -# To compile with flowtracing turned on, use the second line -# To compile giving profile additonal information, use the third line -# WARNING: SIMULTANEOUSLY PROFILING AND FLOWTRACING IS NOT RECOMMENDED -FFLAGS = -O3 -g -I ${IP_INC8} -assume byterecl -convert big_endian -r8 -i8 -#FFLAGS = -F -#FFLAGS = -Wf"-ez" - -# Lines from here on down should not need to be changed. They are the -# actual rules which make uses to build a.out. -# -all: $(CMD) - -$(CMD): $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(LIBS) - -# Make the profiled version of the command and call it a.out.prof -# -$(CMD).prof: $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(PROFLIB) $(LIBS) - -install: - mkdir -p ../../exec - cp -p $(CMD) ../../exec - -clean: - -rm -f $(OBJS) - -clobber: clean - -rm -f $(CMD) $(CMD).prof - -void: clobber - -rm -f $(SRCS) makefile diff --git a/sorc/fv3nc2nemsio.fd/0readme b/sorc/fv3nc2nemsio.fd/0readme deleted file mode 100644 index 7be2fbcd34..0000000000 --- a/sorc/fv3nc2nemsio.fd/0readme +++ /dev/null @@ -1,23 +0,0 @@ -The first version of this program was provided by Jeff Whitaker and Philip Pegion from ESRL. -Fanglin Ynag has subsequently made a few revsions. - -10/20/2016, Fanglin Yang -Note that FV3 lat-lon grids are located at the center of each grid box, -start from south to north, and from east to west. -For example, for a 0.5-deg uniform grid, -nlon=720, nlat=360 -X(1,1)=[0.25E,89.75S] -X(nlon,nlat)=[359.75E,89.75N] - -write out nemsio, S->N is reversed to N->S to follow NCEP convention - -12/18/2016 Fanglin Yang -updated to handle output of any frequency and any accumulation bucket - - -01/10/2017 Fanglin Yang -updated to handle both hydrostatic and nonhydrostatic cases. They have different output numbers and variable names. - -10/07/2017 Fanglin Yang -In FV3 tic26 branch which includes the lastest Write Component, hgtsfc has been defined as [m] instead of [gpm]. -The scaling by 1/grav in fv3nc2nemsio.fd needs to be removed. diff --git a/sorc/fv3nc2nemsio.fd/constants.f90 b/sorc/fv3nc2nemsio.fd/constants.f90 deleted file mode 100644 index c0a066eec0..0000000000 --- a/sorc/fv3nc2nemsio.fd/constants.f90 +++ /dev/null @@ -1,314 +0,0 @@ -! this module was extracted from the GSI version operational -! at NCEP in Dec. 2007. -module constants -!$$$ module documentation block -! . . . . -! module: constants -! prgmmr: treadon org: np23 date: 2003-09-25 -! -! abstract: This module contains the definition of various constants -! used in the gsi code -! -! program history log: -! 2003-09-25 treadon - original code -! 2004-03-02 treadon - allow global and regional constants to differ -! 2004-06-16 treadon - update documentation -! 2004-10-28 treadon - replace parameter tiny=1.e-12 with tiny_r_kind -! and tiny_single -! 2004-11-16 treadon - add huge_single, huge_r_kind parameters -! 2005-01-27 cucurull - add ione -! 2005-08-24 derber - move cg_term to constants from qcmod -! 2006-03-07 treadon - add rd_over_cp_mass -! 2006-05-18 treadon - add huge_i_kind -! 2006-06-06 su - add var-qc wgtlim, change value to 0.25 (ECMWF) -! 2006-07-28 derber - add r1000 -! -! 
Subroutines Included: -! sub init_constants - compute derived constants, set regional/global constants -! -! Variable Definitions: -! see below -! -! attributes: -! language: f90 -! machine: ibm RS/6000 SP -! -!$$$ - use kinds, only: r_single,r_kind,i_kind - implicit none - -! Declare constants - integer(i_kind) izero,ione - real(r_kind) rearth,grav,omega,rd,rv,cp,cv,cvap,cliq - real(r_kind) csol,hvap,hfus,psat,t0c,ttp,jcal,cp_mass,cg_term - real(r_kind) fv,deg2rad,rad2deg,pi,tiny_r_kind,huge_r_kind,huge_i_kind - real(r_kind) ozcon,rozcon,tpwcon,rd_over_g,rd_over_cp,g_over_rd - real(r_kind) amsua_clw_d1,amsua_clw_d2,constoz,zero,one,two,four - real(r_kind) one_tenth,quarter,three,five,rd_over_cp_mass, gamma - real(r_kind) rearth_equator,stndrd_atmos_ps,r1000 - real(r_kind) semi_major_axis,semi_minor_axis,n_a,n_b - real(r_kind) eccentricity,grav_polar,grav_ratio - real(r_kind) grav_equator,earth_omega,grav_constant - real(r_kind) flattening,eccentricity_linear,somigliana - real(r_kind) dldt,dldti,hsub,psatk,tmix,xa,xai,xb,xbi - real(r_kind) eps,epsm1,omeps,wgtlim - real(r_kind) elocp,cpr,el2orc,cclimit,climit,epsq - real(r_kind) pcpeff0,pcpeff1,pcpeff2,pcpeff3,rcp,c0,delta - real(r_kind) h1000,factor1,factor2,rhcbot,rhctop,dx_max,dx_min,dx_inv - real(r_kind) h300,half,cmr,cws,ke2,row,rrow - real(r_single) zero_single,tiny_single,huge_single - real(r_single) rmw_mean_distance, roic_mean_distance - logical :: constants_initialized = .true. - - -! Define constants common to global and regional applications -! name value description units -! ---- ----- ----------- ----- - parameter(rearth_equator= 6.37813662e6_r_kind) ! equatorial earth radius (m) - parameter(omega = 7.2921e-5_r_kind) ! angular velocity of earth (1/s) - parameter(cp = 1.0046e+3_r_kind) ! specific heat of air @pressure (J/kg/K) - parameter(cvap = 1.8460e+3_r_kind) ! specific heat of h2o vapor (J/kg/K) - parameter(csol = 2.1060e+3_r_kind) ! specific heat of solid h2o (ice)(J/kg/K) - parameter(hvap = 2.5000e+6_r_kind) ! latent heat of h2o condensation (J/kg) - parameter(hfus = 3.3358e+5_r_kind) ! latent heat of h2o fusion (J/kg) - parameter(psat = 6.1078e+2_r_kind) ! pressure at h2o triple point (Pa) - parameter(t0c = 2.7315e+2_r_kind) ! temperature at zero celsius (K) - parameter(ttp = 2.7316e+2_r_kind) ! temperature at h2o triple point (K) - parameter(jcal = 4.1855e+0_r_kind) ! joules per calorie () - parameter(stndrd_atmos_ps = 1013.25e2_r_kind) ! 1976 US standard atmosphere ps (Pa) - -! Numeric constants - parameter(izero = 0) - parameter(ione = 1) - parameter(zero_single = 0.0_r_single) - parameter(zero = 0.0_r_kind) - parameter(one_tenth = 0.10_r_kind) - parameter(quarter= 0.25_r_kind) - parameter(one = 1.0_r_kind) - parameter(two = 2.0_r_kind) - parameter(three = 3.0_r_kind) - parameter(four = 4.0_r_kind) - parameter(five = 5.0_r_kind) - parameter(r1000 = 1000.0_r_kind) - -! Constants for gps refractivity - parameter(n_a=77.6_r_kind) !K/mb - parameter(n_b=3.73e+5_r_kind) !K^2/mb - -! Parameters below from WGS-84 model software inside GPS receivers. - parameter(semi_major_axis = 6378.1370e3_r_kind) ! (m) - parameter(semi_minor_axis = 6356.7523142e3_r_kind) ! (m) - parameter(grav_polar = 9.8321849378_r_kind) ! (m/s2) - parameter(grav_equator = 9.7803253359_r_kind) ! (m/s2) - parameter(earth_omega = 7.292115e-5_r_kind) ! (rad/s) - parameter(grav_constant = 3.986004418e14_r_kind) ! (m3/s2) - -! 
Derived geophysical constants - parameter(flattening = (semi_major_axis-semi_minor_axis)/semi_major_axis)!() - parameter(somigliana = & - (semi_minor_axis/semi_major_axis) * (grav_polar/grav_equator) - one)!() - parameter(grav_ratio = (earth_omega*earth_omega * & - semi_major_axis*semi_major_axis * semi_minor_axis) / grav_constant) !() - -! Derived thermodynamic constants - parameter ( dldti = cvap-csol ) - parameter ( hsub = hvap+hfus ) - parameter ( psatk = psat*0.001_r_kind ) - parameter ( tmix = ttp-20._r_kind ) - parameter ( elocp = hvap/cp ) - parameter ( rcp = one/cp ) - -! Constants used in GFS moist physics - parameter ( h300 = 300._r_kind ) - parameter ( half = 0.5_r_kind ) - parameter ( cclimit = 0.001_r_kind ) - parameter ( climit = 1.e-20_r_kind) - parameter ( epsq = 2.e-12_r_kind ) - parameter ( h1000 = 1000.0_r_kind) - parameter ( rhcbot=0.85_r_kind ) - parameter ( rhctop=0.85_r_kind ) - parameter ( dx_max=-8.8818363_r_kind ) - parameter ( dx_min=-5.2574954_r_kind ) - parameter ( dx_inv=one/(dx_max-dx_min) ) - parameter ( c0=0.002_r_kind ) - parameter ( delta=0.6077338_r_kind ) - parameter ( pcpeff0=1.591_r_kind ) - parameter ( pcpeff1=-0.639_r_kind ) - parameter ( pcpeff2=0.0953_r_kind ) - parameter ( pcpeff3=-0.00496_r_kind ) - parameter ( cmr = one/0.0003_r_kind ) - parameter ( cws = 0.025_r_kind ) - parameter ( ke2 = 0.00002_r_kind ) - parameter ( row = 1000._r_kind ) - parameter ( rrow = one/row ) - -! Constant used to process ozone - parameter ( constoz = 604229.0_r_kind) - -! Constants used in cloud liquid water correction for AMSU-A -! brightness temperatures - parameter ( amsua_clw_d1 = 0.754_r_kind ) - parameter ( amsua_clw_d2 = -2.265_r_kind ) - -! Constants used for variational qc - parameter ( wgtlim = 0.25_r_kind) ! Cutoff weight for concluding that obs has been - ! rejected by nonlinear qc. This limit is arbitrary - ! and DOES NOT affect nonlinear qc. It only affects - ! the printout which "counts" the number of obs that - ! "fail" nonlinear qc. Observations counted as failing - ! nonlinear qc are still assimilated. Their weight - ! relative to other observations is reduced. Changing - ! wgtlim does not alter the analysis, only - ! the nonlinear qc data "count" - -! Constants describing the Extended Best-Track Reanalysis [Demuth et -! al., 2008] tropical cyclone (TC) distance for regions relative to TC -! track position; units are in kilometers - - parameter (rmw_mean_distance = 64.5479412) - parameter (roic_mean_distance = 338.319656) - -contains - subroutine init_constants_derived -!$$$ subprogram documentation block -! . . . . -! subprogram: init_constants_derived set derived constants -! prgmmr: treadon org: np23 date: 2004-12-02 -! -! abstract: This routine sets derived constants -! -! program history log: -! 2004-12-02 treadon -! 2005-03-03 treadon - add implicit none -! -! input argument list: -! -! output argument list: -! -! attributes: -! language: f90 -! machine: ibm rs/6000 sp -! -!$$$ - implicit none - -! Trigonometric constants - pi = acos(-one) - deg2rad = pi/180.0_r_kind - rad2deg = one/deg2rad - cg_term = (sqrt(two*pi))/two ! constant for variational qc - tiny_r_kind = tiny(zero) - huge_r_kind = huge(zero) - tiny_single = tiny(zero_single) - huge_single = huge(zero_single) - huge_i_kind = huge(izero) - -! Geophysical parameters used in conversion of geopotential to -! geometric height - eccentricity_linear = sqrt(semi_major_axis**2 - semi_minor_axis**2) - eccentricity = eccentricity_linear / semi_major_axis - constants_initialized = .true. 
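The two statements above derive the linear eccentricity and first eccentricity from the WGS-84 semi-axes declared earlier in this module. A small check repeating the same arithmetic with the module's values; the printed figures are for orientation only.

    # Reproduce the derived WGS-84 quantities from the constants above.
    import math

    semi_major_axis = 6378.1370e3     # m, as in the module
    semi_minor_axis = 6356.7523142e3  # m, as in the module

    flattening = (semi_major_axis - semi_minor_axis) / semi_major_axis
    eccentricity = math.sqrt(semi_major_axis**2 - semi_minor_axis**2) / semi_major_axis

    print(f"{flattening:.9f}")    # 0.003352811  (about 1/298.257)
    print(f"{eccentricity:.6f}")  # 0.081819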
- - return - end subroutine init_constants_derived - - subroutine init_constants(regional) -!$$$ subprogram documentation block -! . . . . -! subprogram: init_constants set regional or global constants -! prgmmr: treadon org: np23 date: 2004-03-02 -! -! abstract: This routine sets constants specific to regional or global -! applications of the gsi -! -! program history log: -! 2004-03-02 treadon -! 2004-06-16 treadon, documentation -! 2004-10-28 treadon - use intrinsic TINY function to set value -! for smallest machine representable positive -! number -! 2004-12-03 treadon - move derived constants to init_constants_derived -! 2005-03-03 treadon - add implicit none -! -! input argument list: -! regional - if .true., set regional gsi constants; -! otherwise (.false.), use global constants -! -! output argument list: -! -! attributes: -! language: f90 -! machine: ibm rs/6000 sp -! -!$$$ - implicit none - logical regional - real(r_kind) reradius,g,r_d,r_v,cliq_wrf - - gamma = 0.0065 - -! Define regional constants here - if (regional) then - -! Name given to WRF constants - reradius = one/6370.e03_r_kind - g = 9.81_r_kind - r_d = 287.04_r_kind - r_v = 461.6_r_kind - cliq_wrf = 4190.0_r_kind - cp_mass = 1004.67_r_kind - -! Transfer WRF constants into unified GSI constants - rearth = one/reradius - grav = g - rd = r_d - rv = r_v - cv = cp-r_d - cliq = cliq_wrf - rd_over_cp_mass = rd / cp_mass - -! Define global constants here - else - rearth = 6.3712e+6_r_kind - grav = 9.80665e+0_r_kind - rd = 2.8705e+2_r_kind - rv = 4.6150e+2_r_kind - cv = 7.1760e+2_r_kind - cliq = 4.1855e+3_r_kind - cp_mass= zero - rd_over_cp_mass = zero - endif - - -! Now define derived constants which depend on constants -! which differ between global and regional applications. - -! Constants related to ozone assimilation - ozcon = grav*21.4e-9_r_kind - rozcon= one/ozcon - -! Constant used in vertical integral for precipitable water - tpwcon = 100.0_r_kind/grav - -! Derived atmospheric constants - fv = rv/rd-one ! 
used in virtual temperature equation - dldt = cvap-cliq - xa = -(dldt/rv) - xai = -(dldti/rv) - xb = xa+hvap/(rv*ttp) - xbi = xai+hsub/(rv*ttp) - eps = rd/rv - epsm1 = rd/rv-one - omeps = one-eps - factor1 = (cvap-cliq)/rv - factor2 = hvap/rv-factor1*t0c - cpr = cp*rd - el2orc = hvap*hvap/(rv*cp) - rd_over_g = rd/grav - rd_over_cp = rd/cp - g_over_rd = grav/rd - - return - end subroutine init_constants - -end module constants diff --git a/sorc/fv3nc2nemsio.fd/fv3_main.f90 b/sorc/fv3nc2nemsio.fd/fv3_main.f90 deleted file mode 100644 index 48c7440b14..0000000000 --- a/sorc/fv3nc2nemsio.fd/fv3_main.f90 +++ /dev/null @@ -1,215 +0,0 @@ -program fv3_main - use fv3_module - use netcdf - use nemsio_module - implicit none - - type(nemsio_gfile) :: gfile - type(nemsio_meta) :: meta_nemsio - integer,parameter :: nvar2d=48 - character(nemsio_charkind) :: name2d(nvar2d) - integer :: nvar3d - character(nemsio_charkind), allocatable :: name3din(:), name3dout(:) - character(nemsio_charkind) :: varname,levtype - character(len=300) :: inpath,outpath - character(len=100) :: infile2d,infile3d,outfile - character(len=10) :: analdate, cfhour - character(len=5) :: cfhr,cfhzh - character(len=2) :: nhcase - real , allocatable :: lons(:),lats(:),tmp2d(:,:), tmp2dx(:,:) - real*8,allocatable :: tmp1d(:),tmp1dx(:),fhours(:) - real*4 :: fhour - integer :: fhzh, nhcas - - integer :: ii,i,j,k,ncid2d,ncid3d,ifhr,nlevs,nlons,nlats,ntimes,nargs,iargc,YYYY,MM,DD,HH,stat,varid - - data name2d /'ALBDOsfc','CPRATsfc','PRATEsfc','DLWRFsfc','ULWRFsfc','DSWRFsfc','USWRFsfc','DSWRFtoa','USWRFtoa',& - 'ULWRFtoa','GFLUXsfc','HGTsfc','HPBLsfc',& - 'ICECsfc','SLMSKsfc','LHTFLsfc','SHTFLsfc','PRESsfc','PWATclm','SOILM','SOILW1','SOILW2','SOILW3','SOILW4','SPFH2m',& - 'SOILT1','SOILT2','SOILT3','SOILT4','TMP2m','TMPsfc','UGWDsfc','VGWDsfc','UFLXsfc','VFLXsfc','UGRD10m','VGRD10m',& - 'WEASDsfc','SNODsfc','ZORLsfc','VFRACsfc','F10Msfc','VTYPEsfc','STYPEsfc',& - 'TCDCclm', 'TCDChcl', 'TCDCmcl', 'TCDClcl'/ - - !===================================================================== - - ! read in from command line - nargs=iargc() - IF (nargs .NE. 10) THEN - print*,'usage fv3_interface analdate ifhr fhzh fhour inpath infile2d infile3d outpath,outfile,nhcase' - STOP 1 - ENDIF - call getarg(1,analdate) - call getarg(2,cfhr) - call getarg(3,cfhzh) - call getarg(4,cfhour) - call getarg(5,inpath) - call getarg(6,infile2d) - call getarg(7,infile3d) - call getarg(8,outpath) - call getarg(9,outfile) - call getarg(10,nhcase) -! print*,analdate,cfhr,cfhzh,cfhour,inpath,infile2d,infile3d,outpath,outfile,nhcase - - read(nhcase,'(i2.1)') nhcas - read(cfhr,'(i5.1)') ifhr - read(cfhzh,'(i5.1)') fhzh - read(cfhour,*) fhour - read(analdate(1:4),'(i4)') YYYY - read(analdate(5:6),'(i2)') MM - read(analdate(7:8),'(i2)') DD - read(analdate(9:10),'(i2)') HH - print*,"ifhr,fhzh,fhour,analdate ",ifhr,fhzh,fhour,analdate - - if (nhcas == 0 ) then !non-hydrostatic case - nvar3d=9 - allocate (name3din(nvar3d), name3dout(nvar3d)) - name3din=(/'ucomp ','vcomp ','temp ','sphum ','o3mr ','nhpres','w ','clwmr ','delp '/) - name3dout=(/'ugrd ','vgrd ','tmp ','spfh ','o3mr ','pres ','vvel ','clwmr','dpres'/) - else - nvar3d=8 - allocate (name3din(nvar3d), name3dout(nvar3d)) - name3din=(/'ucomp ','vcomp ','temp ','sphum ','o3mr ','hypres','clwmr ','delp '/) - name3dout=(/'ugrd ','vgrd ','tmp ','spfh ','o3mr ','pres ','clwmr','dpres'/) - endif - - ! 
open netcdf files - print*,'reading',trim(inpath)//'/'//trim(infile2d) - stat = nf90_open(trim(inpath)//'/'//trim(infile2d),NF90_NOWRITE, ncid2d) - if (stat .NE.0) print*,stat - print*,'reading',trim(inpath)//'/'//trim(infile3d) - stat = nf90_open(trim(inpath)//'/'//trim(infile3d),NF90_NOWRITE, ncid3d) - if (stat .NE.0) print*,stat - ! get dimesions - - stat = nf90_inq_dimid(ncid2d,'time',varid) - if (stat .NE.0) print*,stat,varid - if (stat .NE. 0) STOP 1 - stat = nf90_inquire_dimension(ncid2d,varid,len=ntimes) - if (stat .NE.0) print*,stat,ntimes - if (stat .NE. 0) STOP 1 - allocate(fhours(ntimes)) - stat = nf90_inq_varid(ncid2d,'time',varid) - if (stat .NE. 0) STOP 1 - stat = nf90_get_var(ncid2d,varid,fhours) - if (stat .NE.0) print*,stat,fhours - if (stat .NE. 0) STOP 1 - - stat = nf90_inq_dimid(ncid3d,'grid_xt',varid) - if (stat .NE.0) print*,stat,varid - if (stat .NE. 0) STOP 1 - stat = nf90_inquire_dimension(ncid3d,varid,len=nlons) - if (stat .NE.0) print*,stat,nlons - if (stat .NE. 0) STOP 1 - allocate(lons(nlons)) - allocate(tmp1d(nlons)) - stat = nf90_inq_varid(ncid3d,'grid_xt',varid) - if (stat .NE. 0) STOP 1 - stat = nf90_get_var(ncid3d,varid,tmp1d) - if (stat .NE.0) print*,stat - if (stat .NE. 0) STOP 1 - - lons=real(tmp1d,kind=4) - !print*,lons(1),lons(3072) - deallocate(tmp1d) - - stat = nf90_inq_dimid(ncid3d,'grid_yt',varid) - if (stat .NE.0) print*,stat - if (stat .NE. 0) STOP 1 - stat = nf90_inquire_dimension(ncid3d,varid,len=nlats) - if (stat .NE.0) print*,stat - if (stat .NE. 0) STOP 1 - allocate(lats(nlats)) - allocate(tmp1d(nlats)) - allocate(tmp1dx(nlats)) - stat = nf90_inq_varid(ncid3d,'grid_yt',varid) - stat = nf90_get_var(ncid3d,varid,tmp1dx,start=(/1/),count=(/nlats/)) - if (stat .NE.0) print*,stat - if (stat .NE. 0) STOP 1 - do j=1,nlats - tmp1d(j)=tmp1dx(nlats-j+1) - enddo - lats=real(tmp1d,kind=4) - print*,"lats_beg, lats_end",lats(1),lats(nlats) - deallocate(tmp1d, tmp1dx) - - stat = nf90_inq_dimid(ncid3d,'pfull',varid) - if (stat .NE.0) print*,stat - if (stat .NE. 0) STOP 1 - stat = nf90_inquire_dimension(ncid3d,varid,len=nlevs) - if (stat .NE.0) print*,stat - if (stat .NE. 0) STOP 1 - - call define_nemsio_meta(meta_nemsio,nlons,nlats,nlevs,nvar2d,nvar3d,lons,lats) - - allocate (tmp2d(nlons,nlats)) - allocate (tmp2dx(nlons,nlats)) - - meta_nemsio%idate(1)=YYYY - meta_nemsio%idate(2)=MM - meta_nemsio%idate(3)=DD - meta_nemsio%idate(4)=HH - - meta_nemsio%varrval(1)=float(fhzh) -! if (ifhr.EQ.0) then -! meta_nemsio%varrval(1)=0.0 -! else -! meta_nemsio%varrval(1)=(ifhr-1.0)*6.0 -! endif - - ! read in data - meta_nemsio%nfhour= fhours(ifhr) - meta_nemsio%fhour= fhours(ifhr) - print*,fhours(ifhr),ifhr,'calling netcdf read' -!--for ifhr=1, fhours=dt but fhour=00 if diag is determined by FHOUT - if (fhour .ne. fhours(ifhr) .and. ifhr.gt.1 )then - print*, 'requested ',fhour, ' not equal to fhours(ifhr) ', fhours(ifhr) - print*, 'abort ! ' - stop 1 - endif - - call nems_write_init(outpath,outfile,meta_nemsio,gfile) -! 
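The latitude axis read just above is reversed (tmp1d(j) = tmp1dx(nlats-j+1)) because, as the deleted 0readme earlier in this change notes, FV3 history output is cell-centred and ordered south to north, while the nemsio output follows the NCEP north-to-south convention. A small sketch reproducing the 0.5-degree example from that readme (nlon=720, nlat=360); illustrative only.

    # Cell-centred 0.5-degree grid and the south-to-north -> north-to-south flip.
    nlon, nlat = 720, 360
    dlon, dlat = 360.0 / nlon, 180.0 / nlat

    lons = [0.5 * dlon + i * dlon for i in range(nlon)]              # 0.25E ... 359.75E
    lats_s2n = [-90.0 + 0.5 * dlat + j * dlat for j in range(nlat)]  # -89.75 ... 89.75

    print(lons[0], lons[-1])          # 0.25 359.75
    print(lats_s2n[0], lats_s2n[-1])  # -89.75 89.75

    lats_n2s = lats_s2n[::-1]         # equivalent of tmp1d(j) = tmp1dx(nlats-j+1)
    print(lats_n2s[0], lats_n2s[-1])  # 89.75 -89.75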
read in all of the 2d variables and write out - print*,'calling write',meta_nemsio%rlat_min,meta_nemsio%rlat_max - print*,'lats',minval(meta_nemsio%lat),maxval(meta_nemsio%lat) - print *,'loop over 2d variables' - DO i=1,nvar2d - print *,i,trim(name2d(i)) - call fv3_netcdf_read_2d(ncid2d,ifhr,meta_nemsio,name2d(i),tmp2dx) - do ii=1,nlons - do j=1,nlats - tmp2d(ii,j)=tmp2dx(ii,nlats-j+1) - enddo - enddo - call nems_write(gfile,meta_nemsio%recname(i),meta_nemsio%reclevtyp(i),meta_nemsio%reclev(i), & - nlons*nlats,tmp2d,stat) - ENDDO - levtype='mid layer' -! loop through 3d fields - print *,'loop over 3d variables' - DO i=1,nvar3d - print*,i,trim(name3din(i)) - DO k=1,nlevs -! print*,k - call fv3_netcdf_read_3d(ncid3d,ifhr,meta_nemsio,name3din(i),k,tmp2dx) - do ii=1,nlons - do j=1,nlats - tmp2d(ii,j)=tmp2dx(ii,nlats-j+1) - enddo - enddo - call nems_write(gfile,name3dout(i),levtype,nlevs-k+1,nlons*nlats,tmp2d(:,:),stat) - IF (stat .NE. 0) then - print*,'error writing ,named3dout(i)',stat - STOP 1 - ENDIF - ENDDO - ENDDO - - call nemsio_close(gfile,iret=stat) - stat = nf90_close(ncid2d) - stat = nf90_close(ncid3d) - - deallocate(tmp2dx,tmp2d) - deallocate(name3din,name3dout) - - stop -end program fv3_main diff --git a/sorc/fv3nc2nemsio.fd/fv3_module.f90 b/sorc/fv3nc2nemsio.fd/fv3_module.f90 deleted file mode 100644 index 8d161acfcf..0000000000 --- a/sorc/fv3nc2nemsio.fd/fv3_module.f90 +++ /dev/null @@ -1,372 +0,0 @@ -module fv3_module - - - !======================================================================= - - ! Define associated modules and subroutines - - !----------------------------------------------------------------------- - use netcdf - use constants - use kinds - use nemsio_module - - type nemsio_meta - character(nemsio_charkind), dimension(:), allocatable :: recname - character(nemsio_charkind), dimension(:), allocatable :: reclevtyp - character(16), dimension(:), allocatable :: variname - character(16), dimension(:), allocatable :: varrname - character(16), dimension(:), allocatable :: varr8name - character(16), dimension(:), allocatable :: aryiname - character(16), dimension(:), allocatable :: aryr8name - character(nemsio_charkind8) :: gdatatype - character(nemsio_charkind8) :: modelname - real(nemsio_realkind) :: rlon_min - real(nemsio_realkind) :: rlon_max - real(nemsio_realkind) :: rlat_min - real(nemsio_realkind) :: rlat_max - real(nemsio_realkind), dimension(:), allocatable :: lon - real(nemsio_realkind), dimension(:), allocatable :: lat - real(nemsio_realkind), dimension(:), allocatable :: varrval - integer(nemsio_intkind), dimension(:,:), allocatable :: aryival - integer(nemsio_intkind), dimension(:), allocatable :: reclev - integer(nemsio_intkind), dimension(:), allocatable :: varival - integer(nemsio_intkind), dimension(:), allocatable :: aryilen - integer(nemsio_intkind), dimension(:), allocatable :: aryr8len - integer(nemsio_intkind) :: idate(7) - integer(nemsio_intkind) :: version - integer(nemsio_intkind) :: nreo_vc - integer(nemsio_intkind) :: nrec - integer(nemsio_intkind) :: nmeta - integer(nemsio_intkind) :: nmetavari - integer(nemsio_intkind) :: nmetaaryi - integer(nemsio_intkind) :: nmetavarr - integer(nemsio_intkind) :: nfhour - integer(nemsio_intkind) :: nfminute - integer(nemsio_intkind) :: nfsecondn - integer(nemsio_intkind) :: nfsecondd - integer(nemsio_intkind) :: dimx - integer(nemsio_intkind) :: dimy - integer(nemsio_intkind) :: dimz - integer(nemsio_intkind) :: nframe - integer(nemsio_intkind) :: nsoil - integer(nemsio_intkind) :: ntrac - 
integer(nemsio_intkind) :: ncldt - integer(nemsio_intkind) :: idvc - integer(nemsio_intkind) :: idsl - integer(nemsio_intkind) :: idvm - integer(nemsio_intkind) :: idrt - integer(nemsio_intkind) :: fhour - - end type nemsio_meta ! type nemsio_meta - contains -!----------------------------------------------------------------------- - subroutine fv3_netcdf_read_2d(ncid2d,ifhr,meta_nemsio,varname,data2d) - - implicit none - type(nemsio_meta) :: meta_nemsio - integer :: ncid2d - integer :: ifhr,varid,stat - real :: data2d(meta_nemsio%dimx,meta_nemsio%dimy) - character(nemsio_charkind) :: varname - - ! loop through 2d data - stat = nf90_inq_varid(ncid2d,trim(varname),varid) - !print*,stat,varid,trim(varname) - stat = nf90_get_var(ncid2d,varid,data2d,start=(/1,1,ifhr/),count=(/meta_nemsio%dimx,meta_nemsio%dimy,1/)) - IF (stat .NE. 0 ) THEN - print*,'error reading ',varname - STOP - ENDIF - -end subroutine fv3_netcdf_read_2d -!----------------------------------------------------------------------- - - subroutine fv3_netcdf_read_3d(ncid3d,ifhr,meta_nemsio,varname,k,data2d) - - implicit none - - type(nemsio_meta) :: meta_nemsio - integer :: ncid3d - integer :: k - integer :: ifhr,varid,stat - character(nemsio_charkind) :: varname - !real :: data3d(meta_nemsio%dimx,meta_nemsio%dimy,meta_nemsio%dimz) - real :: data2d(meta_nemsio%dimx,meta_nemsio%dimy) - - - stat = nf90_inq_varid(ncid3d,trim(varname),varid) - !print*,stat,varname,varid - !stat = nf90_get_var(ncid3d,varid,data3d,start=(/1,1,1,ifhr/),count=(/meta_nemsio%dimx,meta_nemsio%dimy,meta_nemsio%dimz,1/)) - stat = nf90_get_var(ncid3d,varid,data2d,start=(/1,1,k,ifhr/),count=(/meta_nemsio%dimx,meta_nemsio%dimy,1,1/)) - - IF (stat .NE. 0 ) THEN - print*,'error reading ',varname - STOP - ENDIF - -end subroutine fv3_netcdf_read_3d -!----------------------------------------------------------------------- - - subroutine define_nemsio_meta(meta_nemsio,nlons,nlats,nlevs,nvar2d,nvar3d,lons,lats) - implicit none - type(nemsio_meta) :: meta_nemsio - integer :: nlons,nlats,nlevs,i,j,k,nvar2d,nvar3d - integer*8 :: ct - real :: lons(nlons),lats(nlats) -! 
local - - meta_nemsio%idate(1:6) = 0 - meta_nemsio%idate(7) = 1 - meta_nemsio%modelname = 'GFS' - meta_nemsio%version = 198410 - meta_nemsio%nrec = nvar2d + nlevs*nvar3d - meta_nemsio%nmeta = 8 - meta_nemsio%nmetavari = 3 - meta_nemsio%nmetavarr = 1 - meta_nemsio%nmetaaryi = 1 - meta_nemsio%dimx = nlons - meta_nemsio%dimy = nlats - meta_nemsio%dimz = nlevs - meta_nemsio%rlon_min = minval(lons) - meta_nemsio%rlon_max = maxval(lons) - meta_nemsio%rlat_min = minval(lats) - meta_nemsio%rlat_max = maxval(lats) - meta_nemsio%nsoil = 4 - meta_nemsio%nframe = 0 - meta_nemsio%nfminute = 0 - meta_nemsio%nfsecondn = 0 - meta_nemsio%nfsecondd = 1 - meta_nemsio%ntrac = 3 - meta_nemsio%idrt = 0 - meta_nemsio%ncldt = 3 - meta_nemsio%idvc = 2 - - - allocate(meta_nemsio%recname(meta_nemsio%nrec)) - allocate(meta_nemsio%reclevtyp(meta_nemsio%nrec)) - allocate(meta_nemsio%reclev(meta_nemsio%nrec)) - allocate(meta_nemsio%variname(meta_nemsio%nmetavari)) - allocate(meta_nemsio%varival(meta_nemsio%nmetavari)) - allocate(meta_nemsio%aryiname(meta_nemsio%nmetavari)) - allocate(meta_nemsio%aryilen(meta_nemsio%nmetavari)) - allocate(meta_nemsio%varrname(meta_nemsio%nmetavarr)) - allocate(meta_nemsio%varrval(meta_nemsio%nmetavarr)) - allocate(meta_nemsio%lon(nlons*nlats)) - allocate(meta_nemsio%lat(nlons*nlats)) - - meta_nemsio%varrname(1)='zhour' - meta_nemsio%variname(1)='cu_physics' - meta_nemsio%varival(1)=4 - meta_nemsio%variname(2)='mp_physics' - meta_nemsio%varival(2)=1000 - meta_nemsio%variname(3)='IVEGSRC' - meta_nemsio%varival(3)=2 - ct=1 - DO j=1,nlats - DO i=1,nlons - meta_nemsio%lon(ct) = lons(i) - meta_nemsio%lat(ct) = lats(j) - ct=ct+1 - ENDDO - ENDDO - - meta_nemsio%aryilen(1) = nlats/2 - meta_nemsio%aryiname(1) = 'lpl' - meta_nemsio%reclev(:)=1 - meta_nemsio%recname(1) = 'albdo_ave' - meta_nemsio%reclevtyp(1) = 'sfc' - meta_nemsio%recname(2) = 'cprat_ave' - meta_nemsio%reclevtyp(2) = 'sfc' - meta_nemsio%recname(3) = 'prate_ave' - meta_nemsio%reclevtyp(3) = 'sfc' - meta_nemsio%recname(4) = 'dlwrf_ave' - meta_nemsio%reclevtyp(4) = 'sfc' - meta_nemsio%recname(5) = 'ulwrf_ave' - meta_nemsio%reclevtyp(5) = 'sfc' - meta_nemsio%recname(6) = 'dswrf_ave' - meta_nemsio%reclevtyp(6) = 'sfc' - meta_nemsio%recname(7) = 'uswrf_ave' - meta_nemsio%reclevtyp(7) = 'sfc' - meta_nemsio%recname(8) = 'dswrf_ave' - meta_nemsio%reclevtyp(8) = 'nom. top' - meta_nemsio%recname(9) = 'uswrf_ave' - meta_nemsio%reclevtyp(9) = 'nom. top' - meta_nemsio%recname(10) = 'ulwrf_ave' - meta_nemsio%reclevtyp(10) = 'nom. 
top' - meta_nemsio%recname(11) = 'gflux_ave' - meta_nemsio%reclevtyp(11) = 'sfc' - meta_nemsio%recname(12) = 'hgt' - meta_nemsio%reclevtyp(12) = 'sfc' - meta_nemsio%recname(13) = 'hpbl' - meta_nemsio%reclevtyp(13) = 'sfc' - meta_nemsio%recname(14) = 'icec' - meta_nemsio%reclevtyp(14) = 'sfc' - meta_nemsio%recname(15) = 'land' - meta_nemsio%reclevtyp(15) = 'sfc' - meta_nemsio%recname(16) = 'lhtfl_ave' - meta_nemsio%reclevtyp(16) = 'sfc' - meta_nemsio%recname(17) = 'shtfl_ave' - meta_nemsio%reclevtyp(17) = 'sfc' - meta_nemsio%recname(18) = 'pres' - meta_nemsio%reclevtyp(18) = 'sfc' - meta_nemsio%recname(19) = 'pwat' - meta_nemsio%reclevtyp(19) = 'atmos col' - meta_nemsio%recname(20) = 'soilm' - meta_nemsio%reclevtyp(20) = '0-200 cm down' - meta_nemsio%recname(21) = 'soilw' - meta_nemsio%reclevtyp(21) = '0-10 cm down' - meta_nemsio%recname(22) = 'soilw' - meta_nemsio%reclevtyp(22) = '10-40 cm down' - meta_nemsio%recname(23) = 'soilw' - meta_nemsio%reclevtyp(23) = '40-100 cm down' - meta_nemsio%recname(24) = 'soilw' - meta_nemsio%reclevtyp(24) = '100-200 cm down' - meta_nemsio%recname(25) = 'spfh' - meta_nemsio%reclevtyp(25) = '2 m above gnd' - meta_nemsio%recname(26) = 'tmp' - meta_nemsio%reclevtyp(26) = '0-10 cm down' - meta_nemsio%recname(27) = 'tmp' - meta_nemsio%reclevtyp(27) = '10-40 cm down' - meta_nemsio%recname(28) = 'tmp' - meta_nemsio%reclevtyp(28) = '40-100 cm down' - meta_nemsio%recname(29) = 'tmp' - meta_nemsio%reclevtyp(29) = '100-200 cm down' - meta_nemsio%recname(30) = 'tmp' - meta_nemsio%reclevtyp(30) = '2 m above gnd' - meta_nemsio%recname(31) = 'tmp' - meta_nemsio%reclevtyp(31) = 'sfc' - meta_nemsio%recname(32) = 'ugwd' - meta_nemsio%reclevtyp(32) = 'sfc' - meta_nemsio%recname(33) = 'vgwd' - meta_nemsio%reclevtyp(33) = 'sfc' - meta_nemsio%recname(34) = 'uflx_ave' - meta_nemsio%reclevtyp(34) = 'sfc' - meta_nemsio%recname(35) = 'vflx_ave' - meta_nemsio%reclevtyp(35) = 'sfc' - meta_nemsio%recname(36) = 'ugrd' - meta_nemsio%reclevtyp(36) = '10 m above gnd' - meta_nemsio%recname(37) = 'vgrd' - meta_nemsio%reclevtyp(37) = '10 m above gnd' - meta_nemsio%recname(38) = 'weasd' - meta_nemsio%reclevtyp(38) = 'sfc' - meta_nemsio%recname(39) = 'snod' - meta_nemsio%reclevtyp(39) = 'sfc' - meta_nemsio%recname(40) = 'zorl' - meta_nemsio%reclevtyp(40) = 'sfc' - meta_nemsio%recname(41) = 'vfrac' - meta_nemsio%reclevtyp(41) = 'sfc' - meta_nemsio%recname(42) = 'f10m' - meta_nemsio%reclevtyp(42) = 'sfc' - meta_nemsio%recname(43) = 'vtype' - meta_nemsio%reclevtyp(43) = 'sfc' - meta_nemsio%recname(44) = 'stype' - meta_nemsio%reclevtyp(44) = 'sfc' - meta_nemsio%recname(45) = 'tcdc_ave' - meta_nemsio%reclevtyp(45) = 'atmos col' - meta_nemsio%recname(46) = 'tcdc_ave' - meta_nemsio%reclevtyp(46) = 'high cld lay' - meta_nemsio%recname(47) = 'tcdc_ave' - meta_nemsio%reclevtyp(47) = 'mid cld lay' - meta_nemsio%recname(48) = 'tcdc_ave' - meta_nemsio%reclevtyp(48) = 'low cld lay' -! 
loop through 3d variables - DO k = 1, nlevs - meta_nemsio%recname(k+nvar2d) = 'ugrd' - meta_nemsio%reclevtyp(k+nvar2d) = 'mid layer' - meta_nemsio%reclev(k+nvar2d) = k - meta_nemsio%recname(k+nvar2d+nlevs) = 'vgrd' - meta_nemsio%reclevtyp(k+nvar2d+nlevs) = 'mid layer' - meta_nemsio%reclev(k+nvar2d+nlevs) = k - meta_nemsio%recname(k+nvar2d+nlevs*2) = 'tmp' - meta_nemsio%reclevtyp(k+nvar2d+nlevs*2) = 'mid layer' - meta_nemsio%reclev(k+nvar2d+nlevs*2) = k - meta_nemsio%recname(k+nvar2d+nlevs*3) = 'spfh' - meta_nemsio%reclevtyp(k+nvar2d+nlevs*3) = 'mid layer' - meta_nemsio%reclev(k+nvar2d+nlevs*3) = k - meta_nemsio%recname(k+nvar2d+nlevs*4) = 'o3mr' - meta_nemsio%reclevtyp(k+nvar2d+nlevs*4) = 'mid layer' - meta_nemsio%reclev(k+nvar2d+nlevs*4) = k - meta_nemsio%recname(k+nvar2d+nlevs*5) = 'pres' - meta_nemsio%reclevtyp(k+nvar2d+nlevs*5) = 'mid layer' - meta_nemsio%reclev(k+nvar2d+nlevs*5) = k - meta_nemsio%recname(k+nvar2d+nlevs*6) = 'clwmr' - meta_nemsio%reclevtyp(k+nvar2d+nlevs*6) = 'mid layer' - meta_nemsio%reclev(k+nvar2d+nlevs*6) = k - meta_nemsio%recname(k+nvar2d+nlevs*7) = 'dpres' - meta_nemsio%reclevtyp(k+nvar2d+nlevs*7) = 'mid layer' - meta_nemsio%reclev(k+nvar2d+nlevs*7) = k - if (nvar3d == 9) then - meta_nemsio%recname(k+nvar2d+nlevs*8) = 'vvel' - meta_nemsio%reclevtyp(k+nvar2d+nlevs*8) = 'mid layer' - meta_nemsio%reclev(k+nvar2d+nlevs*8) = k - endif - ENDDO - - end subroutine define_nemsio_meta - - subroutine nems_write_init(datapath,filename_base,meta_nemsio,gfile) - - - implicit none - - type(nemsio_meta) :: meta_nemsio - character(len=200) :: datapath - character(len=100) :: filename_base - character(len=400) :: filename - type(nemsio_gfile) :: gfile - integer :: nemsio_iret - integer :: i, j, k - - write(filename,500) trim(datapath)//'/'//trim(filename_base) -500 format(a,i3.3) - print*,trim(filename) - call nemsio_init(iret=nemsio_iret) - print*,'iret=',nemsio_iret - !gfile%gtype = 'NEMSIO' - meta_nemsio%gdatatype = 'bin4' - call nemsio_open(gfile,trim(filename),'write', & - & iret=nemsio_iret, & - & modelname=trim(meta_nemsio%modelname), & - & version=meta_nemsio%version,gdatatype=meta_nemsio%gdatatype, & - & dimx=meta_nemsio%dimx,dimy=meta_nemsio%dimy, & - & dimz=meta_nemsio%dimz,rlon_min=meta_nemsio%rlon_min, & - & rlon_max=meta_nemsio%rlon_max,rlat_min=meta_nemsio%rlat_min, & - & rlat_max=meta_nemsio%rlat_max, & - & lon=meta_nemsio%lon,lat=meta_nemsio%lat, & - & idate=meta_nemsio%idate,nrec=meta_nemsio%nrec, & - & nframe=meta_nemsio%nframe,idrt=meta_nemsio%idrt,ncldt= & - & meta_nemsio%ncldt,idvc=meta_nemsio%idvc, & - & nfhour=meta_nemsio%nfhour,nfminute=meta_nemsio%nfminute, & - & nfsecondn=meta_nemsio%nfsecondn,nmeta=meta_nemsio%nmeta, & - & nfsecondd=meta_nemsio%nfsecondd,extrameta=.true., & - & nmetaaryi=meta_nemsio%nmetaaryi,recname=meta_nemsio%recname, & - & nmetavari=meta_nemsio%nmetavari,variname=meta_nemsio%variname, & - & varival=meta_nemsio%varival,varrval=meta_nemsio%varrval, & - & nmetavarr=meta_nemsio%nmetavarr,varrname=meta_nemsio%varrname, & - & reclevtyp=meta_nemsio%reclevtyp, & - & reclev=meta_nemsio%reclev,aryiname=meta_nemsio%aryiname, & - & aryilen=meta_nemsio%aryilen) - print*,'iret=',nemsio_iret - end subroutine nems_write_init - - -!------------------------------------------------------ - subroutine nems_write(gfile,recname,reclevtyp,level,dimx,data2d,iret) - - implicit none - type(nemsio_gfile) :: gfile - integer :: iret,level,dimx - real :: data2d(dimx) - character(nemsio_charkind) :: recname, reclevtyp - - call 
nemsio_writerecv(gfile,recname,levtyp=reclevtyp,lev=level,data=data2d,iret=iret) - if (iret.NE.0) then - print*,'error writing',recname,level,iret - STOP - ENDIF - - end subroutine nems_write - - -end module fv3_module diff --git a/sorc/fv3nc2nemsio.fd/kinds.f90 b/sorc/fv3nc2nemsio.fd/kinds.f90 deleted file mode 100644 index b3378bfccf..0000000000 --- a/sorc/fv3nc2nemsio.fd/kinds.f90 +++ /dev/null @@ -1,107 +0,0 @@ -! this module was extracted from the GSI version operational -! at NCEP in Dec. 2007. -module kinds -!$$$ module documentation block -! . . . . -! module: kinds -! prgmmr: treadon org: np23 date: 2004-08-15 -! -! abstract: Module to hold specification kinds for variable declaration. -! This module is based on (copied from) Paul vanDelst's -! type_kinds module found in the community radiative transfer -! model -! -! module history log: -! 2004-08-15 treadon -! -! Subroutines Included: -! -! Functions Included: -! -! remarks: -! The numerical data types defined in this module are: -! i_byte - specification kind for byte (1-byte) integer variable -! i_short - specification kind for short (2-byte) integer variable -! i_long - specification kind for long (4-byte) integer variable -! i_llong - specification kind for double long (8-byte) integer variable -! r_single - specification kind for single precision (4-byte) real variable -! r_double - specification kind for double precision (8-byte) real variable -! r_quad - specification kind for quad precision (16-byte) real variable -! -! i_kind - generic specification kind for default integer -! r_kind - generic specification kind for default floating point -! -! -! attributes: -! language: f90 -! machine: ibm RS/6000 SP -! -!$$$ end documentation block - implicit none - private - -! Integer type definitions below - -! Integer types - integer, parameter, public :: i_byte = selected_int_kind(1) ! byte integer - integer, parameter, public :: i_short = selected_int_kind(4) ! short integer - integer, parameter, public :: i_long = selected_int_kind(8) ! long integer - integer, parameter, private :: llong_t = selected_int_kind(16) ! llong integer - integer, parameter, public :: i_llong = max( llong_t, i_long ) - -! Expected 8-bit byte sizes of the integer kinds - integer, parameter, public :: num_bytes_for_i_byte = 1 - integer, parameter, public :: num_bytes_for_i_short = 2 - integer, parameter, public :: num_bytes_for_i_long = 4 - integer, parameter, public :: num_bytes_for_i_llong = 8 - -! Define arrays for default definition - integer, parameter, private :: num_i_kinds = 4 - integer, parameter, dimension( num_i_kinds ), private :: integer_types = (/ & - i_byte, i_short, i_long, i_llong /) - integer, parameter, dimension( num_i_kinds ), private :: integer_byte_sizes = (/ & - num_bytes_for_i_byte, num_bytes_for_i_short, & - num_bytes_for_i_long, num_bytes_for_i_llong /) - -! Default values -! **** CHANGE THE FOLLOWING TO CHANGE THE DEFAULT INTEGER TYPE KIND *** - integer, parameter, private :: default_integer = 3 ! 1=byte, - ! 2=short, - ! 3=long, - ! 4=llong - integer, parameter, public :: i_kind = integer_types( default_integer ) - integer, parameter, public :: num_bytes_for_i_kind = & - integer_byte_sizes( default_integer ) - - -! Real definitions below - -! Real types - integer, parameter, public :: r_single = selected_real_kind(6) ! single precision - integer, parameter, public :: r_double = selected_real_kind(15) ! double precision - integer, parameter, private :: quad_t = selected_real_kind(20) ! 
quad precision - integer, parameter, public :: r_quad = max( quad_t, r_double ) - -! Expected 8-bit byte sizes of the real kinds - integer, parameter, public :: num_bytes_for_r_single = 4 - integer, parameter, public :: num_bytes_for_r_double = 8 - integer, parameter, public :: num_bytes_for_r_quad = 16 - -! Define arrays for default definition - integer, parameter, private :: num_r_kinds = 3 - integer, parameter, dimension( num_r_kinds ), private :: real_kinds = (/ & - r_single, r_double, r_quad /) - integer, parameter, dimension( num_r_kinds ), private :: real_byte_sizes = (/ & - num_bytes_for_r_single, num_bytes_for_r_double, & - num_bytes_for_r_quad /) - -! Default values -! **** CHANGE THE FOLLOWING TO CHANGE THE DEFAULT REAL TYPE KIND *** - integer, parameter, private :: default_real = 1 ! 1=single, - ! 2=double, - ! 3=quad - integer, parameter, public :: r_kind = real_kinds( default_real ) - integer, parameter, public :: num_bytes_for_r_kind = & - real_byte_sizes( default_real ) - -end module kinds diff --git a/sorc/fv3nc2nemsio.fd/makefile.sh b/sorc/fv3nc2nemsio.fd/makefile.sh deleted file mode 100755 index 30c60cf7f0..0000000000 --- a/sorc/fv3nc2nemsio.fd/makefile.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/ksh -set -x - -machine=${1:-"cray"} - -source ../../modulefiles/module-setup.sh.inc -module use ../../modulefiles -module load modulefile.fv3nc2nemsio.$machine - -LIBnetcdf=$($NETCDF/bin/nf-config --flibs) -INCnetcdf=$($NETCDF/bin/nf-config --fflags) -export NETCDF_LDFLAGS=$LIBnetcdf -export NETCDF_INCLUDE=$INCnetcdf - - -$FCMP $FFLAGS -c kinds.f90 -$FCMP $FFLAGS -c constants.f90 -$FCMP $FFLAGS $NETCDF_INCLUDE -I $NEMSIO_INC -c fv3_module.f90 -$FCMP $FFLAGS $NETCDF_INCLUDE -I $NEMSIO_INC -I. -o fv3nc2nemsio.x fv3_main.f90 fv3_module.o $NETCDF_LDFLAGS $NEMSIO_LIB $BACIO_LIB4 $W3NCO_LIBd - -mv fv3nc2nemsio.x ../../exec/. -rm -f *.o *.mod - -exit 0 diff --git a/sorc/gaussian_sfcanl.fd/.gitignore b/sorc/gaussian_sfcanl.fd/.gitignore deleted file mode 100644 index 0a4391755c..0000000000 --- a/sorc/gaussian_sfcanl.fd/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -*.o -*.mod -*.exe diff --git a/sorc/gaussian_sfcanl.fd/Makefile b/sorc/gaussian_sfcanl.fd/Makefile deleted file mode 100755 index 69cd35f7ae..0000000000 --- a/sorc/gaussian_sfcanl.fd/Makefile +++ /dev/null @@ -1,22 +0,0 @@ -SHELL= /bin/sh - -CMD= gaussian_sfcanl.exe - -OBJS = gaussian_sfcanl.o - -build: $(CMD) - -$(CMD): $(OBJS) - $(FCOMP) $(FFLAGS) -I$(NEMSIO_INC) $(NETCDF_INCLUDE) -o $(CMD) $(OBJS) $(NETCDF_LDFLAGS_F) $(NEMSIO_LIB) $(BACIO_LIB4) $(W3NCO_LIBd) $(SP_LIB4) - -gaussian_sfcanl.o: gaussian_sfcanl.f90 - $(FCOMP) $(FFLAGS) -I$(NEMSIO_INC) $(NETCDF_INCLUDE) -c gaussian_sfcanl.f90 - -install: - cp ${CMD} ../../exec - -clean: - rm -f *.o *.mod ${CMD} ../../exec/${CMD} - -test: - @echo NO TESTS YET diff --git a/sorc/gaussian_sfcanl.fd/gaussian_sfcanl.f90 b/sorc/gaussian_sfcanl.fd/gaussian_sfcanl.f90 deleted file mode 100644 index acce575cd7..0000000000 --- a/sorc/gaussian_sfcanl.fd/gaussian_sfcanl.f90 +++ /dev/null @@ -1,2093 +0,0 @@ -!------------------------------------------------------------------ -! -! Read in surface and nst data on the cubed-sphere grid, -! interpolate it to the gaussian grid, and output the result -! to a nemsio or netcdf file. To not process nst data, -! set flag 'donst' to 'no'. To process nst, set to 'yes'. -! To output gaussian file in netcdf, set netcdf_out=.true. -! Otherwise, nemsio format will be output. -! -! Input files: -! ------------ -! weights.nc Interpolation weights. netcdf format -! 
anal.tile[1-6].nc fv3 surface restart files -! orog.tile[1-6].nc fv3 orography files -! fort.41 namelist Configuration namelist -! vcoord.txt Vertical coordinate definition file -! (ascii) -! -! Output files: -! ------------- -! sfc.gaussian.analysis.file surface data on gaussian grid - -! nemsio or netcdf. -! -! Namelist variables: -! ------------------- -! yy/mm/dd/hh year/month/day/hour of data. -! i/jgaus i/j dimension of gaussian grid. -! donst When 'no' do not process nst data. -! When 'yes' process nst data. -! netcdf_out When 'true', output gaussian file in -! netcdf. Otherwise output nemsio format. -! -! 2018-Jan-30 Gayno Initial version -! 2019-Oct-30 Gayno Option to output gaussian analysis file -! in netcdf. -! -!------------------------------------------------------------------ - - module io - - use nemsio_module - - implicit none - - character(len=3) :: donst - - integer, parameter :: num_tiles = 6 - - integer :: itile, jtile, igaus, jgaus - - integer(nemsio_intkind) :: idate(8) - - type :: sfc_data -! surface variables - real, allocatable :: alvsf(:) - real, allocatable :: alvwf(:) - real, allocatable :: alnsf(:) - real, allocatable :: alnwf(:) - real, allocatable :: canopy(:) - real, allocatable :: facsf(:) - real, allocatable :: facwf(:) - real, allocatable :: ffhh(:) - real, allocatable :: ffmm(:) - real, allocatable :: fice(:) - real, allocatable :: f10m(:) - real, allocatable :: hice(:) - real, allocatable :: q2m(:) - real, allocatable :: orog(:) - real, allocatable :: sheleg(:) - real, allocatable :: slmask(:) - real, allocatable :: shdmax(:) - real, allocatable :: shdmin(:) - real, allocatable :: slope(:) - real, allocatable :: srflag(:) - real, allocatable :: snoalb(:) - real, allocatable :: snwdph(:) - real, allocatable :: stype(:) - real, allocatable :: t2m(:) - real, allocatable :: tprcp(:) - real, allocatable :: tisfc(:) - real, allocatable :: tsea(:) - real, allocatable :: tg3(:) - real, allocatable :: uustar(:) - real, allocatable :: vfrac(:) - real, allocatable :: vtype(:) - real, allocatable :: zorl(:) - real, allocatable :: slc(:,:) - real, allocatable :: smc(:,:) - real, allocatable :: stc(:,:) -! nst variables - real, allocatable :: c0(:) - real, allocatable :: cd(:) - real, allocatable :: dconv(:) - real, allocatable :: dtcool(:) - real, allocatable :: land(:) - real, allocatable :: qrain(:) - real, allocatable :: tref(:) - real, allocatable :: w0(:) - real, allocatable :: wd(:) - real, allocatable :: xs(:) - real, allocatable :: xt(:) - real, allocatable :: xtts(:) - real, allocatable :: xu(:) - real, allocatable :: xv(:) - real, allocatable :: xz(:) - real, allocatable :: xzts(:) - real, allocatable :: zc(:) - end type sfc_data - - type(sfc_data) :: tile_data, gaussian_data - - end module io - -!------------------------------------------------------------------------------ -! Main program -!------------------------------------------------------------------------------ - - program main - - use netcdf - use io - - implicit none - - character(len=12) :: weightfile - - integer :: i, error, ncid, id_ns, n_s - integer :: id_col, id_row, id_s, n - integer :: yy, mm, dd, hh - integer, allocatable :: col(:), row(:) - - logical :: netcdf_out - - real(kind=8), allocatable :: s(:) - - namelist /setup/ yy, mm, dd, hh, igaus, jgaus, donst, netcdf_out - - call w3tagb('GAUSSIAN_SFCANL',2018,0179,0055,'NP20') - - print*,"- BEGIN EXECUTION" - - netcdf_out = .true. 
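! For reference, a fort.41 "setup" namelist matching the read just below might
! look like the following minimal sketch; the date and gaussian grid dimensions
! shown are illustrative values only, not taken from this code:
!
!   &setup
!     yy=2021, mm=3, dd=22, hh=6,
!     igaus=3072, jgaus=1536,
!     donst="no",
!     netcdf_out=.true.
!   /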
- - donst = 'no' - - print* - print*,"- READ SETUP NAMELIST" - open(41, file="./fort.41") - read(41, nml=setup, iostat=error) - if (error /= 0) then - print*,"** FATAL ERROR READING NAMELIST. ISTAT IS: ", error - call errexit(56) - endif - close (41) - - idate = 0 - idate(1) = yy - idate(2) = mm - idate(3) = dd - idate(4) = hh - -!------------------------------------------------------------------------------ -! Read interpolation weight file. -!------------------------------------------------------------------------------ - - print* - print*,"- READ INTERPOLATION WEIGHT FILE" - - weightfile = "./weights.nc" - - error=nf90_open(trim(weightfile),nf90_nowrite,ncid) - call netcdf_err(error, 'OPENING weights.nc' ) - - error=nf90_inq_dimid(ncid, 'n_s', id_ns) - call netcdf_err(error, 'READING n_s id' ) - error=nf90_inquire_dimension(ncid,id_ns,len=n_s) - call netcdf_err(error, 'READING n_s' ) - - allocate(col(n_s)) - error=nf90_inq_varid(ncid, 'col', id_col) - call netcdf_err(error, 'READING col id' ) - error=nf90_get_var(ncid, id_col, col) - call netcdf_err(error, 'READING col' ) - - allocate(row(n_s)) - error=nf90_inq_varid(ncid, 'row', id_row) - call netcdf_err(error, 'READING row id' ) - error=nf90_get_var(ncid, id_row, row) - call netcdf_err(error, 'READING row' ) - - allocate(s(n_s)) - error=nf90_inq_varid(ncid, 'S', id_s) - call netcdf_err(error, 'READING s id' ) - error=nf90_get_var(ncid, id_s, s) - call netcdf_err(error, 'READING s' ) - - error = nf90_close(ncid) - -!------------------------------------------------------------------------------ -! Read the tiled analysis data. -!------------------------------------------------------------------------------ - - call read_data_anl - -!------------------------------------------------------------------------------ -! Interpolate tiled data to gaussian grid. -!------------------------------------------------------------------------------ - - allocate(gaussian_data%orog(igaus*jgaus)) ! sfc - allocate(gaussian_data%t2m(igaus*jgaus)) - allocate(gaussian_data%tisfc(igaus*jgaus)) - allocate(gaussian_data%q2m(igaus*jgaus)) - allocate(gaussian_data%stype(igaus*jgaus)) - allocate(gaussian_data%snwdph(igaus*jgaus)) - allocate(gaussian_data%slope(igaus*jgaus)) - allocate(gaussian_data%shdmax(igaus*jgaus)) - allocate(gaussian_data%shdmin(igaus*jgaus)) - allocate(gaussian_data%snoalb(igaus*jgaus)) - allocate(gaussian_data%slmask(igaus*jgaus)) - allocate(gaussian_data%tg3(igaus*jgaus)) - allocate(gaussian_data%alvsf(igaus*jgaus)) - allocate(gaussian_data%alvwf(igaus*jgaus)) - allocate(gaussian_data%alnsf(igaus*jgaus)) - allocate(gaussian_data%alnwf(igaus*jgaus)) - allocate(gaussian_data%facsf(igaus*jgaus)) - allocate(gaussian_data%facwf(igaus*jgaus)) - allocate(gaussian_data%ffhh(igaus*jgaus)) - allocate(gaussian_data%ffmm(igaus*jgaus)) - allocate(gaussian_data%sheleg(igaus*jgaus)) - allocate(gaussian_data%canopy(igaus*jgaus)) - allocate(gaussian_data%vfrac(igaus*jgaus)) - allocate(gaussian_data%vtype(igaus*jgaus)) - allocate(gaussian_data%zorl(igaus*jgaus)) - allocate(gaussian_data%tsea(igaus*jgaus)) - allocate(gaussian_data%f10m(igaus*jgaus)) - allocate(gaussian_data%tprcp(igaus*jgaus)) - allocate(gaussian_data%uustar(igaus*jgaus)) - allocate(gaussian_data%fice(igaus*jgaus)) - allocate(gaussian_data%hice(igaus*jgaus)) - allocate(gaussian_data%srflag(igaus*jgaus)) - allocate(gaussian_data%slc(igaus*jgaus,4)) - allocate(gaussian_data%smc(igaus*jgaus,4)) - allocate(gaussian_data%stc(igaus*jgaus,4)) - - if (trim(donst) == "yes" .or. 
trim(donst) == "YES") then - allocate(gaussian_data%c0(igaus*jgaus)) ! nst - allocate(gaussian_data%cd(igaus*jgaus)) - allocate(gaussian_data%dconv(igaus*jgaus)) - allocate(gaussian_data%dtcool(igaus*jgaus)) - allocate(gaussian_data%land(igaus*jgaus)) - allocate(gaussian_data%qrain(igaus*jgaus)) - allocate(gaussian_data%tref(igaus*jgaus)) - allocate(gaussian_data%w0(igaus*jgaus)) - allocate(gaussian_data%wd(igaus*jgaus)) - allocate(gaussian_data%xs(igaus*jgaus)) - allocate(gaussian_data%xt(igaus*jgaus)) - allocate(gaussian_data%xtts(igaus*jgaus)) - allocate(gaussian_data%xu(igaus*jgaus)) - allocate(gaussian_data%xv(igaus*jgaus)) - allocate(gaussian_data%xz(igaus*jgaus)) - allocate(gaussian_data%xzts(igaus*jgaus)) - allocate(gaussian_data%zc(igaus*jgaus)) - endif - - do i = 1, n_s - gaussian_data%orog(row(i)) = gaussian_data%orog(row(i)) + s(i)*tile_data%orog(col(i)) - gaussian_data%t2m(row(i)) = gaussian_data%t2m(row(i)) + s(i)*tile_data%t2m(col(i)) - gaussian_data%tisfc(row(i)) = gaussian_data%tisfc(row(i)) + s(i)*tile_data%tisfc(col(i)) - gaussian_data%q2m(row(i)) = gaussian_data%q2m(row(i)) + s(i)*tile_data%q2m(col(i)) - gaussian_data%stype(row(i)) = gaussian_data%stype(row(i)) + s(i)*tile_data%stype(col(i)) - gaussian_data%snwdph(row(i)) = gaussian_data%snwdph(row(i)) + s(i)*tile_data%snwdph(col(i)) - gaussian_data%slope(row(i)) = gaussian_data%slope(row(i)) + s(i)*tile_data%slope(col(i)) - gaussian_data%shdmax(row(i)) = gaussian_data%shdmax(row(i)) + s(i)*tile_data%shdmax(col(i)) - gaussian_data%shdmin(row(i)) = gaussian_data%shdmin(row(i)) + s(i)*tile_data%shdmin(col(i)) - gaussian_data%slmask(row(i)) = gaussian_data%slmask(row(i)) + s(i)*tile_data%slmask(col(i)) - gaussian_data%tg3(row(i)) = gaussian_data%tg3(row(i)) + s(i)*tile_data%tg3(col(i)) - gaussian_data%alvsf(row(i)) = gaussian_data%alvsf(row(i)) + s(i)*tile_data%alvsf(col(i)) - gaussian_data%alvwf(row(i)) = gaussian_data%alvwf(row(i)) + s(i)*tile_data%alvwf(col(i)) - gaussian_data%alnsf(row(i)) = gaussian_data%alnsf(row(i)) + s(i)*tile_data%alnsf(col(i)) - gaussian_data%alnwf(row(i)) = gaussian_data%alnwf(row(i)) + s(i)*tile_data%alnwf(col(i)) - gaussian_data%sheleg(row(i)) = gaussian_data%sheleg(row(i)) + s(i)*tile_data%sheleg(col(i)) - gaussian_data%canopy(row(i)) = gaussian_data%canopy(row(i)) + s(i)*tile_data%canopy(col(i)) - gaussian_data%vfrac(row(i)) = gaussian_data%vfrac(row(i)) + s(i)*tile_data%vfrac(col(i)) - gaussian_data%zorl(row(i)) = gaussian_data%zorl(row(i)) + s(i)*tile_data%zorl(col(i)) - gaussian_data%tsea(row(i)) = gaussian_data%tsea(row(i)) + s(i)*tile_data%tsea(col(i)) - gaussian_data%f10m(row(i)) = gaussian_data%f10m(row(i)) + s(i)*tile_data%f10m(col(i)) - gaussian_data%vtype(row(i)) = gaussian_data%vtype(row(i)) + s(i)*tile_data%vtype(col(i)) - gaussian_data%tprcp(row(i)) = gaussian_data%tprcp(row(i)) + s(i)*tile_data%tprcp(col(i)) - gaussian_data%facsf(row(i)) = gaussian_data%facsf(row(i)) + s(i)*tile_data%facsf(col(i)) - gaussian_data%facwf(row(i)) = gaussian_data%facwf(row(i)) + s(i)*tile_data%facwf(col(i)) - gaussian_data%ffhh(row(i)) = gaussian_data%ffhh(row(i)) + s(i)*tile_data%ffhh(col(i)) - gaussian_data%ffmm(row(i)) = gaussian_data%ffmm(row(i)) + s(i)*tile_data%ffmm(col(i)) - gaussian_data%uustar(row(i)) = gaussian_data%uustar(row(i)) + s(i)*tile_data%uustar(col(i)) - gaussian_data%fice(row(i)) = gaussian_data%fice(row(i)) + s(i)*tile_data%fice(col(i)) - gaussian_data%hice(row(i)) = gaussian_data%hice(row(i)) + s(i)*tile_data%hice(col(i)) - gaussian_data%snoalb(row(i)) = 
gaussian_data%snoalb(row(i)) + s(i)*tile_data%snoalb(col(i)) - gaussian_data%srflag(row(i)) = gaussian_data%srflag(row(i)) + s(i)*tile_data%srflag(col(i)) - if (trim(donst) == "yes" .or. trim(donst) == "YES") then - gaussian_data%c0(row(i)) = gaussian_data%c0(row(i)) + s(i)*tile_data%c0(col(i)) - gaussian_data%cd(row(i)) = gaussian_data%cd(row(i)) + s(i)*tile_data%cd(col(i)) - gaussian_data%dconv(row(i)) = gaussian_data%dconv(row(i)) + s(i)*tile_data%dconv(col(i)) - gaussian_data%dtcool(row(i)) = gaussian_data%dtcool(row(i)) + s(i)*tile_data%dtcool(col(i)) - gaussian_data%qrain(row(i)) = gaussian_data%qrain(row(i)) + s(i)*tile_data%qrain(col(i)) - gaussian_data%tref(row(i)) = gaussian_data%tref(row(i)) + s(i)*tile_data%tref(col(i)) - gaussian_data%w0(row(i)) = gaussian_data%w0(row(i)) + s(i)*tile_data%w0(col(i)) - gaussian_data%wd(row(i)) = gaussian_data%wd(row(i)) + s(i)*tile_data%wd(col(i)) - gaussian_data%xs(row(i)) = gaussian_data%xs(row(i)) + s(i)*tile_data%xs(col(i)) - gaussian_data%xt(row(i)) = gaussian_data%xt(row(i)) + s(i)*tile_data%xt(col(i)) - gaussian_data%xtts(row(i)) = gaussian_data%xtts(row(i)) + s(i)*tile_data%xtts(col(i)) - gaussian_data%xu(row(i)) = gaussian_data%xu(row(i)) + s(i)*tile_data%xu(col(i)) - gaussian_data%xv(row(i)) = gaussian_data%xv(row(i)) + s(i)*tile_data%xv(col(i)) - gaussian_data%xz(row(i)) = gaussian_data%xz(row(i)) + s(i)*tile_data%xz(col(i)) - gaussian_data%xzts(row(i)) = gaussian_data%xzts(row(i)) + s(i)*tile_data%xzts(col(i)) - gaussian_data%zc(row(i)) = gaussian_data%zc(row(i)) + s(i)*tile_data%zc(col(i)) - endif - do n = 1, 4 - gaussian_data%slc(row(i),n) = gaussian_data%slc(row(i),n) + s(i)*tile_data%slc(col(i),n) - gaussian_data%smc(row(i),n) = gaussian_data%smc(row(i),n) + s(i)*tile_data%smc(col(i),n) - gaussian_data%stc(row(i),n) = gaussian_data%stc(row(i),n) + s(i)*tile_data%stc(col(i),n) - enddo - enddo - - deallocate(col, row, s) - - deallocate(tile_data%orog) - deallocate(tile_data%t2m) - deallocate(tile_data%tisfc) - deallocate(tile_data%q2m) - deallocate(tile_data%stype) - deallocate(tile_data%snwdph) - deallocate(tile_data%slope) - deallocate(tile_data%shdmax) - deallocate(tile_data%shdmin) - deallocate(tile_data%snoalb) - deallocate(tile_data%slmask) - deallocate(tile_data%tg3) - deallocate(tile_data%alvsf) - deallocate(tile_data%alvwf) - deallocate(tile_data%alnsf) - deallocate(tile_data%alnwf) - deallocate(tile_data%facsf) - deallocate(tile_data%facwf) - deallocate(tile_data%ffhh) - deallocate(tile_data%ffmm) - deallocate(tile_data%sheleg) - deallocate(tile_data%canopy) - deallocate(tile_data%vfrac) - deallocate(tile_data%vtype) - deallocate(tile_data%zorl) - deallocate(tile_data%tsea) - deallocate(tile_data%f10m) - deallocate(tile_data%tprcp) - deallocate(tile_data%uustar) - deallocate(tile_data%fice) - deallocate(tile_data%hice) - deallocate(tile_data%srflag) - deallocate(tile_data%slc) - deallocate(tile_data%smc) - deallocate(tile_data%stc) - - if (trim(donst) == "yes" .or. 
trim(donst) == "YES") then - deallocate(tile_data%c0) - deallocate(tile_data%cd) - deallocate(tile_data%dconv) - deallocate(tile_data%dtcool) - deallocate(tile_data%qrain) - deallocate(tile_data%tref) - deallocate(tile_data%w0) - deallocate(tile_data%wd) - deallocate(tile_data%xs) - deallocate(tile_data%xt) - deallocate(tile_data%xtts) - deallocate(tile_data%xu) - deallocate(tile_data%xv) - deallocate(tile_data%xz) - deallocate(tile_data%xzts) - deallocate(tile_data%zc) - endif - -!------------------------------------------------------------------------------ -! Write gaussian data to either netcdf or nemsio file. -!------------------------------------------------------------------------------ - - if (netcdf_out) then - call write_sfc_data_netcdf - else - call write_sfc_data_nemsio - endif - - deallocate(gaussian_data%orog) - deallocate(gaussian_data%t2m) - deallocate(gaussian_data%tisfc) - deallocate(gaussian_data%q2m) - deallocate(gaussian_data%stype) - deallocate(gaussian_data%snwdph) - deallocate(gaussian_data%slope) - deallocate(gaussian_data%shdmax) - deallocate(gaussian_data%shdmin) - deallocate(gaussian_data%snoalb) - deallocate(gaussian_data%slmask) - deallocate(gaussian_data%tg3) - deallocate(gaussian_data%alvsf) - deallocate(gaussian_data%alvwf) - deallocate(gaussian_data%alnsf) - deallocate(gaussian_data%alnwf) - deallocate(gaussian_data%facsf) - deallocate(gaussian_data%facwf) - deallocate(gaussian_data%ffhh) - deallocate(gaussian_data%ffmm) - deallocate(gaussian_data%sheleg) - deallocate(gaussian_data%canopy) - deallocate(gaussian_data%vfrac) - deallocate(gaussian_data%vtype) - deallocate(gaussian_data%zorl) - deallocate(gaussian_data%tsea) - deallocate(gaussian_data%f10m) - deallocate(gaussian_data%tprcp) - deallocate(gaussian_data%uustar) - deallocate(gaussian_data%fice) - deallocate(gaussian_data%hice) - deallocate(gaussian_data%srflag) - deallocate(gaussian_data%slc) - deallocate(gaussian_data%smc) - deallocate(gaussian_data%stc) - - if (trim(donst) == "yes" .or. trim(donst) == "YES") then - deallocate(gaussian_data%c0) - deallocate(gaussian_data%cd) - deallocate(gaussian_data%dconv) - deallocate(gaussian_data%dtcool) - deallocate(gaussian_data%land) - deallocate(gaussian_data%qrain) - deallocate(gaussian_data%tref) - deallocate(gaussian_data%w0) - deallocate(gaussian_data%wd) - deallocate(gaussian_data%xs) - deallocate(gaussian_data%xt) - deallocate(gaussian_data%xtts) - deallocate(gaussian_data%xu) - deallocate(gaussian_data%xv) - deallocate(gaussian_data%xz) - deallocate(gaussian_data%xzts) - deallocate(gaussian_data%zc) - endif - - print* - print*,'- NORMAL TERMINATION' - - call w3tage('GAUSSIAN_SFCANL') - - end program main - -!------------------------------------------------------------------------------------------- -! Write gaussian surface data to netcdf file. -!------------------------------------------------------------------------------------------- - - subroutine write_sfc_data_netcdf - - use netcdf - use io - - implicit none - - character(len=50) :: outfile - character(len=31) :: date_string - character(len=4) :: year - character(len=2) :: mon, day, hour - - integer :: header_buffer_val = 16384 - integer :: i, error, ncid, dim_xt, dim_yt, dim_time - integer :: id_xt, id_yt, id_lon, id_lat, id_time - integer :: n - -! noah variables - integer, parameter :: num_noah=44 - character(len=30) :: noah_var(num_noah) - character(len=70) :: noah_name(num_noah) - character(len=30) :: noah_units(num_noah) - -! 
nst variables - integer, parameter :: num_nst=16 - character(len=30) :: nst_var(num_nst) - character(len=70) :: nst_name(num_nst) - character(len=30) :: nst_units(num_nst) - -! variables to be output - integer :: num_vars - character(len=30), allocatable :: var(:) - character(len=70), allocatable :: name(:) - character(len=30), allocatable :: units(:) - integer, allocatable :: id_var(:) - - real, parameter :: missing = 9.99e20 - - real(kind=4), allocatable :: dummy(:,:), slat(:), wlat(:) - -! define noah fields - - data noah_var /"alnsf", & - "alnwf", & - "alvsf", & - "alvwf", & - "cnwat", & - "crain",& - "f10m", & - "facsf", & - "facwf", & - "ffhh", & - "ffmm", & - "fricv", & - "icec", & - "icetk", & - "land", & - "orog", & - "sfcr", & - "shdmax", & - "shdmin", & - "sltyp", & - "snoalb", & - "snod", & - "soill1", & - "soill2", & - "soill3", & - "soill4", & - "soilt1", & - "soilt2", & - "soilt3", & - "soilt4", & - "soilw1", & - "soilw2", & - "soilw3", & - "soilw4", & - "sotyp", & - "spfh2m", & - "tg3" , & - "tisfc", & - "tmp2m", & - "tmpsfc", & - "tprcp", & - "veg", & - "vtype", & - "weasd" / - - data noah_name /"mean nir albedo with strong cosz dependency", & - "mean nir albedo with weak cosz dependency", & - "mean vis albedo with strong cosz dependency", & - "mean vis albedo with weak cosz dependency", & - "canopy water (cnwat in gfs data)" , & - "instantaneous categorical rain", & - "10-meter wind speed divided by lowest model wind speed", & - "fractional coverage with strong cosz dependency", & - "fractional coverage with weak cosz dependency", & - "fh parameter from PBL scheme" , & - "fm parameter from PBL scheme" , & - "uustar surface frictional wind", & - "surface ice concentration (ice=1; no ice=0)", & - "sea ice thickness (icetk in gfs_data)", & - "sea-land-ice mask (0-sea, 1-land, 2-ice)", & - "surface geopotential height", & - "surface roughness", & - "maximum fractional coverage of green vegetation", & - "minimum fractional coverage of green vegetation", & - "surface slope type" , & - "maximum snow albedo in fraction", & - "surface snow depth", & - "liquid soil moisture at layer-1", & - "liquid soil moisture at layer-2", & - "liquid soil moisture at layer-3", & - "liquid soil moisture at layer-4", & - "soil temperature 0-10cm", & - "soil temperature 10-40cm", & - "soil temperature 40-100cm", & - "soil temperature 100-200cm", & - "volumetric soil moisture 0-10cm", & - "volumetric soil moisture 10-40cm", & - "volumetric soil moisture 40-100cm", & - "volumetric soil moisture 100-200cm", & - "soil type in integer", & - "2m specific humidity" , & - "deep soil temperature" , & - "surface temperature over ice fraction", & - "2m temperature", & - "surface temperature", & - "total precipitation" , & - "vegetation fraction", & - "vegetation type in integer", & - "surface snow water equivalent" / - - data noah_units /"%", & - "%", & - "%", & - "%", & - "XXX", & - "number", & - "N/A", & - "XXX", & - "XXX", & - "XXX", & - "XXX", & - "XXX", & - "fraction", & - "XXX", & - "numerical", & - "gpm", & - "m", & - "XXX", & - "XXX", & - "XXX", & - "XXX", & - "m", & - "XXX", & - "XXX", & - "XXX", & - "XXX", & - "K", & - "K", & - "K", & - "K", & - "fraction", & - "fraction", & - "fraction", & - "fraction", & - "number", & - "kg/kg", & - "K", & - "K", & - "K", & - "K", & - "kg/m**2", & - "fraction", & - "number" , & - "kg/m**2" / - -! 
define nst fields - - data nst_var /"c0", & - "cd", & - "dconv", & - "dtcool", & - "qrain", & - "tref", & - "w0", & - "wd", & - "xs", & - "xt", & - "xtts", & - "xu", & - "xv", & - "xz", & - "xzts", & - "zc" / - - data nst_name /"nsst coefficient1 to calculate d(tz)/d(ts)", & - "nsst coefficient2 to calculate d(tz)/d(ts)", & - "nsst thickness of free convection layer", & - "nsst sub-layer cooling amount", & - "nsst sensible heat flux due to rainfall", & - "nsst reference or foundation temperature", & - "nsst coefficient3 to calculate d(tz)/d(ts)", & - "nsst coefficient4 to calculate d(tz)/d(ts)", & - "nsst salinity content in diurnal thermocline layer", & - "nsst heat content in diurnal thermocline layer", & - "nsst d(xt)/d(ts)", & - "nsst u-current content in diurnal thermocline layer", & - "nsst v-current content in diurnal thermocline layer", & - "nsst diurnal thermocline layer thickness", & - "nsst d(xt)/d(ts)", & - "nsst sub-layer cooling thickness"/ - - data nst_units /"numerical", & - "n/a", & - "m", & - "k", & - "w/m2", & - "K", & - "n/a", & - "n/a", & - "n/a", & - "k*m", & - "m", & - "m2/s", & - "m2/s", & - "m", & - "m/k", & - "m"/ - - outfile = "./sfc.gaussian.analysis.file" - - print*,"- WRITE SURFACE DATA TO NETCDF FILE: ", trim(outfile) - - error = nf90_create(outfile, cmode=IOR(IOR(NF90_CLOBBER,NF90_NETCDF4),NF90_CLASSIC_MODEL), ncid=ncid) - call netcdf_err(error, 'CREATING NETCDF FILE') - -! dimensions - - error = nf90_def_dim(ncid, 'grid_xt', igaus, dim_xt) - call netcdf_err(error, 'DEFINING GRID_XT DIMENSION') - - error = nf90_def_dim(ncid, 'grid_yt', jgaus, dim_yt) - call netcdf_err(error, 'DEFINING GRID_YT DIMENSION') - - error = nf90_def_dim(ncid, 'time', 1, dim_time) - call netcdf_err(error, 'DEFINING TIME DIMENSION') - -! global attributes - - error = nf90_put_att(ncid, nf90_global, 'nsoil', 4) - call netcdf_err(error, 'DEFINING NSOIL ATTRIBUTE') - - error = nf90_put_att(ncid, nf90_global, 'source', "FV3GFS") - call netcdf_err(error, 'DEFINING SOURCE ATTRIBUTE') - - error = nf90_put_att(ncid, nf90_global, 'grid', "gaussian") - call netcdf_err(error, 'DEFINING GRID ATTRIBUTE') - - error = nf90_put_att(ncid, nf90_global, 'im', igaus) - call netcdf_err(error, 'DEFINING IM ATTRIBUTE') - - error = nf90_put_att(ncid, nf90_global, 'jm', jgaus) - call netcdf_err(error, 'DEFINING JM ATTRIBUTE') - -! variables - -! grid_xt - - error = nf90_def_var(ncid, 'grid_xt', NF90_DOUBLE, dim_xt, id_xt) - call netcdf_err(error, 'DEFINING GRID_XT') - - error = nf90_put_att(ncid, id_xt, "cartesian_axis", "X") - call netcdf_err(error, 'DEFINING GRID_XT ATTRIBUTE') - - error = nf90_put_att(ncid, id_xt, "long_name", "T-cell longitude") - call netcdf_err(error, 'DEFINING GRID_XT ATTRIBUTE') - - error = nf90_put_att(ncid, id_xt, "units", "degrees_E") - call netcdf_err(error, 'DEFINING GRID_XT ATTRIBUTE') - -! lon - - error = nf90_def_var(ncid, 'lon', NF90_DOUBLE, (/dim_xt,dim_yt/), id_lon) - call netcdf_err(error, 'DEFINING LON') - - error = nf90_put_att(ncid, id_lon, "long_name", "T-cell longitude") - call netcdf_err(error, 'DEFINING LON ATTRIBUTE') - - error = nf90_put_att(ncid, id_lon, "units", "degrees_E") - call netcdf_err(error, 'DEFINING LON ATTRIBUTE') - -! 
grid_yt - - error = nf90_def_var(ncid, 'grid_yt', NF90_DOUBLE, dim_yt, id_yt) - call netcdf_err(error, 'DEFINING GRID_YT') - - error = nf90_put_att(ncid, id_yt, "cartesian_axis", "Y") - call netcdf_err(error, 'DEFINING GRID_YT ATTRIBUTE') - - error = nf90_put_att(ncid, id_yt, "long_name", "T-cell latitude") - call netcdf_err(error, 'DEFINING GRID_YT ATTRIBUTE') - - error = nf90_put_att(ncid, id_yt, "units", "degrees_N") - call netcdf_err(error, 'DEFINING GRID_YT ATTRIBUTE') - -! lat - - error = nf90_def_var(ncid, 'lat', NF90_DOUBLE, (/dim_xt,dim_yt/), id_lat) - call netcdf_err(error, 'DEFINING LAT') - - error = nf90_put_att(ncid, id_lat, "long_name", "T-cell latitude") - call netcdf_err(error, 'DEFINING LAT ATTRIBUTE') - - error = nf90_put_att(ncid, id_lat, "units", "degrees_N") - call netcdf_err(error, 'DEFINING LAT ATTRIBUTE') - -! time - - error = nf90_def_var(ncid, 'time', NF90_DOUBLE, dim_time, id_time) - call netcdf_err(error, 'DEFINING TIME') - - error = nf90_put_att(ncid, id_time, "long_name", "time") - call netcdf_err(error, 'DEFINING TIME ATTRIBUTE') - - write(year, "(i4)") idate(1) - write(mon, "(i2.2)") idate(2) - write(day, "(i2.2)") idate(3) - write(hour, "(i2.2)") idate(4) - - date_string="hours since " // year // "-" // mon // "-" // day // " " // hour // ":00:00" - - error = nf90_put_att(ncid, id_time, "units", date_string) - call netcdf_err(error, 'DEFINING TIME ATTRIBUTE') - - error = nf90_put_att(ncid, id_time, "cartesian_axis", "T") - call netcdf_err(error, 'DEFINING TIME ATTRIBUTE') - - error = nf90_put_att(ncid, id_time, "calendar_type", "JULIAN") - call netcdf_err(error, 'DEFINING TIME ATTRIBUTE') - - error = nf90_put_att(ncid, id_time, "calendar", "JULIAN") - call netcdf_err(error, 'DEFINING TIME ATTRIBUTE') - -!------------------------------------------------------------------------------------------- -! Determine what variables to output (noah, or noah plus nst). -!------------------------------------------------------------------------------------------- - - if (trim(donst) == "yes" .or. trim(donst) == "YES") then - num_vars = num_noah + num_nst - else - num_vars = num_noah - endif - - allocate(var(num_vars)) - allocate(name(num_vars)) - allocate(units(num_vars)) - allocate(id_var(num_vars)) - - var(1:num_noah) = noah_var - name(1:num_noah) = noah_name - units(1:num_noah) = noah_units - - if (trim(donst) == "yes" .or. trim(donst) == "YES") then - do n = 1, num_nst - var(n+num_noah) = nst_var(n) - name(n+num_noah) = nst_name(n) - units(n+num_noah) = nst_units(n) - enddo - endif - -!------------------------------------------------------------------------------------------- -! Define variables in netcdf file. 
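! (A quick count from the parameters above: with donst enabled, num_vars =
!  num_noah + num_nst = 44 + 16 = 60, which matches the nrec_all=60 records of
!  the nemsio writer further below; otherwise only the 44 noah fields are
!  defined and written.)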
-!------------------------------------------------------------------------------------------- - - do n = 1, num_vars - - print*,'- DEFINE VARIABLE ',trim(var(n)) - error = nf90_def_var(ncid, trim(var(n)), NF90_FLOAT, (/dim_xt,dim_yt,dim_time/), id_var(n)) - call netcdf_err(error, 'DEFINING variable') - error = nf90_def_var_deflate(ncid, id_var(n), 1, 1, 1) - call netcdf_err(error, 'DEFINING variable with compression') - - error = nf90_put_att(ncid, id_var(n), "long_name", trim(name(n))) - call netcdf_err(error, 'DEFINING name ATTRIBUTE') - - error = nf90_put_att(ncid, id_var(n), "units", trim(units(n))) - call netcdf_err(error, 'DEFINING units ATTRIBUTE') - - error = nf90_put_att(ncid, id_var(n), "missing", missing) - call netcdf_err(error, 'DEFINING missing ATTRIBUTE') - - error = nf90_put_att(ncid, id_var(n), "cell_methods", "time: point") - call netcdf_err(error, 'DEFINING cell method ATTRIBUTE') - - error = nf90_put_att(ncid, id_var(n), "output_file", "sfc") - call netcdf_err(error, 'DEFINING out file ATTRIBUTE') - - enddo - -! end variable defs - - error = nf90_enddef(ncid, header_buffer_val,4,0,4) - call netcdf_err(error, 'DEFINING HEADER') - -!------------------------------------------------------------------------------------------- -! Write variables to netcdf file. -!------------------------------------------------------------------------------------------- - - allocate(dummy(igaus,jgaus)) - do i = 1, igaus - dummy(i,:) = real((i-1),4) * 360.0_4 / real(igaus,4) - enddo - - error = nf90_put_var(ncid, id_xt, dummy(:,1)) - call netcdf_err(error, 'WRITING GRID_XT') - - error = nf90_put_var(ncid, id_lon, dummy) - call netcdf_err(error, 'WRITING LON') - - allocate(slat(jgaus)) - allocate(wlat(jgaus)) - call splat(4, jgaus, slat, wlat) - - do i = (jgaus/2+1), jgaus - dummy(:,i) = 90.0 - (acos(slat(i)) * 180.0 / (4.0*atan(1.0))) - enddo - - do i = 1, (jgaus/2) - dummy(:,i) = -(dummy(:,(jgaus-i+1))) - enddo - - deallocate(slat, wlat) - - error = nf90_put_var(ncid, id_yt, dummy(1,:)) - call netcdf_err(error, 'WRITING GRID_YT') - - error = nf90_put_var(ncid, id_lat, dummy) - call netcdf_err(error, 'WRITING LAT') - - error = nf90_put_var(ncid, id_time, 0) - call netcdf_err(error, 'WRITING TIME') - - do n = 1, num_vars - print*,'- WRITE VARIABLE ',trim(var(n)) - call get_netcdf_var(var(n), dummy) - error = nf90_put_var(ncid, id_var(n), dummy, start=(/1,1,1/), count=(/igaus,jgaus,1/)) - call netcdf_err(error, 'WRITING variable') - enddo - - deallocate (dummy) - - error = nf90_close(ncid) - - end subroutine write_sfc_data_netcdf - -!------------------------------------------------------------------------------------------- -! Retrieve variable based on its netcdf identifier. 
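! (This routine maps each output record name back to the interpolated gaussian
!  arrays and applies the small conversions visible below: snod is snwdph/1000
!  (mm to m), sfcr is zorl/100 (cm to m), veg is vfrac*100 (fraction to whole
!  percent), and the liquid soil moisture records soill1-soill4 are reset to 0.0
!  wherever the flag value (> 0.99) marks water or land-ice points.)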
-!------------------------------------------------------------------------------------------- - - subroutine get_netcdf_var(var, dummy) - - use io - - implicit none - - character(len=*), intent(in) :: var - - real(kind=4), intent(out) :: dummy(igaus,jgaus) - - select case (var) - case ('alnsf') - dummy = reshape(gaussian_data%alnsf, (/igaus,jgaus/)) - case ('alnwf') - dummy = reshape(gaussian_data%alnwf, (/igaus,jgaus/)) - case ('alvsf') - dummy = reshape(gaussian_data%alvsf, (/igaus,jgaus/)) - case ('alvwf') - dummy = reshape(gaussian_data%alvwf, (/igaus,jgaus/)) - case ('cnwat') - dummy = reshape(gaussian_data%canopy, (/igaus,jgaus/)) - case ('f10m') - dummy = reshape(gaussian_data%f10m, (/igaus,jgaus/)) - case ('facsf') - dummy = reshape(gaussian_data%facsf, (/igaus,jgaus/)) - case ('facwf') - dummy = reshape(gaussian_data%facwf, (/igaus,jgaus/)) - case ('ffhh') - dummy = reshape(gaussian_data%ffhh, (/igaus,jgaus/)) - case ('ffmm') - dummy = reshape(gaussian_data%ffmm, (/igaus,jgaus/)) - case ('fricv') - dummy = reshape(gaussian_data%uustar, (/igaus,jgaus/)) - case ('land') - dummy = reshape(gaussian_data%slmask, (/igaus,jgaus/)) - case ('orog') - dummy = reshape(gaussian_data%orog, (/igaus,jgaus/)) - case ('sltyp') - dummy = reshape(gaussian_data%slope, (/igaus,jgaus/)) - case ('icec') - dummy = reshape(gaussian_data%fice, (/igaus,jgaus/)) - case ('icetk') - dummy = reshape(gaussian_data%hice, (/igaus,jgaus/)) - case ('snoalb') - dummy = reshape(gaussian_data%snoalb, (/igaus,jgaus/)) - case ('shdmin') - dummy = reshape(gaussian_data%shdmin, (/igaus,jgaus/)) - case ('shdmax') - dummy = reshape(gaussian_data%shdmax, (/igaus,jgaus/)) - case ('snod') - dummy = reshape(gaussian_data%snwdph, (/igaus,jgaus/)) / 1000.0 - case ('weasd') - dummy = reshape(gaussian_data%sheleg, (/igaus,jgaus/)) - case ('veg') - dummy = reshape(gaussian_data%vfrac, (/igaus,jgaus/)) * 100.0 - case ('sfcr') - dummy = reshape(gaussian_data%zorl, (/igaus,jgaus/)) / 100.0 - case ('crain') - dummy = reshape(gaussian_data%srflag, (/igaus,jgaus/)) - case ('sotyp') - dummy = reshape(gaussian_data%stype, (/igaus,jgaus/)) - case ('spfh2m') - dummy = reshape(gaussian_data%q2m, (/igaus,jgaus/)) - case ('tmp2m') - dummy = reshape(gaussian_data%t2m, (/igaus,jgaus/)) - case ('tmpsfc') - dummy = reshape(gaussian_data%tsea, (/igaus,jgaus/)) - case ('tg3') - dummy = reshape(gaussian_data%tg3, (/igaus,jgaus/)) - case ('tisfc') - dummy = reshape(gaussian_data%tisfc, (/igaus,jgaus/)) - case ('tprcp') - dummy = reshape(gaussian_data%tprcp, (/igaus,jgaus/)) - case ('vtype') - dummy = reshape(gaussian_data%vtype, (/igaus,jgaus/)) - case ('soill1') - dummy = reshape(gaussian_data%slc(:,1), (/igaus,jgaus/)) - where (dummy > 0.99) dummy = 0.0 ! replace flag value at water/landice - case ('soill2') - dummy = reshape(gaussian_data%slc(:,2), (/igaus,jgaus/)) - where (dummy > 0.99) dummy = 0.0 ! replace flag value at water/landice - case ('soill3') - dummy = reshape(gaussian_data%slc(:,3), (/igaus,jgaus/)) - where (dummy > 0.99) dummy = 0.0 ! replace flag value at water/landice - case ('soill4') - dummy = reshape(gaussian_data%slc(:,4), (/igaus,jgaus/)) - where (dummy > 0.99) dummy = 0.0 ! 
replace flag value at water/landice - case ('soilt1') - dummy = reshape(gaussian_data%stc(:,1), (/igaus,jgaus/)) - case ('soilt2') - dummy = reshape(gaussian_data%stc(:,2), (/igaus,jgaus/)) - case ('soilt3') - dummy = reshape(gaussian_data%stc(:,3), (/igaus,jgaus/)) - case ('soilt4') - dummy = reshape(gaussian_data%stc(:,4), (/igaus,jgaus/)) - case ('soilw1') - dummy = reshape(gaussian_data%smc(:,1), (/igaus,jgaus/)) - case ('soilw2') - dummy = reshape(gaussian_data%smc(:,2), (/igaus,jgaus/)) - case ('soilw3') - dummy = reshape(gaussian_data%smc(:,3), (/igaus,jgaus/)) - case ('soilw4') - dummy = reshape(gaussian_data%smc(:,4), (/igaus,jgaus/)) - case ('c0') - dummy = reshape(gaussian_data%c0, (/igaus,jgaus/)) - case ('cd') - dummy = reshape(gaussian_data%cd, (/igaus,jgaus/)) - case ('dconv') - dummy = reshape(gaussian_data%dconv, (/igaus,jgaus/)) - case ('dtcool') - dummy = reshape(gaussian_data%dtcool, (/igaus,jgaus/)) - case ('qrain') - dummy = reshape(gaussian_data%qrain, (/igaus,jgaus/)) - case ('tref') - dummy = reshape(gaussian_data%tref, (/igaus,jgaus/)) - case ('w0') - dummy = reshape(gaussian_data%w0, (/igaus,jgaus/)) - case ('wd') - dummy = reshape(gaussian_data%wd, (/igaus,jgaus/)) - case ('xs') - dummy = reshape(gaussian_data%xs, (/igaus,jgaus/)) - case ('xt') - dummy = reshape(gaussian_data%xt, (/igaus,jgaus/)) - case ('xtts') - dummy = reshape(gaussian_data%xtts, (/igaus,jgaus/)) - case ('xu') - dummy = reshape(gaussian_data%xu, (/igaus,jgaus/)) - case ('xv') - dummy = reshape(gaussian_data%xv, (/igaus,jgaus/)) - case ('xz') - dummy = reshape(gaussian_data%xz, (/igaus,jgaus/)) - case ('xzts') - dummy = reshape(gaussian_data%xzts, (/igaus,jgaus/)) - case ('zc') - dummy = reshape(gaussian_data%zc, (/igaus,jgaus/)) - case default - print*,'- FATAL ERROR: UNKNOWN VAR IN GET_VAR: ', var - call errexit(67) - end select - - end subroutine get_netcdf_var - -!------------------------------------------------------------------------------------------- -! Write gaussian surface data to nemsio file. 
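! (This writer expects a plain-text vcoord.txt in the run directory: the first
!  record holds nvcoord and levs_vcoord, followed by the vcoord values
!  themselves; "2 128" for an ak/bk definition on 128 interface levels would be
!  one illustrative example, not a value taken from this code. dimz of the
!  output file is set to levs_vcoord-1, and records are written by fixed index,
!  so the nemsio_writerec calls below must stay in the order of recname_all.)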
-!------------------------------------------------------------------------------------------- - - subroutine write_sfc_data_nemsio - - use nemsio_module - use io - - implicit none - - integer(nemsio_intkind), parameter :: nrec_all=60 - integer(nemsio_intkind), parameter :: nmetaaryi=1 - integer(nemsio_intkind), parameter :: nmetavari=4 - integer(nemsio_intkind), parameter :: nmetavarr=1 - integer(nemsio_intkind), parameter :: nmetavarc=2 - - character(nemsio_charkind) :: recname_all(nrec_all) - character(nemsio_charkind) :: reclevtyp_all(nrec_all) - character(nemsio_charkind) :: aryiname(nmetaaryi) - character(nemsio_charkind) :: variname(nmetavari) - character(nemsio_charkind) :: varrname(nmetavarr) - character(nemsio_charkind) :: varcname(nmetavarc) - character(nemsio_charkind) :: varcval(nmetavarc) - character(nemsio_charkind), allocatable :: recname(:) - character(nemsio_charkind), allocatable :: reclevtyp(:) - - integer(nemsio_intkind) :: iret, version, nrec - integer(nemsio_intkind) :: reclev_all(nrec_all) - integer(nemsio_intkind) :: aryival(jgaus,nmetaaryi) - integer(nemsio_intkind) :: aryilen(nmetaaryi) - integer(nemsio_intkind) :: varival(nmetavari) - integer :: i, k, n, nvcoord, levs_vcoord - integer(nemsio_intkind), allocatable :: reclev(:) - - real(nemsio_realkind), allocatable :: the_data(:) - real(nemsio_realkind) :: varrval(nmetavarr) - real(nemsio_realkind), allocatable :: lat(:), lon(:) - real(kind=4), allocatable :: dummy(:,:), slat(:), wlat(:) - real(nemsio_realkind), allocatable :: vcoord(:,:,:) - - type(nemsio_gfile) :: gfileo - - data recname_all /'alnsf', 'alnwf', 'alvsf', 'alvwf', & - 'cnwat', 'crain', 'f10m', 'facsf', & - 'facwf', 'ffhh', 'ffmm', 'fricv', & - 'icec', 'icetk', 'land', 'orog', & - 'snoalb', 'sfcr', 'shdmax', 'shdmin', & - 'soill', 'soill', 'soill', 'soill', & - 'sltyp', 'soilw', 'soilw', 'soilw', & - 'soilw', 'snod', 'sotyp', 'spfh', & - 'tmp', 'tmp', 'tmp', 'tmp', & - 'tg3', 'ti', 'tmp', 'tmp', & - 'tprcp', 'veg', 'vtype', 'weasd', & - 'c0', 'cd', 'dconv', 'dtcool', & - 'qrain', 'tref', & - 'w0', 'wd', 'xs', 'xt', & - 'xtts', 'xu', 'xv', 'xz', & - 'xzts', 'zc'/ - - data reclevtyp_all /'sfc', 'sfc', 'sfc', 'sfc', & - 'sfc', 'sfc', '10 m above gnd', 'sfc', & - 'sfc', 'sfc', 'sfc', 'sfc', & - 'sfc', 'sfc', 'sfc', 'sfc', & - 'sfc', 'sfc', 'sfc', 'sfc', & - '0-10 cm down', '10-40 cm down', '40-100 cm down', '100-200 cm down', & - 'sfc', '0-10 cm down', '10-40 cm down', '40-100 cm down', & - '100-200 cm down', 'sfc', 'sfc', '2 m above gnd', & - '0-10 cm down', '10-40 cm down', '40-100 cm down', '100-200 cm down', & - 'sfc', 'sfc', '2 m above gnd', 'sfc', & - 'sfc', 'sfc', 'sfc', 'sfc', & - 'sfc', 'sfc', 'sfc', 'sfc', & - 'sfc', 'sfc', 'sfc', & - 'sfc', 'sfc', 'sfc', 'sfc', & - 'sfc', 'sfc', 'sfc', 'sfc', & - 'sfc'/ - - data reclev_all /1, 1, 1, 1, 1, & - 1, 1, 1, 1, 1, 1, & - 1, 1, 1, 1, 1, 1, & - 1, 1, 1, 1, 1, 1, & - 1, 1, 1, 1, 1, 1, & - 1, 1, 1, 1, 1, 1, & - 1, 1, 1, 1, 1, 1, & - 1, 1, 1, 1, 1, 1, & - 1, 1, 1, 1, 1, 1, & - 1, 1, 1, 1, 1, 1, 1/ - - data aryiname /'lpl'/ - - data variname /'fhzero', 'ncld', 'nsoil', 'imp_physics'/ - - data varival /6, 5, 4, 11/ - - data varrname /'dtp'/ - - data varrval /225.0/ - - data varcname /"y-direction", "z-direction"/ - - data varcval /"north2south", "bottom2top"/ - - version = 200809 - - aryival = igaus ! 
reduced grid definition - aryilen = jgaus - - allocate(dummy(igaus,jgaus)) - do i = 1, igaus - dummy(i,:) = float(i-1) * 360.0 / float(igaus) - enddo - - allocate(lon(igaus*jgaus)) - lon = reshape (dummy, (/igaus*jgaus/) ) - -! Call 4-byte version of splib to match latitudes in history files. - - allocate(slat(jgaus)) - allocate(wlat(jgaus)) - call splat(4, jgaus, slat, wlat) - - do i = (jgaus/2+1), jgaus - dummy(:,i) = 90.0 - (acos(slat(i)) * 180.0 / (4.0*atan(1.0))) - enddo - - do i = 1, (jgaus/2) - dummy(:,i) = -(dummy(:,(jgaus-i+1))) - enddo - - deallocate(slat, wlat) - - allocate(lat(igaus*jgaus)) - lat = reshape (dummy, (/igaus*jgaus/) ) - - deallocate(dummy) - - print* - print*, "- OPEN VCOORD FILE." - open(14, file="vcoord.txt", form='formatted', iostat=iret) - if (iret /= 0) goto 43 - - print*, "- READ VCOORD FILE." - read(14, *, iostat=iret) nvcoord, levs_vcoord - if (iret /= 0) goto 43 - - allocate(vcoord(levs_vcoord,3,2)) - vcoord = 0.0 - read(14, *, iostat=iret) ((vcoord(n,k,1), k=1,nvcoord), n=1,levs_vcoord) - if (iret /= 0) goto 43 - - close (14) - - if (trim(donst) == "yes" .or. trim(donst) == "YES") then - nrec = nrec_all - allocate(recname(nrec)) - recname = recname_all - allocate(reclevtyp(nrec)) - reclevtyp = reclevtyp_all - allocate(reclev(nrec)) - reclev = reclev_all - else - nrec = 44 - allocate(recname(nrec)) - recname = recname_all(1:nrec) - allocate(reclevtyp(nrec)) - reclevtyp = reclevtyp_all(1:nrec) - allocate(reclev(nrec)) - reclev = reclev_all(1:nrec) - endif - - call nemsio_init(iret=iret) - - print* - print*,"- OPEN GAUSSIAN NEMSIO SURFACE FILE" - - call nemsio_open(gfileo, "sfc.gaussian.analysis.file", 'write', & - modelname="FV3GFS", gdatatype="bin4", version=version, & - nmeta=8, nrec=nrec, dimx=igaus, dimy=jgaus, dimz=(levs_vcoord-1), & - nframe=0, nsoil=4, ntrac=8, jcap=-9999, & - ncldt=5, idvc=-9999, idsl=-9999, idvm=-9999, & - idrt=4, lat=lat, lon=lon, vcoord=vcoord, & - nfhour=0, nfminute=0, nfsecondn=0, & - nfsecondd=1, nfday=0, idate=idate, & - recname=recname, reclevtyp=reclevtyp, & - reclev=reclev, extrameta=.true., & - nmetavari=nmetavari, variname=variname, varival=varival, & - nmetavarr=nmetavarr, varrname=varrname, varrval=varrval, & - nmetavarc=nmetavarc, varcname=varcname, varcval=varcval, & - nmetaaryi=nmetaaryi, aryiname=aryiname, & - aryival=aryival, aryilen=aryilen, iret=iret) - if (iret /= 0) goto 44 - - deallocate (lat, lon, vcoord, recname, reclevtyp, reclev) - - allocate(the_data(igaus*jgaus)) - - print*,"- WRITE GAUSSIAN NEMSIO SURFACE FILE" - - print*,"- WRITE ALNSF" - the_data = gaussian_data%alnsf - call nemsio_writerec(gfileo, 1, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE ALNWF" - the_data = gaussian_data%alnwf - call nemsio_writerec(gfileo, 2, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE ALVSF" - the_data = gaussian_data%alvsf - call nemsio_writerec(gfileo, 3, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE ALVWF" - the_data = gaussian_data%alvwf - call nemsio_writerec(gfileo, 4, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE CANOPY" - the_data = gaussian_data%canopy - call nemsio_writerec(gfileo, 5, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE CRAIN (SRFLAG)" - the_data = gaussian_data%srflag - call nemsio_writerec(gfileo, 6, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE F10M" - the_data = gaussian_data%f10m - call nemsio_writerec(gfileo, 7, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE 
FACSF" - the_data = gaussian_data%facsf - call nemsio_writerec(gfileo, 8, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE FACWF" - the_data = gaussian_data%facwf - call nemsio_writerec(gfileo, 9, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE FFHH" - the_data = gaussian_data%ffhh - call nemsio_writerec(gfileo, 10, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE FFMM" - the_data = gaussian_data%ffmm - call nemsio_writerec(gfileo, 11, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE UUSTAR" - the_data = gaussian_data%uustar - call nemsio_writerec(gfileo, 12, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE FICE" - the_data = gaussian_data%fice - call nemsio_writerec(gfileo, 13, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE HICE" - the_data = gaussian_data%hice - call nemsio_writerec(gfileo, 14, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE SLMSK" - the_data = gaussian_data%slmask - call nemsio_writerec(gfileo, 15, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE OROG" - the_data = gaussian_data%orog - call nemsio_writerec(gfileo, 16, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE SNOALB" - the_data = gaussian_data%snoalb - call nemsio_writerec(gfileo, 17, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE ZORL" - the_data = gaussian_data%zorl * 0.01 ! meters - call nemsio_writerec(gfileo, 18, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE SHDMAX" - the_data = gaussian_data%shdmax - call nemsio_writerec(gfileo, 19, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE SHDMIN" - the_data = gaussian_data%shdmin - call nemsio_writerec(gfileo, 20, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE SLC" - the_data = gaussian_data%slc(:,1) - call nemsio_writerec(gfileo, 21, the_data, iret=iret) - if (iret /= 0) goto 44 - - the_data = gaussian_data%slc(:,2) - call nemsio_writerec(gfileo, 22, the_data, iret=iret) - if (iret /= 0) goto 44 - - the_data = gaussian_data%slc(:,3) - call nemsio_writerec(gfileo, 23, the_data, iret=iret) - if (iret /= 0) goto 44 - - the_data = gaussian_data%slc(:,4) - call nemsio_writerec(gfileo, 24, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE SLOPE" - the_data = gaussian_data%slope - call nemsio_writerec(gfileo, 25, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE SMC" - the_data = gaussian_data%smc(:,1) - call nemsio_writerec(gfileo, 26, the_data, iret=iret) - if (iret /= 0) goto 44 - - the_data = gaussian_data%smc(:,2) - call nemsio_writerec(gfileo, 27, the_data, iret=iret) - if (iret /= 0) goto 44 - - the_data = gaussian_data%smc(:,3) - call nemsio_writerec(gfileo, 28, the_data, iret=iret) - if (iret /= 0) goto 44 - - the_data = gaussian_data%smc(:,4) - call nemsio_writerec(gfileo, 29, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE SNWDPH" - the_data = gaussian_data%snwdph * 0.001 ! 
meters - call nemsio_writerec(gfileo, 30, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE STYPE" - the_data = gaussian_data%stype - call nemsio_writerec(gfileo, 31, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE Q2M" - the_data = gaussian_data%q2m - call nemsio_writerec(gfileo, 32, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE STC" - the_data = gaussian_data%stc(:,1) - call nemsio_writerec(gfileo, 33, the_data, iret=iret) - if (iret /= 0) goto 44 - - the_data = gaussian_data%stc(:,2) - call nemsio_writerec(gfileo, 34, the_data, iret=iret) - if (iret /= 0) goto 44 - - the_data = gaussian_data%stc(:,3) - call nemsio_writerec(gfileo, 35, the_data, iret=iret) - if (iret /= 0) goto 44 - - the_data = gaussian_data%stc(:,4) - call nemsio_writerec(gfileo, 36, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE TG3" - the_data = gaussian_data%tg3 - call nemsio_writerec(gfileo, 37, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE TISFC" - the_data = gaussian_data%tisfc - call nemsio_writerec(gfileo, 38, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE T2M" - the_data = gaussian_data%t2m - call nemsio_writerec(gfileo, 39, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE TSEA" - the_data = gaussian_data%tsea - call nemsio_writerec(gfileo, 40, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE TPRCP" - the_data = gaussian_data%tprcp - call nemsio_writerec(gfileo, 41, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE VFRAC" - the_data = gaussian_data%vfrac * 100.0 ! whole percent - call nemsio_writerec(gfileo, 42, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE VTYPE" - the_data = gaussian_data%vtype - call nemsio_writerec(gfileo, 43, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE SHELEG" - the_data = gaussian_data%sheleg - call nemsio_writerec(gfileo, 44, the_data, iret=iret) - if (iret /= 0) goto 44 - - if (trim(donst) == "yes" .or. 
trim(donst) == "YES") then - - print*,"- WRITE C0" - the_data = gaussian_data%c0 - call nemsio_writerec(gfileo, 45, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE CD" - the_data = gaussian_data%cd - call nemsio_writerec(gfileo, 46, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE DCONV" - the_data = gaussian_data%dconv - call nemsio_writerec(gfileo, 47, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE DTCOOL" - the_data = gaussian_data%dtcool - call nemsio_writerec(gfileo, 48, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE QRAIN" - the_data = gaussian_data%qrain - call nemsio_writerec(gfileo, 49, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE TREF" - the_data = gaussian_data%tref - call nemsio_writerec(gfileo, 50, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE W0" - the_data = gaussian_data%w0 - call nemsio_writerec(gfileo, 51, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE WD" - the_data = gaussian_data%wd - call nemsio_writerec(gfileo, 52, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE XS" - the_data = gaussian_data%xs - call nemsio_writerec(gfileo, 53, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE XT" - the_data = gaussian_data%xt - call nemsio_writerec(gfileo, 54, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE XTTS" - the_data = gaussian_data%xtts - call nemsio_writerec(gfileo, 55, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE XU" - the_data = gaussian_data%xu - call nemsio_writerec(gfileo, 56, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE XV" - the_data = gaussian_data%xv - call nemsio_writerec(gfileo, 57, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE XZ" - the_data = gaussian_data%xz - call nemsio_writerec(gfileo, 58, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE XZTS" - the_data = gaussian_data%xzts - call nemsio_writerec(gfileo, 59, the_data, iret=iret) - if (iret /= 0) goto 44 - - print*,"- WRITE ZC" - the_data = gaussian_data%zc - call nemsio_writerec(gfileo, 60, the_data, iret=iret) - if (iret /= 0) goto 44 - - endif - - call nemsio_close(gfileo,iret=iret) - - call nemsio_finalize() - - deallocate(the_data) - - return - - 43 continue - print*,"- ** FATAL ERROR OPENING/READING VCOORD FILE." - print*,"- IRET IS: ", iret - call errexit(17) - stop - - 44 continue - print*,"- ** FATAL ERROR WRITING GAUSSIAN NEMSIO FILE." - print*,"- IRET IS: ", iret - call errexit(15) - stop - - end subroutine write_sfc_data_nemsio - -!------------------------------------------------------------------------------------------- -! Read tile data. -!------------------------------------------------------------------------------------------- - - subroutine read_data_anl - - use netcdf - use io - - implicit none - - integer :: ijtile, id_dim, id_var - integer :: error, tile, ncid - integer :: istart, iend - - real(kind=8), allocatable :: dummy(:,:), dummy3d(:,:,:) - -!------------------------------------------------------------------------------------------- -! Get tile dimensions from the first analysis file. 
-!------------------------------------------------------------------------------------------- - - error=nf90_open("./anal.tile1.nc",nf90_nowrite,ncid) - error=nf90_inq_dimid(ncid, 'xaxis_1', id_dim) - call netcdf_err(error, 'READING xaxis_1' ) - error=nf90_inquire_dimension(ncid,id_dim,len=itile) - call netcdf_err(error, 'READING xaxis_1' ) - - error=nf90_inq_dimid(ncid, 'yaxis_1', id_dim) - call netcdf_err(error, 'READING yaxis_1' ) - error=nf90_inquire_dimension(ncid,id_dim,len=jtile) - call netcdf_err(error, 'READING yaxis_1' ) - - error = nf90_close(ncid) - - ijtile = itile*jtile - - allocate(dummy(itile,jtile)) - allocate(dummy3d(itile,jtile,4)) - - allocate(tile_data%orog(ijtile*num_tiles)) - allocate(tile_data%canopy(ijtile*num_tiles)) - allocate(tile_data%slmask(ijtile*num_tiles)) - allocate(tile_data%tg3(ijtile*num_tiles)) - allocate(tile_data%alvsf(ijtile*num_tiles)) - allocate(tile_data%alvwf(ijtile*num_tiles)) - allocate(tile_data%alnsf(ijtile*num_tiles)) - allocate(tile_data%alnwf(ijtile*num_tiles)) - allocate(tile_data%facsf(ijtile*num_tiles)) - allocate(tile_data%facwf(ijtile*num_tiles)) - allocate(tile_data%ffhh(ijtile*num_tiles)) - allocate(tile_data%ffmm(ijtile*num_tiles)) - allocate(tile_data%fice(ijtile*num_tiles)) - allocate(tile_data%hice(ijtile*num_tiles)) - allocate(tile_data%sheleg(ijtile*num_tiles)) - allocate(tile_data%stype(ijtile*num_tiles)) - allocate(tile_data%vfrac(ijtile*num_tiles)) - allocate(tile_data%vtype(ijtile*num_tiles)) - allocate(tile_data%zorl(ijtile*num_tiles)) - allocate(tile_data%tsea(ijtile*num_tiles)) - allocate(tile_data%f10m(ijtile*num_tiles)) - allocate(tile_data%q2m(ijtile*num_tiles)) - allocate(tile_data%shdmax(ijtile*num_tiles)) - allocate(tile_data%shdmin(ijtile*num_tiles)) - allocate(tile_data%slope(ijtile*num_tiles)) - allocate(tile_data%snoalb(ijtile*num_tiles)) - allocate(tile_data%srflag(ijtile*num_tiles)) - allocate(tile_data%snwdph(ijtile*num_tiles)) - allocate(tile_data%t2m(ijtile*num_tiles)) - allocate(tile_data%tisfc(ijtile*num_tiles)) - allocate(tile_data%tprcp(ijtile*num_tiles)) - allocate(tile_data%uustar(ijtile*num_tiles)) - allocate(tile_data%slc(ijtile*num_tiles,4)) - allocate(tile_data%smc(ijtile*num_tiles,4)) - allocate(tile_data%stc(ijtile*num_tiles,4)) -! nst - if (trim(donst) == "yes" .or. 
trim(donst) == "YES") then - allocate(tile_data%c0(ijtile*num_tiles)) - allocate(tile_data%cd(ijtile*num_tiles)) - allocate(tile_data%dconv(ijtile*num_tiles)) - allocate(tile_data%dtcool(ijtile*num_tiles)) - allocate(tile_data%land(ijtile*num_tiles)) - allocate(tile_data%qrain(ijtile*num_tiles)) - allocate(tile_data%tref(ijtile*num_tiles)) - allocate(tile_data%w0(ijtile*num_tiles)) - allocate(tile_data%wd(ijtile*num_tiles)) - allocate(tile_data%xs(ijtile*num_tiles)) - allocate(tile_data%xt(ijtile*num_tiles)) - allocate(tile_data%xtts(ijtile*num_tiles)) - allocate(tile_data%xu(ijtile*num_tiles)) - allocate(tile_data%xv(ijtile*num_tiles)) - allocate(tile_data%xz(ijtile*num_tiles)) - allocate(tile_data%xzts(ijtile*num_tiles)) - allocate(tile_data%zc(ijtile*num_tiles)) - endif - - do tile = 1, 6 - - print* - print*, "- READ INPUT SFC DATA FOR TILE: ", tile - - istart = (ijtile) * (tile-1) + 1 - iend = istart + ijtile - 1 - - if (tile==1) error=nf90_open("./anal.tile1.nc",nf90_nowrite,ncid) - if (tile==2) error=nf90_open("./anal.tile2.nc",nf90_nowrite,ncid) - if (tile==3) error=nf90_open("./anal.tile3.nc",nf90_nowrite,ncid) - if (tile==4) error=nf90_open("./anal.tile4.nc",nf90_nowrite,ncid) - if (tile==5) error=nf90_open("./anal.tile5.nc",nf90_nowrite,ncid) - if (tile==6) error=nf90_open("./anal.tile6.nc",nf90_nowrite,ncid) - - call netcdf_err(error, 'OPENING FILE' ) - - error=nf90_inq_varid(ncid, "slmsk", id_var) - call netcdf_err(error, 'READING slmsk ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING slmsk' ) - print*,'- SLMSK: ',maxval(dummy),minval(dummy) - tile_data%slmask(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "tsea", id_var) - call netcdf_err(error, 'READING tsea ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING tsea' ) - print*,'- TSEA: ',maxval(dummy),minval(dummy) - tile_data%tsea(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "sheleg", id_var) - call netcdf_err(error, 'READING sheleg ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING sheleg' ) - print*,'- SHELEG: ',maxval(dummy),minval(dummy) - tile_data%sheleg(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "tg3", id_var) - call netcdf_err(error, 'READING tg3 ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING tg3' ) - print*,'- TG3: ',maxval(dummy),minval(dummy) - tile_data%tg3(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "zorl", id_var) - call netcdf_err(error, 'READING zorl ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING zorl' ) - print*,'- ZORL: ',maxval(dummy),minval(dummy) - tile_data%zorl(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "alvsf", id_var) - call netcdf_err(error, 'READING alvsf ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING alvsf' ) - print*,'- ALVSF: ',maxval(dummy),minval(dummy) - tile_data%alvsf(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "alvwf", id_var) - call netcdf_err(error, 'READING alvwf ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING alvwf' ) - print*,'- ALVWF: ',maxval(dummy),minval(dummy) - tile_data%alvwf(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "alnsf", id_var) - call netcdf_err(error, 'READING alnsf ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 
'READING alnsf' ) - print*,'- ALNSF: ',maxval(dummy),minval(dummy) - tile_data%alnsf(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "alnwf", id_var) - call netcdf_err(error, 'READING alnwf ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING alnwf' ) - print*,'- ALNWF: ',maxval(dummy),minval(dummy) - tile_data%alnwf(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "facsf", id_var) - call netcdf_err(error, 'READING facsf ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING facsf' ) - print*,'- FACSF: ',maxval(dummy),minval(dummy) - tile_data%facsf(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "facwf", id_var) - call netcdf_err(error, 'READING facwf ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING facwf' ) - print*,'- FACWF: ',maxval(dummy),minval(dummy) - tile_data%facwf(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "vfrac", id_var) - call netcdf_err(error, 'READING vfrac ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING vfrac' ) - print*,'- VFRAC: ',maxval(dummy),minval(dummy) - tile_data%vfrac(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "canopy", id_var) - call netcdf_err(error, 'READING canopy ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING canopy' ) - print*,'- CANOPY: ',maxval(dummy),minval(dummy) - tile_data%canopy(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "f10m", id_var) - call netcdf_err(error, 'READING f10m ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING f10m' ) - print*,'- F10M: ',maxval(dummy),minval(dummy) - tile_data%f10m(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "t2m", id_var) - call netcdf_err(error, 'READING t2m ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING t2m' ) - print*,'- T2M: ',maxval(dummy),minval(dummy) - tile_data%t2m(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "q2m", id_var) - call netcdf_err(error, 'READING q2m ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING q2m' ) - print*,'- Q2M: ',maxval(dummy),minval(dummy) - tile_data%q2m(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "vtype", id_var) - call netcdf_err(error, 'READING vtype ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING vtype' ) - print*,'- VTYPE: ',maxval(dummy),minval(dummy) - tile_data%vtype(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "stype", id_var) - call netcdf_err(error, 'READING stype ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING stype' ) - print*,'- STYPE: ',maxval(dummy),minval(dummy) - tile_data%stype(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "uustar", id_var) - call netcdf_err(error, 'READING uustar ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING uustar' ) - print*,'- UUSTAR: ',maxval(dummy),minval(dummy) - tile_data%uustar(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "ffmm", id_var) - call netcdf_err(error, 'READING ffmm ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING ffmm' ) - print*,'- FFMM: ',maxval(dummy),minval(dummy) - tile_data%ffmm(istart:iend) = reshape(dummy, 
(/ijtile/)) - - error=nf90_inq_varid(ncid, "ffhh", id_var) - call netcdf_err(error, 'READING ffhh ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING ffhh' ) - print*,'- FFHH: ',maxval(dummy),minval(dummy) - tile_data%ffhh(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "hice", id_var) - call netcdf_err(error, 'READING hice ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING hice' ) - print*,'- HICE: ',maxval(dummy),minval(dummy) - tile_data%hice(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "fice", id_var) - call netcdf_err(error, 'READING fice ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING fice' ) - print*,'- FICE: ',maxval(dummy),minval(dummy) - tile_data%fice(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "tisfc", id_var) - call netcdf_err(error, 'READING tisfc ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING tisfc' ) - print*,'- TISFC: ',maxval(dummy),minval(dummy) - tile_data%tisfc(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "tprcp", id_var) - call netcdf_err(error, 'READING tprcp ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING tprcp' ) - print*,'- TPRCP: ',maxval(dummy),minval(dummy) - tile_data%tprcp(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "srflag", id_var) - call netcdf_err(error, 'READING srflag ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING srfalg' ) - print*,'- SRFLAG: ',maxval(dummy),minval(dummy) - tile_data%srflag(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "snwdph", id_var) - call netcdf_err(error, 'READING snwdph ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING snwdph' ) - print*,'- SNWDPH: ',maxval(dummy),minval(dummy) - tile_data%snwdph(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "shdmin", id_var) - call netcdf_err(error, 'READING shdmin ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING shdmin' ) - print*,'- SHDMIN: ',maxval(dummy),minval(dummy) - tile_data%shdmin(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "shdmax", id_var) - call netcdf_err(error, 'READING shdmax ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING shdmax' ) - print*,'- SHDMAX: ',maxval(dummy),minval(dummy) - tile_data%shdmax(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "slope", id_var) - call netcdf_err(error, 'READING slope ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING slope' ) - print*,'- SLOPE: ',maxval(dummy),minval(dummy) - tile_data%slope(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "snoalb", id_var) - call netcdf_err(error, 'READING snoalb ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING snoalb' ) - print*,'- SNOALB: ',maxval(dummy),minval(dummy) - tile_data%snoalb(istart:iend) = reshape(dummy, (/ijtile/)) - - if (trim(donst) == "yes" .or. 
trim(donst) == "YES") then - - error=nf90_inq_varid(ncid, "c_0", id_var) - call netcdf_err(error, 'READING c_0 ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING c_0' ) - print*,'- C_0: ',maxval(dummy),minval(dummy) - tile_data%c0(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "c_d", id_var) - call netcdf_err(error, 'READING c_d ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING c_d' ) - print*,'- C_D: ',maxval(dummy),minval(dummy) - tile_data%cd(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "d_conv", id_var) - call netcdf_err(error, 'READING d_conv ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING d_conv' ) - print*,'- D_CONV: ',maxval(dummy),minval(dummy) - tile_data%dconv(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "dt_cool", id_var) - call netcdf_err(error, 'READING dt_cool ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING dt_cool' ) - print*,'- DT_COOL: ',maxval(dummy),minval(dummy) - tile_data%dtcool(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "qrain", id_var) - call netcdf_err(error, 'READING qrain ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING qrain' ) - print*,'- QRAIN: ',maxval(dummy),minval(dummy) - tile_data%qrain(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "tref", id_var) - call netcdf_err(error, 'READING tref ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING tref' ) - print*,'- TREF: ',maxval(dummy),minval(dummy) - tile_data%tref(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "w_0", id_var) - call netcdf_err(error, 'READING w_0 ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING w_0' ) - print*,'- W_0: ',maxval(dummy),minval(dummy) - tile_data%w0(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "w_d", id_var) - call netcdf_err(error, 'READING w_d ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING w_d' ) - print*,'- W_D: ',maxval(dummy),minval(dummy) - tile_data%wd(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "xs", id_var) - call netcdf_err(error, 'READING xs ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING xs' ) - print*,'- XS: ',maxval(dummy),minval(dummy) - tile_data%xs(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "xt", id_var) - call netcdf_err(error, 'READING xt ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING xt' ) - print*,'- XT: ',maxval(dummy),minval(dummy) - tile_data%xt(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "xtts", id_var) - call netcdf_err(error, 'READING xtts ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING xtts' ) - print*,'- XTTS: ',maxval(dummy),minval(dummy) - tile_data%xtts(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "xzts", id_var) - call netcdf_err(error, 'READING xzts ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING xzts' ) - print*,'- XZTS: ',maxval(dummy),minval(dummy) - tile_data%xzts(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "xu", id_var) - call netcdf_err(error, 'READING xu ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call 
netcdf_err(error, 'READING xu' ) - print*,'- XU: ',maxval(dummy),minval(dummy) - tile_data%xu(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "xv", id_var) - call netcdf_err(error, 'READING xv ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING xv' ) - print*,'- XV: ',maxval(dummy),minval(dummy) - tile_data%xv(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "xz", id_var) - call netcdf_err(error, 'READING xz ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING xz' ) - print*,'- XZ: ',maxval(dummy),minval(dummy) - tile_data%xz(istart:iend) = reshape(dummy, (/ijtile/)) - - error=nf90_inq_varid(ncid, "z_c", id_var) - call netcdf_err(error, 'READING z_c ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING z_c' ) - print*,'- Z_C: ',maxval(dummy),minval(dummy) - tile_data%zc(istart:iend) = reshape(dummy, (/ijtile/)) - - endif ! nst fields - - error=nf90_inq_varid(ncid, "smc", id_var) - call netcdf_err(error, 'READING smc ID' ) - error=nf90_get_var(ncid, id_var, dummy3d) - call netcdf_err(error, 'READING smc' ) - print*,'- SMC: ',maxval(dummy3d),minval(dummy3d) - tile_data%smc(istart:iend,1:4) = reshape(dummy3d, (/ijtile,4/)) - - error=nf90_inq_varid(ncid, "stc", id_var) - call netcdf_err(error, 'READING stc ID' ) - error=nf90_get_var(ncid, id_var, dummy3d) - call netcdf_err(error, 'READING stc' ) - print*,'- STC: ',maxval(dummy3d),minval(dummy3d) - tile_data%stc(istart:iend,1:4) = reshape(dummy3d, (/ijtile,4/)) - - error=nf90_inq_varid(ncid, "slc", id_var) - call netcdf_err(error, 'READING slc ID' ) - error=nf90_get_var(ncid, id_var, dummy3d) - call netcdf_err(error, 'READING slc' ) - print*,'- SLC: ',maxval(dummy3d),minval(dummy3d) - tile_data%slc(istart:iend,1:4) = reshape(dummy3d, (/ijtile,4/)) - - error = nf90_close(ncid) - - print* - print*, "- READ INPUT OROG DATA FOR TILE: ",tile - - if (tile==1) error=nf90_open("./orog.tile1.nc",nf90_nowrite,ncid) - if (tile==2) error=nf90_open("./orog.tile2.nc",nf90_nowrite,ncid) - if (tile==3) error=nf90_open("./orog.tile3.nc",nf90_nowrite,ncid) - if (tile==4) error=nf90_open("./orog.tile4.nc",nf90_nowrite,ncid) - if (tile==5) error=nf90_open("./orog.tile5.nc",nf90_nowrite,ncid) - if (tile==6) error=nf90_open("./orog.tile6.nc",nf90_nowrite,ncid) - - call netcdf_err(error, 'OPENING FILE' ) - - error=nf90_inq_varid(ncid, "orog_raw", id_var) - call netcdf_err(error, 'READING orog_raw ID' ) - error=nf90_get_var(ncid, id_var, dummy) - call netcdf_err(error, 'READING orog_raw' ) - print*,'- OROG: ',maxval(dummy),minval(dummy) - tile_data%orog(istart:iend) = reshape(dummy, (/ijtile/)) - - error = nf90_close(ncid) - - enddo - - deallocate (dummy, dummy3d) - - end subroutine read_data_anl - -!------------------------------------------------------------------------------------------- -! Netcdf error routine. -!------------------------------------------------------------------------------------------- - - subroutine netcdf_err(err, string) - - use netcdf - - implicit none - - character(len=*), intent(in) :: string - integer, intent(in) :: err - - character(len=256) :: errmsg - - if( err.eq.nf90_noerr )return - - errmsg = nf90_strerror(err) - print*,'' - print*,'** FATAL ERROR: ', trim(string), ': ', trim(errmsg) - print*,'STOP.' 
- call errexit(22) - - return - end subroutine netcdf_err diff --git a/sorc/gaussian_sfcanl.fd/makefile.sh b/sorc/gaussian_sfcanl.fd/makefile.sh deleted file mode 100755 index b1c5adefde..0000000000 --- a/sorc/gaussian_sfcanl.fd/makefile.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/sh - -export FFLAGS="-O3 -fp-model precise -g -r8 -i4" -# for debugging -#export FFLAGS="-g -r8 -i4 -warn unused -check bounds" - -export NETCDF_INCLUDE="-I${NETCDF}/include" -export NETCDF_LDFLAGS_F="-L${NETCDF}/lib -lnetcdf -lnetcdff -L${HDF5}/lib -lhdf5 " - -make clean -make build -err=$? -if [ $err -ne 0 ]; then - echo ERROR BUILDING GAUSSIAN_SFCANL - exit 2 -fi -make install - -exit diff --git a/sorc/gaussian_sfcanl.fd/weight_gen/README b/sorc/gaussian_sfcanl.fd/weight_gen/README deleted file mode 100644 index 304c6f0e0e..0000000000 --- a/sorc/gaussian_sfcanl.fd/weight_gen/README +++ /dev/null @@ -1,23 +0,0 @@ -Creates the ESMF integration weight files to transform from cubed-sphere grids -to comparable (in resolution) global gaussian grids. - -First, compile the program that creates the 'scrip' files for the -global gaussian grids. For each resolution, two grids are created: -one normal grid and one grid with two extra rows for the N/S poles. -To compile, cd to ./scrip.fd and type 'make.sh'. Currently, only -compiles/runs on Theia. - -Then, run the 'run.theia.ksh' script for the resolution desired. -Script first calls the 'scrip' program, then calls ESMF utility -'RegridWeightGen' to create the interpolation weight files. - -Weight files for the following transforms are created: - -C48 => 192x94 and 192x96 gaussian -C96 => 384x192 and 384x194 gaussian -C128 => 512x256 and 512x258 gaussian -C192 => 768x384 and 768x386 gaussian -C384 => 1536x768 and 1536x770 gaussian -C768 => 3072x1536 and 3072x1538 gaussian -C1152 => 4608x2304 and 4608x2406 gaussian -C3072 => 12288x6144 and 12288x6146 gaussian diff --git a/sorc/gaussian_sfcanl.fd/weight_gen/run.theia.sh b/sorc/gaussian_sfcanl.fd/weight_gen/run.theia.sh deleted file mode 100755 index c1673fd655..0000000000 --- a/sorc/gaussian_sfcanl.fd/weight_gen/run.theia.sh +++ /dev/null @@ -1,152 +0,0 @@ -#!/bin/sh - -#------------------------------------------------------------------------ -# Run the "RegridWeightGen" step on Theia to create interpolation -# weight files to transform from cubed-sphere tiles to global -# gaussian. -# -# First, create the 'scrip' files for the gaussian grids. Two -# grids are created - the normal gaussian grid, and one with -# two extra rows at the N/S poles. The program to create the -# script files is in ./scrip.fd. To compile, type 'make.sh'. -# Then, run the RegridWeightGen step to create the interpolation -# weight files. 
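Aside on the resolution table in the removed README above: each cubed-sphere resolution maps to its gaussian grid by a fixed rule (longitudes = 4 x CRES, latitudes = 2 x CRES, plus a second grid with one extra row at each pole), which the scrip program below spells out case by case. A minimal sketch of that rule, illustrative only and assuming the 4x/2x convention used throughout these files:

  program gaussian_dims_sketch
    implicit none
    integer :: cres, lonb, latb, latbp

    cres  = 768          ! cubed-sphere resolution, e.g. C768
    lonb  = 4 * cres     ! gaussian longitudes (3072)
    latb  = 2 * cres     ! gaussian latitudes  (1536)
    latbp = latb + 2     ! pole-augmented grid adds one row at each pole (1538)

    print *, 'C', cres, ' => ', lonb, 'x', latb, ' and ', lonb, 'x', latbp
  end program gaussian_dims_sketch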
-#------------------------------------------------------------------------ - -#PBS -l nodes=1:ppn=1 -#PBS -l walltime=0:30:00 -#PBS -A fv3-cpu -#PBS -q debug -#PBS -N fv3_wgtgen -#PBS -o ./log -#PBS -e ./log - -set -x - -CRES=C48 # run with one mpi task -#CRES=C96 # run with one mpi task -#CRES=C128 # run with one mpi task -#CRES=C192 # run with one mpi task -#CRES=C384 # run with one mpi task -#CRES=C768 # run with 4 mpi tasks -#CRES=C1152 # run with 8 mpi tasks -#CRES=C3072 # run on two nodes, 8 tasks per node - -WORK=/scratch3/NCEPDEV/stmp1/$LOGNAME/weight_gen -rm -fr $WORK -mkdir -p $WORK -cd $WORK - -source /apps/lmod/lmod/init/sh -module purge -module load intel/15.1.133 -module load impi/5.0.1.035 -module use /scratch4/NCEPDEV/nems/noscrub/emc.nemspara/soft/modulefiles -module load esmf/7.1.0r -module load netcdf/4.3.0 -module load hdf5/1.8.14 - -#------------------------------------------------------------------------ -# The RegridWeightGen program. -#------------------------------------------------------------------------ - -RWG=/scratch4/NCEPDEV/nems/noscrub/emc.nemspara/soft/esmf/7.1.0r/bin/ESMF_RegridWeightGen - -#------------------------------------------------------------------------ -# Path to the 'mosaic' and 'grid' files for each cubed-sphere -# resolution. -#------------------------------------------------------------------------ - -FIX_DIR=/scratch4/NCEPDEV/global/save/glopara/svn/fv3gfs/fix/fix_fv3_gmted2010/$CRES - -#------------------------------------------------------------------------ -# Create 'scrip' files for two gaussian grids. One normal grid -# and one with two extra rows at the N/S poles. -#------------------------------------------------------------------------ - -${PBS_O_WORKDIR}/scrip.fd/scrip.exe $CRES - -if [[ $? -ne 0 ]]; then - echo "ERROR CREATING SCRIP FILE" - exit 2 -fi - -#------------------------------------------------------------------------ -# Create weight files. -#------------------------------------------------------------------------ - -case $CRES in - "C48" ) - LONB="192" - LATB="94" - LATB2="96" - ;; - "C96" ) - LONB="384" - LATB="192" - LATB2="194" - ;; - "C128" ) - LONB="512" - LATB="256" - LATB2="258" - ;; - "C192" ) - LONB="768" - LATB="384" - LATB2="386" - ;; - "C384" ) - LONB="1536" - LATB="768" - LATB2="770" - ;; - "C768" ) - LONB="3072" - LATB="1536" - LATB2="1538" - ;; - "C1152" ) - LONB="4608" - LATB="2304" - LATB2="2306" - ;; - "C3072" ) - LONB="12288" - LATB="6144" - LATB2="6146" - ;; - * ) - echo "GRID NOT SUPPORTED" - exit 3 - ;; -esac - -np=$PBS_NP - -mpirun -np $np $RWG -d ./gaussian.${LONB}.${LATB}.nc -s $FIX_DIR/${CRES}_mosaic.nc \ - -w fv3_SCRIP_${CRES}_GRIDSPEC_lon${LONB}_lat${LATB}.gaussian.neareststod.nc \ - -m neareststod --64bit_offset --tilefile_path $FIX_DIR - -mpirun -np $np $RWG -d ./gaussian.${LONB}.${LATB}.nc -s $FIX_DIR/${CRES}_mosaic.nc \ - -w fv3_SCRIP_${CRES}_GRIDSPEC_lon${LONB}_lat${LATB}.gaussian.bilinear.nc \ - -m bilinear --64bit_offset --tilefile_path $FIX_DIR - -mpirun -np $np $RWG -d ./gaussian.${LONB}.${LATB2}.nc -s $FIX_DIR/${CRES}_mosaic.nc \ - -w fv3_SCRIP_${CRES}_GRIDSPEC_lon${LONB}_lat${LATB2}.gaussian.neareststod.nc \ - -m neareststod --64bit_offset --tilefile_path $FIX_DIR - -#------------------------------------------------------------------------ -# Could not get this C3072 bilinear option to work. This grid is -# so big we are pushing the limits of the utility. 
-#------------------------------------------------------------------------ - -if [[ $CRES == "C3072" ]]; then - exit 0 -fi - -mpirun -np $np $RWG -d ./gaussian.${LONB}.${LATB2}.nc -s $FIX_DIR/${CRES}_mosaic.nc \ - -w fv3_SCRIP_${CRES}_GRIDSPEC_lon${LONB}_lat${LATB2}.gaussian.bilinear.nc \ - -m bilinear --64bit_offset --tilefile_path $FIX_DIR - -exit diff --git a/sorc/gaussian_sfcanl.fd/weight_gen/scrip.fd/make.sh b/sorc/gaussian_sfcanl.fd/weight_gen/scrip.fd/make.sh deleted file mode 100755 index 12ed3eefd9..0000000000 --- a/sorc/gaussian_sfcanl.fd/weight_gen/scrip.fd/make.sh +++ /dev/null @@ -1,60 +0,0 @@ -#!/bin/sh - -set -x - -mac=$(hostname -f) - -case $mac in - -#--------------------------------------------------------------------------------- -# BUILD PROGRAM ON WCOSS Phase 1/2. -#--------------------------------------------------------------------------------- - -g????.ncep.noaa.gov | t????.ncep.noaa.gov) - - echo "WCOSS PHASE 1/2 BUILD NOT ADDED YET" - exit 1 ;; - -#--------------------------------------------------------------------------------- -# BUILD PROGRAM ON WCOSS CRAY. -#--------------------------------------------------------------------------------- - -llogin? | slogin?) - - echo "WCOSS CRAY BUILD NOT ADDED YET" - exit 1 ;; - -#--------------------------------------------------------------------------------- -# BUILD PROGRAM ON HERA. -#--------------------------------------------------------------------------------- - -hfe??) - - source /apps/lmod/lmod/init/sh - module purge - - module load intel/18.0.5.274 - - export FCOMP=ifort - export FFLAGS="-O0 -g -traceback -r8 -i4 -convert big_endian -check bounds" - - module load netcdf/4.7.0 - module load hdf5/1.10.5 - export NETCDF_INCLUDE="-I${NETCDF}/include" - export NETCDF_LDFLAGS_F="-L${NETCDF}/lib -lnetcdf -lnetcdff -L${HDF5}/lib -lhdf5 -lhdf5_fortran" - - module use -a /scratch2/NCEPDEV/nwprod/NCEPLIBS/modulefiles - module load sp/2.0.2 - - make clean - make - rc=$? ;; - -*) - - echo "DOES NOT BUILD ON THIS MACHINE." - exit 1 ;; - -esac - -exit diff --git a/sorc/gaussian_sfcanl.fd/weight_gen/scrip.fd/makefile b/sorc/gaussian_sfcanl.fd/weight_gen/scrip.fd/makefile deleted file mode 100755 index 74949b96bb..0000000000 --- a/sorc/gaussian_sfcanl.fd/weight_gen/scrip.fd/makefile +++ /dev/null @@ -1,14 +0,0 @@ -SHELL= /bin/sh - -CMD= scrip.exe - -OBJS = scrip.o - -$(CMD): $(OBJS) - $(FCOMP) $(FFLAGS) $(NETCDF_INCLUDE) -o $(CMD) $(OBJS) $(NETCDF_LDFLAGS_F) $(SP_LIBd) - -scrip.o: scrip.f90 - $(FCOMP) $(FFLAGS) $(NETCDF_INCLUDE) -c scrip.f90 - -clean: - rm -f *.o *.mod ${CMD} *.exe.* diff --git a/sorc/gaussian_sfcanl.fd/weight_gen/scrip.fd/scrip.f90 b/sorc/gaussian_sfcanl.fd/weight_gen/scrip.fd/scrip.f90 deleted file mode 100644 index 5c4d2a4221..0000000000 --- a/sorc/gaussian_sfcanl.fd/weight_gen/scrip.fd/scrip.f90 +++ /dev/null @@ -1,350 +0,0 @@ - program scrip - -!---------------------------------------------------------------------- -! Create "scrip" files that describes a gaussian grid. -! Two files are created: the normal gaussian grid and one with -! two extra rows for the N/S poles. 
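A short illustrative sketch of how the pole-augmented grid relates to the normal gaussian grid described above: the interior rows are copied unchanged and two rows are added at +/-90 degrees. The latitude values here are stand-ins, not the splat values the program actually computes:

  program pole_rows_sketch
    implicit none
    integer, parameter :: idim = 8, jdim = 4, jdimp = jdim + 2
    real(kind=8) :: lats(idim,jdim), latsp(idim,jdimp)
    integer :: j

    ! stand-in gaussian latitudes (the real program gets these from splat)
    do j = 1, jdim
       lats(:,j) = 90.0_8 - (real(j,8) - 0.5_8) * 180.0_8 / real(jdim,8)
    end do

    latsp(:,1)        =  90.0_8      ! extra north-pole row
    latsp(:,jdimp)    = -90.0_8      ! extra south-pole row
    latsp(:,2:jdim+1) = lats         ! interior rows copied unchanged

    print *, 'first/last rows:', latsp(1,1), latsp(1,jdimp)
  end program pole_rows_sketch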
-!---------------------------------------------------------------------- - - implicit none - - character(len=128) :: outfile - character(len=20) :: title - character(len=5) :: idim_ch, jdim_ch, jdimp_ch - character(len=6) :: cres - - integer :: header_buffer_val = 16384 - integer :: fsize=65536, inital = 0 - integer :: error, ncid - integer :: i, j, idim, jdim, ijdim - integer :: jdimp - integer :: dim_size, dim_corners, dim_rank - integer :: id_dims, id_center_lat, id_center_lon - integer :: id_imask, id_corner_lat, id_corner_lon - integer :: num_corners = 4 - integer :: rank = 2 - integer(kind=4), allocatable :: mask(:) - - real(kind=8) :: corner_lon_src - real(kind=8) :: dx_src, lat_edge - real(kind=8), allocatable :: lats(:,:), lons(:,:), dum1d(:) - real(kind=8), allocatable :: dum2d(:,:), latsp(:,:), lonsp(:,:) - real(kind=8), allocatable :: lats_corner(:,:,:), lons_corner(:,:,:) - real(kind=8), allocatable :: latsp_corner(:,:,:), lonsp_corner(:,:,:) - real(kind=8), allocatable :: slat(:), wlat(:) - - include "netcdf.inc" - - call getarg(1, cres) - - select case (trim(cres)) - case ("c48","C48") - idim = 192 ! cres * 4 - jdim = 94 ! cres * 2 - jdimp = 96 ! include two rows for the poles - idim_ch = "192" - jdim_ch = "94" - jdimp_ch = "96" - case ("c96","C96") - idim = 384 ! cres * 4 - jdim = 192 ! cres * 2 - jdimp = 194 ! include two rows for the poles - idim_ch = "384" - jdim_ch = "192" - jdimp_ch = "194" - case ("c128","C128") - idim = 512 ! cres * 4 - jdim = 256 ! cres * 2 - jdimp = 258 ! include two rows for the poles - idim_ch = "512" - jdim_ch = "256" - jdimp_ch = "258" - case ("c192","C192") - idim = 768 ! cres * 4 - jdim = 384 ! cres * 2 - jdimp = 386 ! include two rows for the poles - idim_ch = "768" - jdim_ch = "384" - jdimp_ch = "386" - case ("c384","C384") - idim = 1536 ! cres * 4 - jdim = 768 ! cres * 2 - jdimp = 770 ! include two rows for the poles - idim_ch = "1536" - jdim_ch = "768" - jdimp_ch = "770" - case ("c768","C768") - idim = 3072 ! cres * 4 - jdim = 1536 ! cres * 2 - jdimp = 1538 ! include two rows for the poles - idim_ch = "3072" - jdim_ch = "1536" - jdimp_ch = "1538" - case ("c1152","C1152") - idim = 4608 ! cres * 4 - jdim = 2304 ! cres * 2 - jdimp = 2306 ! include two rows for the poles - idim_ch = "4608" - jdim_ch = "2304" - jdimp_ch = "2306" - case ("c3072","C3072") - idim = 12288 ! cres * 4 - jdim = 6144 ! cres * 2 - jdimp = 6146 ! include two rows for the poles - idim_ch = "12288" - jdim_ch = "6144" - jdimp_ch = "6146" - case default - print*,'- Resolution not supported ', trim(cres) - stop 3 - end select - - corner_lon_src = 0.0 - dx_src = 360.0 / float(idim) - ijdim = idim*jdim - - allocate(slat(jdim)) - allocate(wlat(jdim)) - - call splat(4, jdim, slat, wlat) - - allocate(lats(idim,jdim)) - allocate(lats_corner(num_corners,idim,jdim)) - allocate(lons(idim,jdim)) - allocate(lons_corner(num_corners,idim,jdim)) - - do j = 1, jdim - lats(:,j) = 90.0 - (acos(slat(j))* 180.0 / (4.*atan(1.))) - enddo - - deallocate(slat, wlat) - -!---------------------------------------------------------------- -! First, output file without poles. -!---------------------------------------------------------------- - -!---------------------------------------------------------------- -! Set corners in counter-clockwise order -! -! 2 1 -! -! C -! -! 
3 4 -!---------------------------------------------------------------- - - lats_corner(1,:,1) = 90.0 - lats_corner(2,:,1) = 90.0 - - lats_corner(3,:,jdim) = -90.0 - lats_corner(4,:,jdim) = -90.0 - - do j = 1, jdim - 1 - lat_edge = (lats(1,j) + lats(1,j+1)) / 2.0 - lats_corner(3,:,j) = lat_edge - lats_corner(4,:,j) = lat_edge - lats_corner(1,:,j+1) = lat_edge - lats_corner(2,:,j+1) = lat_edge - enddo - - do i = 1, idim - lons(i,:) = corner_lon_src + float(i-1)*dx_src - lons_corner(1,i,:) = lons(i,:) + (dx_src*0.5) - lons_corner(2,i,:) = lons(i,:) - (dx_src*0.5) - lons_corner(3,i,:) = lons(i,:) - (dx_src*0.5) - lons_corner(4,i,:) = lons(i,:) + (dx_src*0.5) - enddo - - i = 1 - j = 1 - print*,'center ',lats(i,j),lons(i,j) - print*,'corner 1 ',lats_corner(1,i,j),lons_corner(1,i,j) - print*,'corner 2 ',lats_corner(2,i,j),lons_corner(2,i,j) - print*,'corner 3 ',lats_corner(3,i,j),lons_corner(3,i,j) - print*,'corner 4 ',lats_corner(4,i,j),lons_corner(4,i,j) - - i = 1 - j = 2 - print*,'center ',lats(i,j),lons(i,j) - print*,'corner 1 ',lats_corner(1,i,j),lons_corner(1,i,j) - print*,'corner 2 ',lats_corner(2,i,j),lons_corner(2,i,j) - print*,'corner 3 ',lats_corner(3,i,j),lons_corner(3,i,j) - print*,'corner 4 ',lats_corner(4,i,j),lons_corner(4,i,j) - - i = 1 - j = jdim - print*,'center ',lats(i,j),lons(i,j) - print*,'corner 1 ',lats_corner(1,i,j),lons_corner(1,i,j) - print*,'corner 2 ',lats_corner(2,i,j),lons_corner(2,i,j) - print*,'corner 3 ',lats_corner(3,i,j),lons_corner(3,i,j) - print*,'corner 4 ',lats_corner(4,i,j),lons_corner(4,i,j) - - i = 1 - j = jdim-1 - print*,'center ',lats(i,j),lons(i,j) - print*,'corner 1 ',lats_corner(1,i,j),lons_corner(1,i,j) - print*,'corner 2 ',lats_corner(2,i,j),lons_corner(2,i,j) - print*,'corner 3 ',lats_corner(3,i,j),lons_corner(3,i,j) - print*,'corner 4 ',lats_corner(4,i,j),lons_corner(4,i,j) - - allocate(mask(ijdim)) - mask = 1 - -! output file without pole. - - outfile = " " - outfile = "./gaussian." // trim(idim_ch) // "." // trim(jdim_ch) // ".nc" - title = " " - title = "gaussian." // trim(idim_ch) // "." 
// trim(jdim_ch) - -!--- open the file - error = NF__CREATE(outfile, IOR(NF_NETCDF4,NF_CLASSIC_MODEL), inital, fsize, ncid) - print*, 'error after open ', error - -!--- define dimension - error = nf_def_dim(ncid, 'grid_size', ijdim, dim_size) - error = nf_def_dim(ncid, 'grid_corners', num_corners, dim_corners) - error = nf_def_dim(ncid, 'grid_rank', rank, dim_rank) - -!--- define field - error = nf_def_var(ncid, 'grid_dims', NF_INT, 1, (/dim_rank/), id_dims) - error = nf_def_var(ncid, 'grid_center_lat', NF_DOUBLE, 1, (/dim_size/), id_center_lat) - error = nf_put_att_text(ncid, id_center_lat, "units", 7, "degrees") - error = nf_def_var(ncid, 'grid_center_lon', NF_DOUBLE, 1, (/dim_size/), id_center_lon) - error = nf_put_att_text(ncid, id_center_lon, "units", 7, "degrees") - error = nf_def_var(ncid, 'grid_imask', NF_INT, 1, (/dim_size/), id_imask) - error = nf_put_att_text(ncid, id_imask, "units", 8, "unitless") - error = nf_def_var(ncid, 'grid_corner_lat', NF_DOUBLE, 2, (/dim_corners,dim_size/), id_corner_lat) - error = nf_put_att_text(ncid, id_corner_lat, "units", 7, "degrees") - error = nf_def_var(ncid, 'grid_corner_lon', NF_DOUBLE, 2, (/dim_corners,dim_size/), id_corner_lon) - error = nf_put_att_text(ncid, id_corner_lon, "units", 7, "degrees") - error = nf_put_att_text(ncid, NF_GLOBAL, "title", 20, trim(title)) - error = nf__enddef(ncid, header_buffer_val,4,0,4) - -!--- set fields - error = nf_put_var_int( ncid, id_dims, (/idim,jdim/)) - - allocate(dum1d(ijdim)) - dum1d = reshape(lats, (/ijdim/)) - error = nf_put_var_double( ncid, id_center_lat, dum1d) - dum1d = reshape(lons, (/ijdim/)) - error = nf_put_var_double( ncid, id_center_lon, dum1d) - deallocate(dum1d) - - error = nf_put_var_int( ncid, id_imask, mask) - deallocate(mask) - - allocate(dum2d(num_corners,ijdim)) - dum2d = reshape (lats_corner, (/num_corners,ijdim/)) - error = nf_put_var_double( ncid, id_corner_lat, dum2d) - - dum2d = reshape (lons_corner, (/num_corners,ijdim/)) - error = nf_put_var_double( ncid, id_corner_lon, dum2d) - deallocate(dum2d) - - error = nf_close(ncid) - -!---------------------------------------------------------------- -! output file with poles. -!---------------------------------------------------------------- - - outfile = " " - outfile = "./gaussian." // trim(idim_ch) // "." // trim(jdimp_ch) // ".nc" - title = " " - title = "gaussian." // trim(idim_ch) // "." 
// trim(jdimp_ch) - - ijdim = idim*jdimp - - allocate(latsp(idim,jdimp)) - allocate(lonsp(idim,jdimp)) - - do j = 2, jdim+1 - latsp(:,j) = lats(:,j-1) - lonsp(:,j) = lons(:,j-1) - enddo - - latsp(:,1) = 90.0_8 - lonsp(:,1) = 0.0_8 - - latsp(:,jdimp) = -90.0_8 - lonsp(:,jdimp) = 0.0_8 - - deallocate(lats, lons) - - allocate(latsp_corner(num_corners,idim,jdimp)) - allocate(lonsp_corner(num_corners,idim,jdimp)) - - latsp_corner(:,:,1) = 89.5_8 - latsp_corner(:,:,jdimp) = -89.5_8 - - lonsp_corner(1,:,1) = 0.0_8 - lonsp_corner(2,:,1) = 90.0_8 - lonsp_corner(3,:,1) = 180.0_8 - lonsp_corner(4,:,1) = 270.0_8 - - lonsp_corner(1,:,jdimp) = 0.0_8 - lonsp_corner(2,:,jdimp) = 90.0_8 - lonsp_corner(3,:,jdimp) = 180.0_8 - lonsp_corner(4,:,jdimp) = 270.0_8 - - do j = 2, jdim+1 - latsp_corner(:,:,j) = lats_corner(:,:,j-1) - lonsp_corner(:,:,j) = lons_corner(:,:,j-1) - enddo - - deallocate(lats_corner, lons_corner) - -!--- open the file - error = NF__CREATE(outfile, IOR(NF_NETCDF4,NF_CLASSIC_MODEL), inital, fsize, ncid) - print*, 'error after open ', error - -!--- define dimension - error = nf_def_dim(ncid, 'grid_size', ijdim, dim_size) - error = nf_def_dim(ncid, 'grid_corners', num_corners, dim_corners) - error = nf_def_dim(ncid, 'grid_rank', rank, dim_rank) - -!--- define field - error = nf_def_var(ncid, 'grid_dims', NF_INT, 1, (/dim_rank/), id_dims) - error = nf_def_var(ncid, 'grid_center_lat', NF_DOUBLE, 1, (/dim_size/), id_center_lat) - error = nf_put_att_text(ncid, id_center_lat, "units", 7, "degrees") - error = nf_def_var(ncid, 'grid_center_lon', NF_DOUBLE, 1, (/dim_size/), id_center_lon) - error = nf_put_att_text(ncid, id_center_lon, "units", 7, "degrees") - error = nf_def_var(ncid, 'grid_imask', NF_INT, 1, (/dim_size/), id_imask) - error = nf_put_att_text(ncid, id_imask, "units", 8, "unitless") - error = nf_def_var(ncid, 'grid_corner_lat', NF_DOUBLE, 2, (/dim_corners,dim_size/), id_corner_lat) - error = nf_put_att_text(ncid, id_corner_lat, "units", 7, "degrees") - error = nf_def_var(ncid, 'grid_corner_lon', NF_DOUBLE, 2, (/dim_corners,dim_size/), id_corner_lon) - error = nf_put_att_text(ncid, id_corner_lon, "units", 7, "degrees") - error = nf_put_att_text(ncid, NF_GLOBAL, "title", 20, trim(title)) - error = nf__enddef(ncid, header_buffer_val,4,0,4) - -!--- set fields - error = nf_put_var_int( ncid, id_dims, (/idim,jdimp/)) - - allocate(dum1d(ijdim)) - dum1d = reshape(latsp, (/ijdim/)) - error = nf_put_var_double( ncid, id_center_lat, dum1d) - dum1d = reshape(lonsp, (/ijdim/)) - error = nf_put_var_double( ncid, id_center_lon, dum1d) - deallocate(dum1d) - - allocate(mask(ijdim)) - mask = 1 - error = nf_put_var_int( ncid, id_imask, mask) - deallocate(mask) - - allocate(dum2d(num_corners,ijdim)) - dum2d = reshape (latsp_corner, (/num_corners,ijdim/)) - print*,'lat corner check ',maxval(dum2d),minval(dum2d) - error = nf_put_var_double( ncid, id_corner_lat, dum2d) - deallocate(latsp_corner) - - dum2d = reshape (lonsp_corner, (/num_corners,ijdim/)) - error = nf_put_var_double( ncid, id_corner_lon, dum2d) - deallocate(dum2d, lonsp_corner) - - error = nf_close(ncid) - - print*,'- DONE.' - - end program scrip diff --git a/sorc/gfs_bufr.fd/bfrhdr.f b/sorc/gfs_bufr.fd/bfrhdr.f deleted file mode 100755 index 8bab3043bc..0000000000 --- a/sorc/gfs_bufr.fd/bfrhdr.f +++ /dev/null @@ -1,174 +0,0 @@ - SUBROUTINE BFRHDR ( luntbl, cseqn, prfflg, clist, np, iret ) -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . 
-C SUBPROGRAM: PROGRAM NAME (up to 20 characters) -C PRGMMR: YOUR NAME ORG: W/NMCXX DATE: YY-MM-DD -C -C ABSTRACT: START ABSTRACT HERE AND INDENT TO COLUMN 5 ON THE -C FOLLOWING LINES. PLEASE PROVIDE A BRIEF DESCRIPTION OF -C WHAT THE SUBPROGRAM DOES. -C -C PROGRAM HISTORY LOG: -C YY-MM-DD ORIGINAL PROGRAMMER'S NAME HERE -C YY-MM-DD MODIFIER1 DESCRIPTION OF CHANGE -C YY-MM-DD MODIFIER2 DESCRIPTION OF CHANGE -C -C USAGE: CALL PROGRAM-NAME(INARG1, INARG2, WRKARG, OUTARG1, ... ) -C INPUT ARGUMENT LIST: -C INARG1 - GENERIC DESCRIPTION, INCLUDING CONTENT, UNITS, -C INARG2 - TYPE. EXPLAIN FUNCTION IF CONTROL VARIABLE. -C -C OUTPUT ARGUMENT LIST: (INCLUDING WORK ARRAYS) -C WRKARG - GENERIC DESCRIPTION, ETC., AS ABOVE. -C OUTARG1 - EXPLAIN COMPLETELY IF ERROR RETURN -C ERRFLAG - EVEN IF MANY LINES ARE NEEDED -C -C INPUT FILES: (DELETE IF NO INPUT FILES IN SUBPROGRAM) -C -C OUTPUT FILES: (DELETE IF NO OUTPUT FILES IN SUBPROGRAM) -C -C REMARKS: LIST CAVEATS, OTHER HELPFUL HINTS OR INFORMATION -C -C ATTRIBUTES: -C LANGUAGE: INDICATE EXTENSIONS, COMPILER OPTIONS -C MACHINE: IBM SP -C -C$$$ -C*********************************************************************** -C* BFRHDR -C* -C* This subroutine reads a Jack Woollen BUFR encoding table file to -C* get the string of parameters to be written. This subroutine is -C* given the sequence nmemonic and returns the list associated with it. -C* This list is a character string and is used as the last input to -C* UFBINT. -C* -C* -C* BFRHDR ( LUNTBL, CSEQN, PRFFLG, CLIST, NP, IRET ) -C* -C* Input parameters: -C* LUNTBL INTEGER Unit number of BUFR Table file -C* CSEQN CHAR* Sequence mnemonic -C* PRFFLG LOGICAL Flag for profile parms -C* = .true. for multi-level parms -C* -C* Output parameters: -C* CLIST CHAR* String of parm names -C* NP INTEGER Number of parm names in string -C* IRET INTEGER Return code -C* 0 = normal return -C* -1 = Improper table file -C* -2 = Sequence not found -C** -C* Log: -C* K. Brill/NMC 05/94 -C*********************************************************************** -C* - CHARACTER*(*) cseqn, clist - LOGICAL prfflg -C* - LOGICAL found - CHARACTER*80 sbuf -C -C* Set starting column number of parameter list in the table. -C - DATA istart / 14 / -C----------------------------------------------------------------------- - iret = 0 -C -C* Count the number of lines to end of file (used to reposition -C* pointer to original line at the end). -C - found = .true. - lcnt = 1 - DO WHILE ( found ) - READ ( luntbl, 1000, IOSTAT=ios ) sbuf -1000 FORMAT (A) - IF ( ios .ne. 0 ) THEN - found = .false. - ELSE - lcnt = lcnt + 1 - END IF - END DO -C -C* Read from the file for positioning. -C - REWIND luntbl - found = .false. - DO WHILE ( .not. found ) - READ (luntbl, 1000, IOSTAT=ios ) sbuf - IF ( ios .ne. 0 ) THEN - iret = -1 - RETURN - END IF - iq1 = INDEX ( sbuf, '| REFERENCE' ) - iq2 = INDEX ( sbuf, '| UNITS' ) - iq = iq1 * iq2 - IF ( iq .ne. 0 ) found = .true. - END DO -C -C* Get length of sequence mnemonic string. -C - lc = LEN ( cseqn ) - DO WHILE ( cseqn ( lc:lc ) .eq. ' ' ) - lc = lc-1 - END DO -C -C* Start searching backward for the sequence mnemonic. -C - found = .false. - lenc=0 - DO WHILE ( .not. found ) - BACKSPACE luntbl - READ ( luntbl, 1000, IOSTAT=ios ) sbuf - IF ( ios .ne. 0 .or. sbuf (1:2) .eq. '.-' ) THEN - iret = -2 - RETURN - END IF - BACKSPACE luntbl - iq = INDEX ( sbuf ( 1:14 ), cseqn ( 1:lc ) ) - IF ( iq .ne. 0 ) THEN - found = .true. -C -C* Find the last character of last parameter. -C - i = 79 - DO WHILE ( sbuf ( i:i ) .eq. 
' ' ) - i = i - 1 - END DO - clist = ' ' - clist = sbuf ( istart:i ) -C -C* Count the number of entries in CLIST. -C - lenc = i - istart + 1 - nspcs = 0 - np = 0 - DO j = 1, lenc - IF ( clist ( j:j ) .eq. ' ' ) nspcs = nspcs + 1 - END DO - np = nspcs + 1 -C -C* Handle profile sequence. -C - IF ( prfflg ) THEN -C sbuf = cseqn ( 1:lc ) // '^ ' // clist ( 1:lenc ) - sbuf = clist ( 1:lenc ) - clist = sbuf - END IF - END IF - END DO -C -C* Reposition file to original record. -C - found = .true. - DO WHILE ( found ) - READ ( luntbl, 1000, IOSTAT=ios ) sbuf - IF ( ios .ne. 0 ) found = .false. - END DO - DO k = 1, lcnt - BACKSPACE luntbl - END DO -C* - RETURN - END diff --git a/sorc/gfs_bufr.fd/bfrize.f b/sorc/gfs_bufr.fd/bfrize.f deleted file mode 100755 index 1183c62f34..0000000000 --- a/sorc/gfs_bufr.fd/bfrize.f +++ /dev/null @@ -1,241 +0,0 @@ - SUBROUTINE BFRIZE ( luntbl, lunbfr, sbset, iyr, imn, idy, ihr, - + seqnam, seqflg, nseq, lvlwise, data, nlvl, - + clist, npp, wrkd, iret ) -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: PROGRAM NAME (up to 20 characters) -C PRGMMR: YOUR NAME ORG: W/NMCXX DATE: YY-MM-DD -C -C ABSTRACT: START ABSTRACT HERE AND INDENT TO COLUMN 5 ON THE -C FOLLOWING LINES. PLEASE PROVIDE A BRIEF DESCRIPTION OF -C WHAT THE SUBPROGRAM DOES. -C -C PROGRAM HISTORY LOG: -C YY-MM-DD ORIGINAL PROGRAMMER'S NAME HERE -C YY-MM-DD MODIFIER1 DESCRIPTION OF CHANGE -C YY-MM-DD MODIFIER2 DESCRIPTION OF CHANGE -C -C USAGE: CALL PROGRAM-NAME(INARG1, INARG2, WRKARG, OUTARG1, ... ) -C INPUT ARGUMENT LIST: -C INARG1 - GENERIC DESCRIPTION, INCLUDING CONTENT, UNITS, -C INARG2 - TYPE. EXPLAIN FUNCTION IF CONTROL VARIABLE. -C -C OUTPUT ARGUMENT LIST: (INCLUDING WORK ARRAYS) -C WRKARG - GENERIC DESCRIPTION, ETC., AS ABOVE. -C OUTARG1 - EXPLAIN COMPLETELY IF ERROR RETURN -C ERRFLAG - EVEN IF MANY LINES ARE NEEDED -C -C INPUT FILES: (DELETE IF NO INPUT FILES IN SUBPROGRAM) -C -C OUTPUT FILES: (DELETE IF NO OUTPUT FILES IN SUBPROGRAM) -C -C REMARKS: LIST CAVEATS, OTHER HELPFUL HINTS OR INFORMATION -C -C ATTRIBUTES: -C LANGUAGE: INDICATE EXTENSIONS, COMPILER OPTIONS -C MACHINE: IBM SP -C -C$$$ -C*********************************************************************** -C* BFRIZE -C* -C* This subroutine calls Jack Woollen's BUFR encoding routines to -C* write a BUFR message to an output file. SBSET is the Mnemonic -C* for the TABLE A entry associated with this message. It appears -C* in the table referenced by LUNTBL. If LUNTBL = 0, the output -C* BUFR file is closed. -C* -C* The data in the array DATA are ordered according to the individual -C* elements of the Sequences given in SEQNAM. The contents of SEQNAM -C* and SEQFLG and, consequently of DATA, are determined by the BUFR -C* table file referenced by LUNTBL. Each entry in SEQNAM has a list of -C* parameters associated with it in the table. This list is read from -C* the table and the number of parameters is determined. This -C* information is stored in CLIST and NPP for future calls to BFRIZE. -C* If the parameters associated with the entry in SEQNAM exist on NLVL -C* levels, the corresponding array element of SEQFLG must be .true.; -C* otherwise, it is .false. -C* -C* Profile data in array DATA may be stored such that contiguous -C* elements are values of different parameters on the same level -C* (parameter-wise storage) or the same parameter on different levels -C* (level-wise storage). If LVLWISE=.false. parameter-wise storage -C* is assumed; otherwise, LVLWISE=.true. and level-wise storage is -C* assumed. 
-C* -C* The example below shows the contents of SEQNAM, SEQFLG, and DATA -C* for a case when NLVL=3, LVLWISE=.true., and the table file has the -C* following entries for the Mnemonic Sequences: -C* -C* MNEMONIC | SEQUENCE -C* -C* MODELOUT | HDR {PROF} SFC -C* HDR | RLAT RLON -C* PROF | PRES TMPK -C* SFC | PMSL PRCP -C* -C* SEQNAM and SEQFLG have the following assigned entries: -C* -C* INDEX SEQNAM SEQFLG -C* 1 HDR .false. -C* 2 PROF .true. -C* 3 SFC .false. -C* -C* DATA must contain the following values in this order: -C* -C* DATA (1) = rlat DATA (6) = tmpk (1) -C* DATA (2) = rlon DATA (7) = tmpk (2) -C* DATA (3) = pres (1) DATA (8) = tmpk (3) -C* DATA (4) = pres (2) DATA (9) = pmsl -C* DATA (5) = pres (3) DATA (10) = prcp -C* -C* The lower-case names above signify numerical values of the -C* parameters. The values of multiple level parameters are stored -C* contiguously. -C* -C* To add a new output parameter, update the table file by adding the -C* Mnemonic for the parameter to an existing Sequence or by adding -C* a new Sequence. If a new Sequence has been added, SEQNAM and -C* SEQFLG must be updated accordingly. In any case, the new output -C* parameter value must be placed in the correct position within the -C* array DATA. -C* -C* CLIST contains the lists of parameter names for each element of -C* SEQNAM. If CLIST (1) is blank, BFRHDR is called with SEQNAM and -C* SEQFLG as inputs to load the names of the parameters into CLIST; -C* otherwise, the names in CLIST are used. For every element of -C* SEQNAM there is a corresponding element of CLIST. For each element -C* of CLIST, there is a corresponding element of NPP giving the number -C* of parameter names in the list. -C* -C* DATA (i) = 10.E+10 is the missing value. -C* -C* WRKD is a scratch array and should be dimensioned the same size as -C* data. WRKD is not used if LVLWISE=.false. -C* -C* BFRIZE ( LUNTBL, LUNBFR, SBSET, IYR, IMN, IDY, IHR, -C* SEQNAM, SEQFLG, NSEQ, LVLWISE, DATA, NLVL, CLIST, NPP, -C* WRKD, IRET ) -C* -C* Input parameters: -C* LUNTBL INTEGER Unit number of BUFR Table file -C* LUNBFR INTEGER Unit number of BUFR data file -C* SBSET CHAR* BUFR subset name -C* IYR INTEGER 4-digit year -C* IMN INTEGER 2-digit month -C* IDY INTEGER 2-digit day -C* IHR INTEGER 2-digit cycle hour -C* SEQNAM (NSEQ) CHAR* Mnemonic Sequence names -C* SEQFLG (NSEQ) LOGICAL Multi-level flag -C* NSEQ INTEGER Number of Sequence names & flags -C* LVLWISE LOGICAL Level-wise profile data flag -C* DATA (*) REAL Data array -C* NLVL INTEGER Number of levels -C* -C* Input and Output parameters: -C* CLIST (NSEQ) CHAR* Parameter name lists -C* NPP (NSEQ) INTEGER Number of parameter names -C* -C* Output parameters: -C* WRKD (*) REAL Array of reordered profile data -C* IRET INTEGER Return code -C* 0 = normal return -C** -C* Log: -C* K. Brill/NMC 05/94 -C* K. Brill/NMC 06/94 Added LVLWISE, CLIST, NPP, WRKD -C 98-08-28 ROZWODOSKI MADE CHANGES FOR Y2K COMPLIANCE. -C*********************************************************************** - REAL*8 data (*) - INTEGER npp (*), nlvl (*) - CHARACTER*(*) seqnam (*), sbset - LOGICAL seqflg (*), lvlwise - CHARACTER*(*) clist (*) - REAL*8 wrkd (*) -C----------------------------------------------------------------------- - iret = 0 -c print*,'Bufriz.f is creating bufr file' - -C -C* Close BUFR file if LUNTBL = 0. -C - IF ( luntbl .eq. 0 ) THEN - CALL CLOSBF ( lunbfr ) - RETURN - END IF -C -C* Check the status of the output BUFR file. -C - CALL STATUS ( lunbfr, lun, iopn, imm ) - IF ( iopn .eq. 
0 ) THEN - CALL SETBLOCK(1) - CALL OPENBF ( lunbfr, 'OUT', luntbl ) - CALL DATELEN ( 10 ) - END IF -C -C* Open a new message. -C - idate = iyr * 1000000 + imn * 10000 + idy * 100 + ihr -c print *, 'Bufriz idate = ', idate - CALL OPENMB ( lunbfr, sbset, idate ) -C -C* Create the parameter name lists if CLIST (1) is blank. -C -c print *, 'clist (1) = ', clist(1) -c print *, 'npp (1) = ', npp(1) -c print *, 'seqnam (1) = ', seqnam(1) -c print *, 'seqflg (1) = ', seqflg(1) -c print *, 'nseq = ', nseq - IF ( clist (1) .eq. ' ' ) THEN - DO is = 1, nseq - CALL BFRHDR ( luntbl, seqnam (is), seqflg (is), - + clist (is), npp (is), iret ) - IF ( iret .ne. 0 ) RETURN - END DO - END IF -C -C* Load the sequences. -C - idpntr = 1 - indxlv = 0 - DO is = 1, nseq - np = npp (is) - IF ( seqflg (is) ) THEN - indxlv = indxlv + 1 - IF ( lvlwise ) THEN -C -C* This is level-wise multi-level data. -C - istrt = idpntr - indx = 0 - DO k = 1, nlvl (indxlv) - DO ip = 1, np - indx = indx + 1 - wrkd ( indx ) = - + data ( istrt + (ip-1) * nlvl (indxlv) ) - END DO - istrt = istrt + 1 - END DO - CALL UFBINT ( lunbfr, wrkd, np, nlvl (indxlv), - + irtrn, clist (is) ) - ELSE -C -C* This is parameter-wise multi-level data. -C - CALL UFBINT ( lunbfr, data (idpntr), np, - + nlvl (indxlv), irtrn, clist (is) ) - END IF - idpntr = idpntr + np * nlvl (indxlv) - ELSE -C -C* This is single-level data. -C - CALL UFBINT ( lunbfr, data (idpntr), - + np, 1, irtrn, clist (is) ) - idpntr = idpntr + np - END IF - END DO - CALL WRITSB ( lunbfr ) -C* - RETURN - END diff --git a/sorc/gfs_bufr.fd/buff.f b/sorc/gfs_bufr.fd/buff.f deleted file mode 100755 index 5441fbf5a8..0000000000 --- a/sorc/gfs_bufr.fd/buff.f +++ /dev/null @@ -1,92 +0,0 @@ - subroutine buff(nint1,nend1,nint3,nend3,npoint,idate,jdate,levs, - & dird,lss,istat,sbset,seqflg,clist,npp,wrkd) - character*150 dird, fnbufr, fmto -!! integer nint, nend, npoint, idate(4), levs, jdate - integer nint1, nend1, nint3, nend3 - integer npoint, idate(4), levs, jdate - real*8 data(6*levs+25), wrkd(1) - integer idtln, nf, nfile, np - integer lss, istat(npoint), ios - CHARACTER*150 FILESEQ - CHARACTER*8 SBSET - LOGICAL SEQFLG(4) - CHARACTER*80 CLIST(4) - INTEGER NPP(4) - CHARACTER*8 SEQNAM(4) - FMTO = '(A,".",I6.6,".",I10)' - idtln = 8 - nfile = 20 -C print *, 'inside buff.f nint1,nend1,nint3,nend3,jdate=' -C print *, nint1,nend1,nint3,nend3,jdate - do nf = 0, nend1, nint1 - nfile = nfile + 1 - rewind nfile - enddo - do nf = nend1+nint3, nend3, nint3 - nfile = nfile + 1 - rewind nfile - enddo - do np = 1, npoint -C OPEN BUFR OUTPUT FILE. - write(fnbufr,fmto) dird(1:lss),istat(np),jdate - print *, ' fnbufr =', fnbufr - open(unit=19,file=fnbufr,form='unformatted', - & status='new', iostat=ios) - IF ( ios .ne. 0 ) THEN - WRITE (6,*) ' CANNOT open ', 19 - STOP - END IF - CALL OPENBF ( 19, 'OUT', 1 ) - nfile = 20 - do nf = 0, nend1, nint1 - nfile = nfile + 1 - read(nfile) data - if(np.eq.1) then - print *, ' creating bufr file for np, nfile =', - & np, nfile - endif -CC WRITE DATA MESSAGE TO BUFR OUTPUT FILE. -CC LUNTBL=-9 BECAUSE BUFR TABLE FILE NOT USED HERE. -CC SEQNAM=XXXXXX BECAUSE MNEMONIC SEQUENCE NAMES NOT USED HERE. - CALL BFRIZE ( -9, 19, SBSET, - & idate(4), iDATE(2), - & iDATE(3), iDATE(1), - & 'XXXXXX', SEQFLG, 4, .FALSE., DATA, levs, - & CLIST, NPP, WRKD, IRET ) - IF ( IRET .NE. 0 ) THEN - PRINT *,' BFRIZE FAILED ' - ENDIF -c 300 continue - enddo -C 3hourly output starts here -!! print *, 'buff.f nfile,nend1+nint3,nend3,nint3=' -!! 
print *, nfile,nend1+nint3,nend3,nint3 - do nf = nend1+nint3, nend3, nint3 - nfile = nfile + 1 - read(nfile) data - if(np.eq.1) then - print *, ' creating bufr file for np, nfile =', - & np, nfile - endif -C print *, 'read2 in fort(nfile) =', nfile -CC WRITE DATA MESSAGE TO BUFR OUTPUT FILE. -CC LUNTBL=-9 BECAUSE BUFR TABLE FILE NOT USED HERE. -CC SEQNAM=XXXXXX BECAUSE MNEMONIC SEQUENCE NAMES NOT USED HERE. - CALL BFRIZE ( -9, 19, SBSET, - & idate(4), iDATE(2), - & iDATE(3), iDATE(1), - & 'XXXXXX', SEQFLG, 4, .FALSE., DATA, levs, - & CLIST, NPP, WRKD, IRET ) - IF ( IRET .NE. 0 ) THEN - PRINT *,' BFRIZE FAILED ' - ENDIF - enddo - CALL BFRIZE ( 0, 19, SBSET, - & IDATE(4), IDATE(2), - & IDATE(3), IDATE(1), - & 'XXXXXX', SEQFLG, 4, .FALSE., DATA, levs, - & CLIST, NPP, WRKD, IRET ) - call closbf(19) - enddo - return - end diff --git a/sorc/gfs_bufr.fd/calpreciptype.f b/sorc/gfs_bufr.fd/calpreciptype.f deleted file mode 100644 index 2307231337..0000000000 --- a/sorc/gfs_bufr.fd/calpreciptype.f +++ /dev/null @@ -1,1616 +0,0 @@ -SUBROUTINE CALPRECIPTYPE(kdt,nrcm,im,ix,lm,lp1,randomno, & - xlat,xlon, & - gt0,gq0,prsl,prsi,PREC, & !input - phii,n3dfercld,TSKIN,SR,phy_f3d, & !input - DOMR,DOMZR,DOMIP,DOMS) !output -! SUBROUTINE CALPRECIPTYPE(nrcm,randomno,im,lm,lp1,T,Q,PMID,PINT,PREC, & !input -! ZINT,n3dfercld,TSKIN,SR,F_RimeF, & !input -! DOMR,DOMZR,DOMIP,DOMS) !output -!$$$ SUBPROGRAM DOCUMENTATION BLOCK -! . . . -! SUBPROGRAM: CALPRECIPTYPE COMPUTE DOMINANT PRECIP TYPE -! PRGRMMR: CHUANG ORG: W/NP2 DATE: 2008-05-28 -! -! -! ABSTRACT: -! THIS ROUTINE COMPUTES PRECIPITATION TYPE. -! . It is adopted from post but was made into a column to used by GFS model -! -! -! use vrbls3d -! use vrbls2d -! use soil -! use masks -! use params_mod -! use ctlblk_mod -! use rqstfld_mod - USE FUNCPHYS, ONLY : gfuncphys,fpvs,ftdp,fpkap,ftlcl,stma,fthe - USE PHYSCONS -!- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - implicit none -! -! INCLUDE "mpif.h" -! -! IN NGM SUBROUTINE OUTPUT WE FIND THE FOLLOWING COMMENT. -! "IF THE FOLLOWING THRESHOLD VALUES ARE CHANGED, CONTACT -! TDL/SYNOPTIC-SCALE TECHNIQUES BRANCH (PAUL DALLAVALLE -! AND JOHN JENSENIUS). THEY MAY BE USING IT IN ONE OF -! THEIR PACKING CODES." THE THRESHOLD VALUE IS 0.01 INCH -! OR 2.54E-4 METER. PRECIPITATION VALUES LESS THAN THIS -! THRESHOLD ARE SET TO MINUS ONE TIMES THIS THRESHOLD. - - real,PARAMETER :: PTHRESH = 0.0 -! -! SET CELCIUS TO KELVIN AND SECOND TO HOUR CONVERSION. - integer,PARAMETER :: NALG = 5 -! -! DECLARE VARIABLES. -! - integer,intent(in) :: kdt,nrcm,im,ix,lm,lp1,n3dfercld - real,intent(in) :: xlat(im),xlon(im) - real,dimension(im),intent(in) :: PREC,SR,TSKIN - real,intent(in) :: randomno(ix,nrcm) - real,dimension(ix,LM),intent(in) :: gt0,gq0,prsl,phy_f3d - real,dimension(ix,lp1),intent(in) :: prsi,phii - real,dimension(im),intent(out) :: DOMR,DOMZR,DOMIP,DOMS - INTEGER :: IWX1,IWX4,IWX5 - REAL :: IWX2,IWX3 - REAL :: ES,QC - REAL :: SLEET(NALG),RAIN(NALG),FREEZR(NALG),SNOW(NALG) - real,dimension(LM) :: T,Q,PMID,F_RimeF - real,dimension(lp1) :: pint,zint - REAL, ALLOCATABLE :: RH(:) - REAL(kind=kind_phys), ALLOCATABLE :: TD8(:) - integer :: I,IWX,ISNO,IIP,IZR,IRAIN,k,k1 - real :: time_vert,time_ncep,time_ramer,time_bourg,time_revised,& - time_dominant,btim,timef - real(kind=kind_phys) :: pv8,pr8,pk8,tr8,tdpd8,tlcl8,thelcl8 - real(kind=kind_phys) :: qwet8,t8(lm) - real(kind=kind_phys),allocatable :: twet8(:) - -! convert geopotential to height -! do l=1,lp1 -! zint(l)=zint(l)/con_g -! end do -! 
DON'T FORGET TO FLIP 3D ARRAYS AROUND BECAUSE GFS COUNTS FROM BOTTOM UP - - ALLOCATE ( RH(LM),TD8(LM),TWET8(LM) ) - -! Create look up table - call gfuncphys - - time_vert = 0. - time_ncep = 0. - time_ramer = 0. - time_bourg = 0. - time_revised = 0. - - do i=1,im -! print *, 'in calprecip xlat/xlon=', xlat(im),xlon(im),'levs=',lm - do k=1,lm - k1 = lm-k+1 - t8(k1) = gt0(i,k) - q(k1) = gq0(i,k) - pmid(k1) = prsl(i,k) - f_rimef(k1) = phy_f3d(i,k) - pv8 = pmid(k1)*q(k1)/(con_eps-con_epsm1*q(k1)) - td8(k1) = ftdp(pv8) - tdpd8 = t8(k1)-td8(k1) - if(pmid(k1)>=50000.)then ! only compute twet below 500mb to save time - if(tdpd8.gt.0.) then - pr8 = pmid(k1) - tr8 = t8(k1) - pk8 = fpkap(pr8) - tlcl8 = ftlcl(tr8,tdpd8) - thelcl8 = fthe(tlcl8,pk8*tlcl8/tr8) - call stma(thelcl8,pk8,twet8(k1),qwet8) - else - twet8(k1)=t8(k1) - endif - endif - ES = FPVS(T8(k1)) - ES = MIN(ES,PMID(k1)) - QC = CON_EPS*ES/(PMID(k1)+CON_EPSM1*ES) - RH(k1) = MAX(con_epsq,Q(k1))/QC - k1 = lp1-k+1 - pint(k1) = prsi(i,k) - zint(k1) = phii(i,k) !height in meters - enddo - pint(1) = prsi(i,lp1) - zint(1) = phii(i,lp1) - -! print*,'debug in calpreciptype: i,im,lm,lp1,xlon,xlat,prec,tskin,sr,nrcm,randomno,n3dfercld ', & -! i,im,lm,lp1,xlon(i)*57.29578,xlat(i)*57.29578,prec(i),tskin(i),sr(i), & -! nrcm,randomno(i,1:nrcm),n3dfercld -! do l=1,lm -! print*,'debug in calpreciptype: l,t,q,p,pint,z,twet', & -! l,t(l),q(l), & -! pmid(l),pint(l),zint(l),twet(l) -! end do -! print*,'debug in calpreciptype: lp1,pint,z ', lp1,pint(lp1),zint(lp1) -! end if -! end debug print statement - - CALL CALWXT(lm,lp1,T8(1),Q(1),PMID(1),PINT(1),PREC(i), & - PTHRESH,con_fvirt,con_rog,con_epsq, & - ZINT(1),IWX1,TWET8(1)) - IWX = IWX1 - ISNO = MOD(IWX,2) - IIP = MOD(IWX,4)/2 - IZR = MOD(IWX,8)/4 - IRAIN = IWX/8 - SNOW(1) = ISNO*1.0 - SLEET(1) = IIP*1.0 - FREEZR(1) = IZR*1.0 - RAIN(1) = IRAIN*1.0 -! print *, 'inside calprecip after calwxt iwx =',iwx -! DOMINANT PRECIPITATION TYPE -!GSM IF DOMINANT PRECIP TYPE IS REQUESTED, 4 MORE ALGORITHMS -!GSM WILL BE CALLED. THE TALLIES ARE THEN SUMMED IN -!GSM CALWXT_DOMINANT - - -! write(0,*)' i=',i,' lm=',lm,' lp1=',lp1,' T=',T(1),q(1),pmid(1) & -! &,' pint=',pint(1),' prec=',prec(i),' pthresh=',pthresh - - CALL CALWXT_RAMER(lm,lp1,T8(1),Q(1),PMID(1),RH(1),TD8(1), & - PINT(1),PREC(i),PTHRESH,IWX2) -! - IWX = NINT(IWX2) - ISNO = MOD(IWX,2) - IIP = MOD(IWX,4)/2 - IZR = MOD(IWX,8)/4 - IRAIN = IWX/8 - SNOW(2) = ISNO*1.0 - SLEET(2) = IIP*1.0 - FREEZR(2) = IZR*1.0 - RAIN(2) = IRAIN*1.0 -! print *, 'inside calprecip after ramer iwx=',iwx -! BOURGOUIN ALGORITHM - CALL CALWXT_BOURG(LM,LP1,randomno(i,1),con_g,PTHRESH, & - & T8(1),Q(1),PMID(1),PINT(1),PREC(i),ZINT(1),IWX3) - -! - IWX = NINT(IWX3) - ISNO = MOD(IWX,2) - IIP = MOD(IWX,4)/2 - IZR = MOD(IWX,8)/4 - IRAIN = IWX/8 - SNOW(3) = ISNO*1.0 - SLEET(3) = IIP*1.0 - FREEZR(3) = IZR*1.0 - RAIN(3) = IRAIN*1.0 -! print *, 'inside calprecip after bourg iwx=',iwx - -! REVISED NCEP ALGORITHM - CALL CALWXT_REVISED(LM,LP1,T8(1),Q(1),PMID(1),PINT(1),PREC(i),PTHRESH, & - con_fvirt,con_rog,con_epsq,ZINT(1),TWET8(1),IWX4) - -! - IWX = IWX4 - ISNO = MOD(IWX,2) - IIP = MOD(IWX,4)/2 - IZR = MOD(IWX,8)/4 - IRAIN = IWX/8 - SNOW(4) = ISNO*1.0 - SLEET(4) = IIP*1.0 - FREEZR(4) = IZR*1.0 - RAIN(4) = IRAIN*1.0 -! print *, 'inside calprecip after revised iwx=',iwx -! EXPLICIT ALGORITHM (UNDER 18 NOT ADMITTED WITHOUT PARENT -! OR GUARDIAN) - - IF(n3dfercld == 3) then ! Ferrier's scheme - CALL CALWXT_EXPLICIT(LM,PTHRESH, & - TSKIN(i),PREC(i),SR(i),F_RimeF(1),IWX5) - else - IWX5 = 0 - endif -! 
- IWX = IWX5 - ISNO = MOD(IWX,2) - IIP = MOD(IWX,4)/2 - IZR = MOD(IWX,8)/4 - IRAIN = IWX/8 - SNOW(5) = ISNO*1.0 - SLEET(5) = IIP*1.0 - FREEZR(5) = IZR*1.0 - RAIN(5) = IRAIN*1.0 -! - CALL CALWXT_DOMINANT(NALG,PREC(i),PTHRESH,RAIN(1),FREEZR(1),SLEET(1), & - SNOW(1),DOMR(i),DOMZR(i),DOMIP(i),DOMS(i)) - -! if (DOMS(i).eq.1.) then -! print *, 'Found SNOW at xlat/xlon',xlat,xlon -! elseif (DOMR(i).eq.1.) then -! print *, 'Found RAIN at xlat/xlon',xlat,xlon -! elseif(DOMZR(i).eq.1.) then -! print *, 'Found FREEZING RAIN at xlat/xlon',xlat,xlon -! elseif(DOMIP(i).eq.1.) then -! print *, 'Found ICE at xlat/xlon',xlat,xlon -! endif -! print *, 'In calpre DOMS,DOMR,DOMZR,DOMIP =', int(DOMS),int(DOMR),int(DOMZR),int(DOMIP) - - enddo ! end loop for i - - DEALLOCATE (TWET8,RH,TD8) - RETURN - END -! -!&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&& -! - SUBROUTINE CALWXT(lm,lp1,T,Q,PMID,PINT,PREC, & - PTHRESH,D608,ROG,EPSQ, & - ZINT,IWX,TWET) -! -! FILE: CALWXT.f -! WRITTEN: 11 NOVEMBER 1993, MICHAEL BALDWIN -! REVISIONS: -! 30 SEPT 1994-SETUP NEW DECISION TREE (M BALDWIN) -! 12 JUNE 1998-CONVERSION TO 2-D (T BLACK) -! 01-10-25 H CHUANG - MODIFIED TO PROCESS HYBRID MODEL OUTPUT -! 02-01-15 MIKE BALDWIN - WRF VERSION -! -! -! ROUTINE TO COMPUTE PRECIPITATION TYPE USING A DECISION TREE -! APPROACH THAT USES VARIABLES SUCH AS INTEGRATED WET BULB TEMP -! BELOW FREEZING AND LOWEST LAYER TEMPERATURE -! -! SEE BALDWIN AND CONTORNO PREPRINT FROM 13TH WEATHER ANALYSIS -! AND FORECASTING CONFERENCE FOR MORE DETAILS -! (OR BALDWIN ET AL, 10TH NWP CONFERENCE PREPRINT) -! -! use params_mod -! use ctlblk_mod -!- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - implicit none -! -! INPUT: -! T,Q,PMID,HTM,LMH,PREC,ZINT -! - integer,intent(in):: lm,lp1 -! real,intent(in):: pthresh - real,dimension(LM),intent(in) :: Q,PMID - real*8,dimension(LM),intent(in) :: T,TWET - real,dimension(LP1),intent(in) :: ZINT,PINT - integer,intent(out) :: IWX - real,intent(in) :: PREC,PTHRESH,D608,ROG,EPSQ -! real,intent(out) :: ZWET - - -! OUTPUT: -! IWX - INSTANTANEOUS WEATHER TYPE. -! ACTS LIKE A 4 BIT BINARY -! 1111 = RAIN/FREEZING RAIN/ICE PELLETS/SNOW -! WHERE THE ONE'S DIGIT IS FOR SNOW -! THE TWO'S DIGIT IS FOR ICE PELLETS -! THE FOUR'S DIGIT IS FOR FREEZING RAIN -! AND THE EIGHT'S DIGIT IS FOR RAIN -! -! INTERNAL: -! -! REAL, ALLOCATABLE :: TWET(:) - real, parameter :: D00=0.0 - integer KARR,LICEE - real TCOLD,TWARM - -! SUBROUTINES CALLED: -! WETBULB -! -! -! INITIALIZE WEATHER TYPE ARRAY TO ZERO (IE, OFF). -! WE DO THIS SINCE WE WANT IWX TO REPRESENT THE -! INSTANTANEOUS WEATHER TYPE ON RETURN. -! -! -! ALLOCATE LOCAL STORAGE -! - - integer L,LICE,IWRML,IFRZL - real PSFCK,TDCHK,A,TDKL,TDPRE,TLMHK,TWRMK,AREAS8,AREAP4, & - SURFW,SURFC,DZKL,AREA1,PINTK1,PINTK2,PM150,PKL,TKL,QKL - -! ALLOCATE ( TWET(LM) ) -! -!!$omp parallel do - IWX = 0 -! ZWET=SPVAL -! -!!$omp parallel do -!!$omp& private(a,pkl,psfck,qkl,tdchk,tdkl,tdpre,tkl) - -! -! SKIP THIS POINT IF NO PRECIP THIS TIME STEP -! - IF (PREC.LE.PTHRESH) GOTO 800 -! -! FIND COLDEST AND WARMEST TEMPS IN SATURATED LAYER BETWEEN -! 70 MB ABOVE GROUND AND 500 MB -! ALSO FIND HIGHEST SATURATED LAYER IN THAT RANGE -! -!meb - PSFCK=PINT(LM+1) -!meb - TDCHK=2.0 - 760 TCOLD=T(LM) - TWARM=T(LM) - LICEE=LM -! - DO 775 L=1,LM - QKL=Q(L) - QKL=MAX(EPSQ,QKL) - TKL=T(L) - PKL=PMID(L) -! -! SKIP PAST THIS IF THE LAYER IS NOT BETWEEN 70 MB ABOVE GROUND -! AND 500 MB -! 
- IF (PKL.LT.50000.0.OR.PKL.GT.PSFCK-7000.0) GOTO 775 - A=LOG(QKL*PKL/(6.1078*(0.378*QKL+0.622))) - TDKL=(237.3*A)/(17.269-A)+273.15 - TDPRE=TKL-TDKL - IF (TDPRE.LT.TDCHK.AND.TKL.LT.TCOLD) TCOLD=TKL - IF (TDPRE.LT.TDCHK.AND.TKL.GT.TWARM) TWARM=TKL - IF (TDPRE.LT.TDCHK.AND.L.LT.LICEE) LICEE=L - 775 CONTINUE -! -! IF NO SAT LAYER AT DEW POINT DEP=TDCHK, INCREASE TDCHK -! AND START AGAIN (BUT DON'T MAKE TDCHK > 6) -! - IF (TCOLD==T(LM).AND.TDCHK<6.0) THEN - TDCHK=TDCHK+2.0 - GOTO 760 - ENDIF - 800 CONTINUE -! -! LOWEST LAYER T -! - KARR=0 - IF (PREC.LE.PTHRESH) GOTO 850 - TLMHK=T(LM) -! -! DECISION TREE TIME -! - IF (TCOLD>269.15) THEN - IF (TLMHK.LE.273.15) THEN -! TURN ON THE FLAG FOR -! FREEZING RAIN = 4 -! IF ITS NOT ON ALREADY -! IZR=MOD(IWX(I,J),8)/4 -! IF (IZR.LT.1) IWX(I,J)=IWX(I,J)+4 - IWX=IWX+4 - GOTO 850 - ELSE -! TURN ON THE FLAG FOR -! RAIN = 8 -! IF ITS NOT ON ALREADY -! IRAIN=IWX(I,J)/8 -! IF (IRAIN.LT.1) IWX(I,J)=IWX(I,J)+8 - IWX=IWX+8 - GOTO 850 - ENDIF - ENDIF - KARR=1 - 850 CONTINUE -! -! COMPUTE WET BULB ONLY AT POINTS THAT NEED IT -! -! CALL WETBULB(lm,T,Q,PMID,KARR,TWET) -! CALL WETFRZLVL(TWET,ZWET) -! -!!$omp parallel do -!!$omp& private(area1,areap4,areas8,dzkl,ifrzl,iwrml,lice, -!!$omp& lmhk,pintk1,pintk2,pm150,psfck,surfc,surfw, -!!$omp& tlmhk,twrmk) - - IF(KARR.GT.0)THEN - LICE=LICEE -!meb - PSFCK=PINT(LM+1) -!meb - TLMHK=T(LM) - TWRMK=TWARM -! -! TWET AREA VARIABLES -! CALCULATE ONLY WHAT IS NEEDED -! FROM GROUND TO 150 MB ABOVE SURFACE -! FROM GROUND TO TCOLD LAYER -! AND FROM GROUND TO 1ST LAYER WHERE WET BULB T < 0.0 -! -! PINTK1 IS THE PRESSURE AT THE BOTTOM OF THE LAYER -! PINTK2 IS THE PRESSURE AT THE TOP OF THE LAYER -! -! AREAP4 IS THE AREA OF TWET ABOVE -4 C BELOW HIGHEST SAT LYR -! - AREAS8=D00 - AREAP4=D00 - SURFW =D00 - SURFC =D00 -! - DO 1945 L=LM,LICE,-1 - DZKL=ZINT(L)-ZINT(L+1) - AREA1=(TWET(L)-269.15)*DZKL - IF (TWET(L).GE.269.15) AREAP4=AREAP4+AREA1 - 1945 CONTINUE -! - IF (AREAP4.LT.3000.0) THEN -! TURN ON THE FLAG FOR -! SNOW = 1 -! IF ITS NOT ON ALREADY -! ISNO=MOD(IWX(I,J),2) -! IF (ISNO.LT.1) IWX(I,J)=IWX(I,J)+1 - IWX=IWX+1 - GO TO 1900 - ENDIF -! -! AREAS8 IS THE NET AREA OF TWET W.R.T. FREEZING IN LOWEST 150MB -! - PINTK1=PSFCK - PM150=PSFCK-15000. -! - DO 1955 L=LM,1,-1 - PINTK2=PINT(L) - IF(PINTK1.LT.PM150)GO TO 1950 - DZKL=ZINT(L)-ZINT(L+1) -! -! SUM PARTIAL LAYER IF IN 150 MB AGL LAYER -! - IF(PINTK2.LT.PM150) & - DZKL=T(L)*(Q(L)*D608+1.0)*ROG*LOG(PINTK1/PM150) - AREA1=(TWET(L)-273.15)*DZKL - AREAS8=AREAS8+AREA1 - 1950 PINTK1=PINTK2 - 1955 CONTINUE -! -! SURFW IS THE AREA OF TWET ABOVE FREEZING BETWEEN THE GROUND -! AND THE FIRST LAYER ABOVE GROUND BELOW FREEZING -! SURFC IS THE AREA OF TWET BELOW FREEZING BETWEEN THE GROUND -! AND THE WARMEST SAT LAYER -! - IFRZL=0 - IWRML=0 -! - DO 2050 L=LM,1,-1 - IF (IFRZL.EQ.0.AND.T(L).LT.273.15) IFRZL=1 - IF (IWRML.EQ.0.AND.T(L).GE.TWRMK) IWRML=1 -! - IF (IWRML.EQ.0.OR.IFRZL.EQ.0) THEN -! if(pmid(l) < 50000.)print*,'need twet above 500mb' - DZKL=ZINT(L)-ZINT(L+1) - AREA1=(TWET(L)-273.15)*DZKL - IF(IFRZL.EQ.0.AND.TWET(L).GE.273.15)SURFW=SURFW+AREA1 - IF(IWRML.EQ.0.AND.TWET(L).LE.273.15)SURFC=SURFC+AREA1 - ENDIF - 2050 CONTINUE - IF(SURFC.LT.-3000.0.OR. & - (AREAS8.LT.-3000.0.AND.SURFW.LT.50.0)) THEN -! TURN ON THE FLAG FOR -! ICE PELLETS = 2 -! IF ITS NOT ON ALREADY -! IIP=MOD(IWX(I,J),4)/2 -! IF (IIP.LT.1) IWX(I,J)=IWX(I,J)+2 - IWX=IWX+2 - GOTO 1900 - ENDIF -! - IF(TLMHK.LT.273.15) THEN -! TURN ON THE FLAG FOR -! FREEZING RAIN = 4 -! IF ITS NOT ON ALREADY -! IZR=MOD(IWX(K),8)/4 -! 
IF (IZR.LT.1) IWX(K)=IWX(K)+4 - IWX=IWX+4 - ELSE -! TURN ON THE FLAG FOR -! RAIN = 8 -! IF ITS NOT ON ALREADY -! IRAIN=IWX(K)/8 -! IF (IRAIN.LT.1) IWX(K)=IWX(K)+8 - IWX=IWX+8 - ENDIF - ENDIF - 1900 CONTINUE -!--------------------------------------------------------- -! DEALLOCATE (TWET) - - RETURN - END -! -! -!ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc -! -! DoPhase is a subroutine written and provided by Jim Ramer at NOAA/FSL -! -! Ramer, J, 1993: An empirical technique for diagnosing precipitation -! type from model output. Preprints, 5th Conf. on Aviation -! Weather Systems, Vienna, VA, Amer. Meteor. Soc., 227-230. -! -! CODE ADAPTED FOR WRF POST 24 AUGUST 2005 G MANIKIN -!ccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc -! - SUBROUTINE CALWXT_RAMER(lm,lp1, & - T,Q,PMID,RH,TD,PINT,PREC,PTHRESH,PTYP) - -! SUBROUTINE dophase(pq, ! input pressure sounding mb -! + t, ! input temperature sounding K -! + pmid, ! input pressure -! + pint, ! input interface pressure -! + q, ! input spec humidityfraction -! + lmh, ! input number of levels in sounding -! + prec, ! input amount of precipitation -! + ptyp) ! output(2) phase 2=Rain, 3=Frzg, 4=Solid, -! 6=IP JC 9/16/99 -! use params_mod -! use CTLBLK_mod -!- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - implicit none -! - real,PARAMETER :: twice=266.55,rhprcp=0.80,deltag=1.02, & - & emelt=0.045,rlim=0.04,slim=0.85 - real,PARAMETER :: twmelt=273.15,tz=273.15,efac=1.0 ! specify in params now -! - INTEGER*4 i, k1, lll, k2, toodry -! - REAL xxx ,mye, icefrac - integer,intent(in) :: lm,lp1 - real,DIMENSION(LM),intent(in) :: Q,PMID,RH - real*8,DIMENSION(LM),intent(in) :: T,TD - real,DIMENSION(LP1),intent(in) :: PINT - real,intent(in) :: PREC,PTHRESH - real,intent(out) :: PTYP -! - real,DIMENSION(LM) :: TQ,PQ,RHQ - real,DIMENSION(LM) :: TWQ -! - integer J,L,LEV,ii - real RHMAX,TWMAX,PTOP,dpdrh,twtop,rhtop,wgt1,wgt2, & - rhavg,dtavg,dpk,ptw,pbot -! real b,qtmp,rate,qc - real,external :: xmytw -! -! Initialize. - icefrac = -9999. -! - - PTYP = 0 - DO L = 1,LM - LEV = LP1 - L -! P(L)=PMID(L) -! QC=PQ0/P(L) * EXP(A2*(T(L)-A3)/(T(L)-A4)) -!GSM forcing Q (QTMP) to be positive to deal with negative Q values -! causing problems later in this subroutine -! QTMP=MAX(H1M12,Q(L)) -! RHQTMP(LEV)=QTMP/QC - RHQ(LEV) = RH(L) - PQ(LEV) = PMID(L) * 0.01 - TQ(LEV) = T(L) - enddo - - -! -! SKIP THIS POINT IF NO PRECIP THIS TIME STEP -! - IF (PREC <= PTHRESH) return - -! -!CC RATE RESTRICTION REMOVED BY JOHN CORTINAS 3/16/99 -! -! Construct wet-bulb sounding, locate generating level. - twmax = -999.0 - rhmax = 0.0 - k1 = 0 ! top of precip generating layer - k2 = 0 ! layer of maximum rh -! - IF (rhq(1) < rhprcp) THEN - toodry = 1 - ELSE - toodry = 0 - END IF -! - pbot = pq(1) -! NQ=LM - DO L = 1, lm -! xxx = tdofesat(esat(tq(L))*rhq(L)) - xxx = td(l) !HC: use TD consistent with GFS ice physics - if (xxx < -500.) return - twq(L) = xmytw(tq(L),xxx,pq(L)) - twmax = max(twq(L),twmax) - IF (pq(L) >= 400.0) THEN - IF (rhq(L) > rhmax) THEN - rhmax = rhq(L) - k2 = L - END IF -! - IF (L /= 1) THEN - IF (rhq(L) >= rhprcp .or. toodry == 0) THEN - IF (toodry /= 0) THEN - dpdrh = log(pq(L)/pq(L-1)) / (rhq(L)-RHQ(L-1)) - pbot = exp(log(pq(L))+(rhprcp-rhq(L))*dpdrh) -! 
- ptw = pq(L) - toodry = 0 - ELSE IF (rhq(L)>= rhprcp) THEN - ptw = pq(L) - ELSE - toodry = 1 - dpdrh = log(pq(L)/pq(L-1)) / (rhq(L)-rhq(L-1)) - ptw = exp(log(pq(L))+(rhprcp-rhq(L))*dpdrh) - -!lin dpdrh = (Pq(i)-Pq(i-1))/(Rhq(i)-Rhq(i-1)) -!lin ptw = Pq(i)+(rhprcp-Rhq(i))*dpdrh -! - END IF -! - IF (pbot/ptw >= deltag) THEN -!lin If (pbot-ptw.lt.deltag) Goto 2003 - k1 = L - ptop = ptw - END IF - END IF - END IF - END IF - enddo -! -! Gross checks for liquid and solid precip which dont require generating level. -! - IF (twq(1) >= 273.15+2.0) THEN - ptyp = 8 ! liquid - icefrac = 0.0 - return - END IF -! - IF (twmax <= twice) THEN - icefrac = 1.0 - ptyp = 1 ! solid - return - END IF -! -! Check to see if we had no success with locating a generating level. -! - IF (k1 == 0) return -! - IF (ptop == pq(k1)) THEN - twtop = twq(k1) - rhtop = rhq(k1) - k2 = k1 - k1 = k1 - 1 - ELSE - k2 = k1 - k1 = k1 - 1 - wgt1 = log(ptop/pq(k2)) / log(pq(k1)/pq(k2)) - wgt2 = 1.0 - wgt1 - twtop = twq(k1) * wgt1 + twq(k2) * wgt2 - rhtop = rhq(k1) * wgt1 + rhq(k2) * wgt2 - END IF -! -! Calculate temp and wet-bulb ranges below precip generating level. - DO L = 1, k1 - twmax = max(twq(l),twmax) - enddo -! -! Gross check for solid precip, initialize ice fraction. -! IF (i.eq.1.and.j.eq.1) WRITE (*,*) 'twmax=',twmax,twice,'twtop=',twtop - - IF (twtop <= twice) THEN - icefrac = 1.0 - IF (twmax <= twmelt) THEN ! gross check for solid precip. - ptyp = 1 ! solid precip - return - END IF - lll = 0 - ELSE - icefrac = 0.0 - lll = 1 - END IF -! -! Loop downward through sounding from highest precip generating level. - 30 CONTINUE -! - IF (icefrac >= 1.0) THEN ! starting as all ice - IF (twq(k1) < twmelt) GO TO 40 ! cannot commence melting - IF (twq(k1) == twtop) GO TO 40 ! both equal twmelt, nothing h - wgt1 = (twmelt-twq(k1)) / (twtop-twq(k1)) - rhavg = rhq(k1) + wgt1 * (rhtop-rhq(k1)) * 0.5 - dtavg = (twmelt-twq(k1)) * 0.5 - dpk = wgt1 * log(pq(k1)/ptop) !lin dpk=wgt1*(Pq(k1)-Ptop) -! mye=emelt*(1.0-(1.0-Rhavg)*efac) - mye = emelt * rhavg ** efac - icefrac = icefrac + dpk * dtavg / mye - ELSE IF (icefrac <= 0.0) THEN ! starting as all liquid - lll = 1 -! Goto 1020 - IF (twq(k1) > twice) GO TO 40 ! cannot commence freezing - IF (twq(k1) == twtop) THEN - wgt1 = 0.5 - ELSE - wgt1 = (twice-twq(k1)) / (twtop-twq(k1)) - END IF - rhavg = rhq(k1) + wgt1 * (rhtop-rhq(k1)) * 0.5 - dtavg = twmelt - (twq(k1)+twice) * 0.5 - dpk = wgt1 * log(pq(k1)/ptop) !lin dpk=wgt1*(Pq(k1)-Ptop) -! mye = emelt*(1.0-(1.0-Rhavg)*efac) - mye = emelt * rhavg ** efac - icefrac = icefrac + dpk * dtavg / mye - ELSE IF ((twq(k1) <= twmelt).and.(twq(k1) < twmelt)) THEN ! mix - rhavg = (rhq(k1)+rhtop) * 0.5 - dtavg = twmelt - (twq(k1)+twtop) * 0.5 - dpk = log(pq(k1)/ptop) !lin dpk=Pq(k1)-Ptop -! mye = emelt*(1.0-(1.0-Rhavg)*efac) - mye = emelt * rhavg ** efac - icefrac = icefrac + dpk * dtavg / mye - ELSE ! mix where Tw curve crosses twmelt in layer - IF (twq(k1) == twtop) GO TO 40 ! both equal twmelt, nothing h - wgt1 = (twmelt-twq(k1)) / (twtop-twq(k1)) - wgt2 = 1.0 - wgt1 - rhavg = rhtop + wgt2 * (rhq(k1)-rhtop) * 0.5 - dtavg = (twmelt-twtop) * 0.5 - dpk = wgt2 * log(pq(k1)/ptop) !lin dpk=wgt2*(Pq(k1)-Ptop) -! mye = emelt*(1.0-(1.0-Rhavg)*efac) - mye = emelt * rhavg ** efac - icefrac = icefrac + dpk * dtavg / mye - icefrac = min(1.0,max(icefrac,0.0)) - IF (icefrac <= 0.0) THEN -! Goto 1020 - IF (twq(k1) > twice) GO TO 40 ! 
cannot commence freezin - wgt1 = (twice-twq(k1)) / (twtop-twq(k1)) - dtavg = twmelt - (twq(k1)+twice) * 0.5 - ELSE - dtavg = (twmelt-twq(k1)) * 0.5 - END IF - rhavg = rhq(k1) + wgt1 * (rhtop-rhq(k1)) * 0.5 - dpk = wgt1 * log(pq(k1)/ptop) !lin dpk=wgt1*(Pq(k1)-Ptop) -! mye = emelt*(1.0-(1.0-Rhavg)*efac) - mye = emelt * rhavg ** efac - icefrac = icefrac + dpk * dtavg / mye - END IF -! - icefrac = min(1.0,max(icefrac,0.0)) - -! IF (i.eq.1.and.j.eq.1) WRITE (*,*) 'NEW ICEFRAC:', icefrac, icefrac -! -! Get next level down if there is one, loop back. - 40 continue - IF (k1 > 1) THEN - twtop = twq(k1) - ptop = pq(k1) - rhtop = rhq(k1) - k1 = k1 - 1 - GO TO 30 - END IF -! -! Determine precip type based on snow fraction and surface wet-bulb. -! - IF (icefrac >= slim) THEN - IF (lll /= 0) THEN - ptyp = 2 ! Ice Pellets JC 9/16/99 - ELSE - ptyp = 1 ! Snow - END IF - ELSE IF (icefrac <= rlim) THEN - IF (twq(1).lt.tz) THEN - ptyp = 4 ! Freezing Precip - ELSE - ptyp = 8 ! Rain - END IF - ELSE - IF (twq(1) < tz) THEN -!GSM not sure what to do when 'mix' is predicted; In previous -!GSM versions of this code for which I had to have an answer, -!GSM I chose sleet. Here, though, since we have 4 other -!GSM algorithms to provide an answer, I will not declare a -!GSM type from the Ramer in this situation and allow the -!GSM other algorithms to make the call. - - ptyp = 0 ! don't know -! ptyp = 5 ! Mix - ELSE -! ptyp = 5 ! Mix - ptyp = 0 ! don't know - END IF - END IF - - RETURN -! - END -! -! -!-------------------------------------------------------------------------- -! REAL*4 FUNCTION mytw(t,td,p) - FUNCTION xmytw(t,td,p) -! - IMPLICIT NONE -! - INTEGER*4 cflag, l -! REAL*4 f, c0, c1, c2, k, kd, kw, ew, t, td, p, ed, fp, s, & - REAL f, c0, c1, c2, k, kd, kw, ew, t, td, p, ed, fp, s, & - & de, xmytw - DATA f, c0, c1, c2 /0.0006355, 26.66082, 0.0091379024, 6106.3960/ -! -! - xmytw = (t+td) / 2 - IF (td.ge.t) RETURN -! - IF (t.lt.100.0) THEN - k = t + 273.15 - kd = td + 273.15 - IF (kd.ge.k) RETURN - cflag = 1 - ELSE - k = t - kd = td - cflag = 0 - END IF -! - ed = c0 - c1 * kd - c2 / kd - IF (ed.lt.-14.0.or.ed.gt.7.0) RETURN - ed = exp(ed) - ew = c0 - c1 * k - c2 / k - IF (ew.lt.-14.0.or.ew.gt.7.0) RETURN - ew = exp(ew) - fp = p * f - s = (ew-ed) / (k-kd) - kw = (k*fp+kd*s) / (fp+s) -! - DO 10 l = 1, 5 - ew = c0 - c1 * kw - c2 / kw - IF (ew.lt.-14.0.or.ew.gt.7.0) RETURN - ew = exp(ew) - de = fp * (k-kw) + ed - ew - IF (abs(de/ew).lt.1E-5) GO TO 20 - s = ew * (c1-c2/(kw*kw)) - fp - kw = kw - de / s - 10 CONTINUE - 20 CONTINUE -! -! print *, 'kw ', kw - IF (cflag.ne.0) THEN - xmytw = kw - 273.15 - ELSE - xmytw = kw - END IF -! - RETURN - END -! -! -!$$$ Subprogram documentation block -! -! Subprogram: calwxt_bourg Calculate precipitation type (Bourgouin) -! Prgmmr: Baldwin Org: np22 Date: 1999-07-06 -! -! Abstract: This routine computes precipitation type -! using a decision tree approach that uses the so-called -! "energy method" of Bourgouin of AES (Canada) 1992 -! -! Program history log: -! 1999-07-06 M Baldwin -! 1999-09-20 M Baldwin make more consistent with bourgouin (1992) -! 2005-08-24 G Manikin added to wrf post -! 2007-06-19 M Iredell mersenne twister, best practices -! 2008-03-03 G Manikin added checks to prevent stratospheric warming -! episodes from being seen as "warm" layers -! impacting precip type -! -! Usage: call calwxt_bourg(im,jm,jsta_2l,jend_2u,jsta,jend,lm,lp1, & -! & iseed,g,pthresh, & -! & t,q,pmid,pint,lmh,prec,zint,ptype) -! Input argument list: -! im integer i dimension -! 
jm integer j dimension -! jsta_2l integer j dimension start point (including haloes) -! jend_2u integer j dimension end point (including haloes) -! jsta integer j dimension start point (excluding haloes) -! jend integer j dimension end point (excluding haloes) -! lm integer k dimension -! lp1 integer k dimension plus 1 -! iseed integer random number seed -! g real gravity (m/s**2) -! pthresh real precipitation threshold (m) -! t real(im,jsta_2l:jend_2u,lm) mid layer temp (K) -! q real(im,jsta_2l:jend_2u,lm) specific humidity (kg/kg) -! pmid real(im,jsta_2l:jend_2u,lm) mid layer pressure (Pa) -! pint real(im,jsta_2l:jend_2u,lp1) interface pressure (Pa) -! lmh real(im,jsta_2l:jend_2u) max number of layers -! prec real(im,jsta_2l:jend_2u) precipitation (m) -! zint real(im,jsta_2l:jend_2u,lp1) interface height (m) -! Output argument list: -! ptype real(im,jm) instantaneous weather type () -! acts like a 4 bit binary -! 1111 = rain/freezing rain/ice pellets/snow -! where the one's digit is for snow -! the two's digit is for ice pellets -! the four's digit is for freezing rain -! and the eight's digit is for rain -! in other words... -! ptype=1 snow -! ptype=2 ice pellets/mix with ice pellets -! ptype=4 freezing rain/mix with freezing rain -! ptype=8 rain -! -! Modules used: -! mersenne_twister pseudo-random number generator -! -! Subprograms called: -! random_number pseudo-random number generator -! -! Attributes: -! Language: Fortran 90 -! -! Remarks: vertical order of arrays must be layer 1 = top -! and layer lmh = bottom -! -!$$$ - subroutine calwxt_bourg(lm,lp1,rn,g,pthresh, & - & t,q,pmid,pint,prec,zint,ptype) -! use mersenne_twister - implicit none -! -! input: - integer,intent(in):: lm,lp1 -! integer,intent(in):: iseed - real,intent(in):: g,pthresh,rn - real*8,intent(in):: t(lm) - real,intent(in):: q(lm) - real,intent(in):: pmid(lm) - real,intent(in):: pint(lp1) - real,intent(in):: prec - real,intent(in):: zint(lp1) -! -! output: - real,intent(out):: ptype -! - integer ifrzl,iwrml,l,lhiwrm - real pintk1,areane,tlmhk,areape,pintk2,surfw,area1,dzkl,psfck -! -! initialize weather type array to zero (ie, off). -! we do this since we want ptype to represent the -! instantaneous weather type on return. -! -!!$omp parallel do - - ptype = 0 - -! -! call random_number(rn,iseed) -! -!!$omp parallel do -!!$omp& private(a,tlmhk,iwrml,psfck,lhiwrm,pintk1,pintk2,area1, -!!$omp& areape,dzkl,surfw,r1,r2) - - psfck=pint(lm+1) -! -! skip this point if no precip this time step -! - if (prec.le.pthresh) return -! find the depth of the warm layer based at the surface -! this will be the cut off point between computing -! the surface based warm air and the warm air aloft -! -! -! lowest layer t -! - tlmhk = t(lm) - iwrml = lm + 1 - if (tlmhk.ge.273.15) then - do l = lm, 2, -1 - if (t(l).ge.273.15.and.t(l-1).lt.273.15.and. & - & iwrml.eq.lm+1) iwrml = l - end do - end if -! -! now find the highest above freezing level -! - lhiwrm = lm + 1 - do l = lm, 1, -1 -! gsm added 250 mb check to prevent stratospheric warming situations -! from counting as warm layers aloft - if (t(l).ge.273.15 .and. pmid(l).gt.25000.) lhiwrm = l - end do - -! energy variables -! surfw is the positive energy between the ground -! and the first sub-freezing layer above ground -! areane is the negative energy between the ground -! and the highest layer above ground -! that is above freezing -! areape is the positive energy "aloft" -! which is the warm energy not based at the ground -! (the total warm energy = surfw + areape) -! -! 
pintk1 is the pressure at the bottom of the layer -! pintk2 is the pressure at the top of the layer -! dzkl is the thickness of the layer -! ifrzl is a flag that tells us if we have hit -! a below freezing layer -! - pintk1 = psfck - ifrzl = 0 - areane = 0.0 - areape = 0.0 - surfw = 0.0 - - do l = lm, 1, -1 - if (ifrzl.eq.0.and.t(l).le.273.15) ifrzl = 1 - pintk2=pint(l) - dzkl=zint(l)-zint(l+1) - area1 = log(t(l)/273.15) * g * dzkl - if (t(l).ge.273.15.and. pmid(l).gt.25000.) then - if (l.lt.iwrml) areape = areape + area1 - if (l.ge.iwrml) surfw = surfw + area1 - else - if (l.gt.lhiwrm) areane = areane + abs(area1) - end if - pintk1 = pintk2 - end do - -! -! decision tree time -! - if (areape.lt.2.0) then -! very little or no positive energy aloft, check for -! positive energy just above the surface to determine rain vs. snow - if (surfw.lt.5.6) then -! not enough positive energy just above the surface -! snow = 1 - ptype = 1 - else if (surfw.gt.13.2) then -! enough positive energy just above the surface -! rain = 8 - ptype = 8 - else -! transition zone, assume equally likely rain/snow -! picking a random number, if <=0.5 snow - if (rn.le.0.5) then -! snow = 1 - ptype = 1 - else -! rain = 8 - ptype = 8 - end if - end if -! - else -! some positive energy aloft, check for enough negative energy -! to freeze and make ice pellets to determine ip vs. zr - if (areane.gt.66.0+0.66*areape) then -! enough negative area to make ip, -! now need to check if there is enough positive energy -! just above the surface to melt ip to make rain - if (surfw.lt.5.6) then -! not enough energy at the surface to melt ip -! ice pellets = 2 - ptype = 2 - else if (surfw.gt.13.2) then -! enough energy at the surface to melt ip -! rain = 8 - ptype = 8 - else -! transition zone, assume equally likely ip/rain -! picking a random number, if <=0.5 ip - if (rn.le.0.5) then -! ice pellets = 2 - ptype = 2 - else -! rain = 8 - ptype = 8 - end if - end if - else if (areane.lt.46.0+0.66*areape) then -! not enough negative energy to refreeze, check surface temp -! to determine rain vs. zr - if (tlmhk.lt.273.15) then -! freezing rain = 4 - ptype = 4 - else -! rain = 8 - ptype = 8 - end if - else -! transition zone, assume equally likely ip/zr -! picking a random number, if <=0.5 ip - if (rn.le.0.5) then -! still need to check positive energy -! just above the surface to melt ip vs. rain - if (surfw.lt.5.6) then -! ice pellets = 2 - ptype = 2 - else if (surfw.gt.13.2) then -! rain = 8 - ptype = 8 - else -! transition zone, assume equally likely ip/rain -! picking a random number, if <=0.5 ip - if (rn.le.0.25) then -! ice pellets = 2 - ptype = 2 - else -! rain = 8 - ptype = 8 - end if - end if - else -! not enough negative energy to refreeze, check surface temp -! to determine rain vs. zr - if (tlmhk.lt.273.15) then -! freezing rain = 4 - ptype = 4 - else -! rain = 8 - ptype = 8 - end if - end if - end if - end if -! end do -! end do - return - end -! -! - SUBROUTINE CALWXT_REVISED(LM,LP1,T,Q,PMID,PINT,PREC, & - PTHRESH,D608,ROG,EPSQ, & - & ZINT,TWET,IWX) -! -! FILE: CALWXT.f -! WRITTEN: 11 NOVEMBER 1993, MICHAEL BALDWIN -! REVISIONS: -! 30 SEPT 1994-SETUP NEW DECISION TREE (M BALDWIN) -! 12 JUNE 1998-CONVERSION TO 2-D (T BLACK) -! 01-10-25 H CHUANG - MODIFIED TO PROCESS HYBRID MODEL OUTPUT -! 02-01-15 MIKE BALDWIN - WRF VERSION -! 05-07-07 BINBIN ZHOU - ADD PREC FOR RSM -! 05-08-24 GEOFF MANIKIN - MODIFIED THE AREA REQUIREMENTS -! TO MAKE AN ALTERNATE ALGORITHM -! -! -! 
ROUTINE TO COMPUTE PRECIPITATION TYPE USING A DECISION TREE -! APPROACH THAT USES VARIABLES SUCH AS INTEGRATED WET BULB TEMP -! BELOW FREEZING AND LOWEST LAYER TEMPERATURE -! -! SEE BALDWIN AND CONTORNO PREPRINT FROM 13TH WEATHER ANALYSIS -! AND FORECASTING CONFERENCE FOR MORE DETAILS -! (OR BALDWIN ET AL, 10TH NWP CONFERENCE PREPRINT) -! -! SINCE THE ORIGINAL VERSION OF THE ALGORITHM HAS A HIGH BIAS -! FOR FREEZING RAIN AND SLEET, THE GOAL IS TO BALANCE THAT BIAS -! WITH A VERSION MORE LIKELY TO PREDICT SNOW -! -! use params_mod -! use ctlblk_mod -!- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - implicit none -! -! LIST OF VARIABLES NEEDED -! PARAMETERS: -! D608,ROG,H1,D00 -!HC PARAMETER(D608=0.608,ROG=287.04/9.8,H1=1.0,D00=0.0) -! -! INPUT: -! T,Q,PMID,HTM,LMH,PREC,ZINT - integer,intent(in):: lm,lp1 - REAL,dimension(LM),intent(in) :: Q,PMID - REAL*8,dimension(LM),intent(in) :: T,TWET - REAL,dimension(LP1),intent(in) :: PINT,ZINT - REAL,intent(in) :: PREC,PTHRESH,D608,ROG,EPSQ -! OUTPUT: -! IWX - INSTANTANEOUS WEATHER TYPE. -! ACTS LIKE A 4 BIT BINARY -! 1111 = RAIN/FREEZING RAIN/ICE PELLETS/SNOW -! WHERE THE ONE'S DIGIT IS FOR SNOW -! THE TWO'S DIGIT IS FOR ICE PELLETS -! THE FOUR'S DIGIT IS FOR FREEZING RAIN -! AND THE EIGHT'S DIGIT IS FOR RAIN - integer, intent(out) :: IWX -! INTERNAL: -! - real, parameter :: D00=0.0 - integer KARR,LICEE - real TCOLD,TWARM -! - integer L,LMHK,LICE,IWRML,IFRZL - real PSFCK,TDCHK,A,TDKL,TDPRE,TLMHK,TWRMK,AREAS8,AREAP4,AREA1, & - SURFW,SURFC,DZKL,PINTK1,PINTK2,PM150,QKL,TKL,PKL,AREA0, & - AREAP0 - -! SUBROUTINES CALLED: -! WETBULB -! -! -! INITIALIZE WEATHER TYPE ARRAY TO ZERO (IE, OFF). -! WE DO THIS SINCE WE WANT IWX TO REPRESENT THE -! INSTANTANEOUS WEATHER TYPE ON RETURN. -! -! -! ALLOCATE LOCAL STORAGE -! -! -!!$omp parallel do - IWX = 0 - -!!$omp parallel do -!!$omp& private(a,lmhk,pkl,psfck,qkl,tdchk,tdkl,tdpre,tkl) - - LMHK=LM -! -! SKIP THIS POINT IF NO PRECIP THIS TIME STEP -! - IF (PREC.LE.PTHRESH) GOTO 800 -! -! FIND COLDEST AND WARMEST TEMPS IN SATURATED LAYER BETWEEN -! 70 MB ABOVE GROUND AND 500 MB -! ALSO FIND HIGHEST SATURATED LAYER IN THAT RANGE -! -!meb - PSFCK=PINT(LP1) -!meb - TDCHK=2.0 - 760 TCOLD=T(LMHK) - TWARM=T(LMHK) - LICEE=LMHK -! - DO 775 L=1,LMHK - QKL=Q(L) - QKL=MAX(EPSQ,QKL) - TKL=T(L) - PKL=PMID(L) -! -! SKIP PAST THIS IF THE LAYER IS NOT BETWEEN 70 MB ABOVE GROUND -! AND 500 MB -! - IF (PKL.LT.50000.0.OR.PKL.GT.PSFCK-7000.0) GOTO 775 - A=LOG(QKL*PKL/(6.1078*(0.378*QKL+0.622))) - TDKL=(237.3*A)/(17.269-A)+273.15 - TDPRE=TKL-TDKL - IF (TDPRE.LT.TDCHK.AND.TKL.LT.TCOLD) TCOLD=TKL - IF (TDPRE.LT.TDCHK.AND.TKL.GT.TWARM) TWARM=TKL - IF (TDPRE.LT.TDCHK.AND.L.LT.LICEE) LICEE=L - 775 CONTINUE -! -! IF NO SAT LAYER AT DEW POINT DEP=TDCHK, INCREASE TDCHK -! AND START AGAIN (BUT DON'T MAKE TDCHK > 6) -! - IF (TCOLD.EQ.T(LMHK).AND.TDCHK.LT.6.0) THEN - TDCHK=TDCHK+2.0 - GOTO 760 - ENDIF - 800 CONTINUE -! -! LOWEST LAYER T -! - KARR=0 - IF (PREC.LE.PTHRESH) GOTO 850 - LMHK=LM - TLMHK=T(LMHK) -! -! DECISION TREE TIME -! - IF (TCOLD.GT.269.15) THEN - IF (TLMHK.LE.273.15) THEN -! TURN ON THE FLAG FOR -! FREEZING RAIN = 4 -! IF ITS NOT ON ALREADY -! IZR=MOD(IWX,8)/4 -! IF (IZR.LT.1) IWX=IWX+4 - IWX=IWX+4 - GOTO 850 - ELSE -! TURN ON THE FLAG FOR -! RAIN = 8 -! IF ITS NOT ON ALREADY -! IRAIN=IWX/8 -! IF (IRAIN.LT.1) IWX=IWX+8 - IWX=IWX+8 - GOTO 850 - ENDIF - ENDIF - KARR=1 - 850 CONTINUE -! 
-!!$omp parallel do -!!$omp& private(area1,areap4,areap0,areas8,dzkl,ifrzl,iwrml,lice, -!!$omp& lmhk,pintk1,pintk2,pm150,psfck,surfc,surfw, -!!$omp& tlmhk,twrmk) - - IF(KARR.GT.0)THEN - LMHK=LM - LICE=LICEE -!meb - PSFCK=PINT(LP1) -!meb - TLMHK=T(LMHK) - TWRMK=TWARM -! -! TWET AREA VARIABLES -! CALCULATE ONLY WHAT IS NEEDED -! FROM GROUND TO 150 MB ABOVE SURFACE -! FROM GROUND TO TCOLD LAYER -! AND FROM GROUND TO 1ST LAYER WHERE WET BULB T < 0.0 -! -! PINTK1 IS THE PRESSURE AT THE BOTTOM OF THE LAYER -! PINTK2 IS THE PRESSURE AT THE TOP OF THE LAYER -! -! AREAP4 IS THE AREA OF TWET ABOVE -4 C BELOW HIGHEST SAT LYR -! AREAP0 IS THE AREA OF TWET ABOVE 0 C BELOW HIGHEST SAT LYR -! - AREAS8=D00 - AREAP4=D00 - AREAP0=D00 - SURFW =D00 - SURFC =D00 - -! - DO 1945 L=LMHK,LICE,-1 - DZKL=ZINT(L)-ZINT(L+1) - AREA1=(TWET(L)-269.15)*DZKL - AREA0=(TWET(L)-273.15)*DZKL - IF (TWET(L).GE.269.15) AREAP4=AREAP4+AREA1 - IF (TWET(L).GE.273.15) AREAP0=AREAP0+AREA0 - 1945 CONTINUE -! -! IF (AREAP4.LT.3000.0) THEN -! TURN ON THE FLAG FOR -! SNOW = 1 -! IF ITS NOT ON ALREADY -! ISNO=MOD(IWX,2) -! IF (ISNO.LT.1) IWX=IWX+1 -! IWX=IWX+1 -! GO TO 1900 -! ENDIF - IF (AREAP0.LT.350.0) THEN -! TURN ON THE FLAG FOR -! SNOW = 1 - IWX=IWX+1 - GOTO 1900 - ENDIF -! -! AREAS8 IS THE NET AREA OF TWET W.R.T. FREEZING IN LOWEST 150MB -! - PINTK1=PSFCK - PM150=PSFCK-15000. -! - DO 1955 L=LMHK,1,-1 - PINTK2=PINT(L) - IF(PINTK1.LT.PM150)GO TO 1950 - DZKL=ZINT(L)-ZINT(L+1) -! -! SUM PARTIAL LAYER IF IN 150 MB AGL LAYER -! - IF(PINTK2.LT.PM150) & - DZKL=T(L)*(Q(L)*D608+1.0)*ROG* & - LOG(PINTK1/PM150) - AREA1=(TWET(L)-273.15)*DZKL - AREAS8=AREAS8+AREA1 - 1950 PINTK1=PINTK2 - 1955 CONTINUE -! -! SURFW IS THE AREA OF TWET ABOVE FREEZING BETWEEN THE GROUND -! AND THE FIRST LAYER ABOVE GROUND BELOW FREEZING -! SURFC IS THE AREA OF TWET BELOW FREEZING BETWEEN THE GROUND -! AND THE WARMEST SAT LAYER -! - IFRZL=0 - IWRML=0 -! - DO 2050 L=LMHK,1,-1 - IF (IFRZL.EQ.0.AND.T(L).LT.273.15) IFRZL=1 - IF (IWRML.EQ.0.AND.T(L).GE.TWRMK) IWRML=1 -! - IF (IWRML.EQ.0.OR.IFRZL.EQ.0) THEN -! if(pmid(l) .lt. 50000.)print*,'twet needed above 500mb' - DZKL=ZINT(L)-ZINT(L+1) - AREA1=(TWET(L)-273.15)*DZKL - IF(IFRZL.EQ.0.AND.TWET(L).GE.273.15)SURFW=SURFW+AREA1 - IF(IWRML.EQ.0.AND.TWET(L).LE.273.15)SURFC=SURFC+AREA1 - ENDIF - 2050 CONTINUE - IF(SURFC.LT.-3000.0.OR. & - & (AREAS8.LT.-3000.0.AND.SURFW.LT.50.0)) THEN -! TURN ON THE FLAG FOR -! ICE PELLETS = 2 -! IF ITS NOT ON ALREADY -! IIP=MOD(IWX,4)/2 -! IF (IIP.LT.1) IWX=IWX+2 - IWX=IWX+2 - GOTO 1900 - ENDIF -! - IF(TLMHK.LT.273.15) THEN -! TURN ON THE FLAG FOR -! FREEZING RAIN = 4 -! IF ITS NOT ON ALREADY -! IZR=MOD(IWX(K),8)/4 -! IF (IZR.LT.1) IWX(K)=IWX(K)+4 - IWX=IWX+4 - ELSE -! TURN ON THE FLAG FOR -! RAIN = 8 -! IF ITS NOT ON ALREADY -! IRAIN=IWX(K)/8 -! IF (IRAIN.LT.1) IWX(K)=IWX(K)+8 - IWX=IWX+8 - ENDIF - ENDIF - 1900 CONTINUE -! print *, 'revised check ', IWX(500,800) - - RETURN - END -! -! - SUBROUTINE CALWXT_EXPLICIT(LM,PTHRESH,TSKIN,PREC,SR,F_RIMEF,IWX) -! -! FILE: CALWXT.f -! WRITTEN: 24 AUGUST 2005, G MANIKIN and B FERRIER -! -! ROUTINE TO COMPUTE PRECIPITATION TYPE USING EXPLICIT FIELDS -! FROM THE MODEL MICROPHYSICS - -! use params_mod -! use ctlblk_mod -!- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - implicit none -! -! LIST OF VARIABLES NEEDED -! PARAMETERS: -! -! INPUT: - integer, intent(in):: lm - real,intent(in):: TSKIN, PREC, SR,PTHRESH - REAL,intent(in):: F_RimeF(LM) - integer,intent(out) :: IWX - real SNOW -! real PSFC -! -! ALLOCATE LOCAL STORAGE -! 
-!!$omp parallel do - IWX = 0 - -!GSM THE RSM IS CURRENTLY INCOMPATIBLE WITH THIS ROUTINE -!GSM ACCORDING TO B FERRIER, THERE MAY BE A WAY TO WRITE -!GSM A VERSION OF THIS ALGORITHM TO WORK WITH THE RSM -!GSM MICROPHYSICS, BUT IT DOESN'T EXIST AT THIS TIME -!!$omp parallel do -!!$omp& private(psfc,tskin) - -! SKIP THIS POINT IF NO PRECIP THIS TIME STEP -! - IF (PREC.LE.PTHRESH) GOTO 800 -! -! A SNOW RATIO LESS THAN 0.5 ELIMINATES SNOW AND SLEET -! USE THE SKIN TEMPERATURE TO DISTINGUISH RAIN FROM FREEZING RAIN -! NOTE THAT 2-M TEMPERATURE MAY BE A BETTER CHOICE IF THE MODEL -! HAS A COLD BIAS FOR SKIN TEMPERATURE -! - IF (SR.LT.0.5) THEN -! SURFACE (SKIN) POTENTIAL TEMPERATURE AND TEMPERATURE. -! PSFC=PMID(LM) -! TSKIN=THS*(PSFC/P1000)**CAPA - - IF (TSKIN.LT.273.15) THEN -! FREEZING RAIN = 4 - IWX=IWX+4 - ELSE -! RAIN = 8 - IWX=IWX+8 - ENDIF - ELSE -! -! DISTINGUISH SNOW FROM SLEET WITH THE RIME FACTOR -! - IF(F_RimeF(LM).GE.10) THEN -! SLEET = 2 - IWX=IWX+2 - ELSE - SNOW = 1 - IWX=IWX+1 - ENDIF - ENDIF - 800 CONTINUE - 810 RETURN - END -! -! - SUBROUTINE CALWXT_DOMINANT(NALG,PREC,PTHRESH,RAIN,FREEZR,SLEET,SNOW, & - & DOMR,DOMZR,DOMIP,DOMS) -! -! WRITTEN: 24 AUGUST 2005, G MANIKIN -! -! THIS ROUTINE TAKES THE PRECIP TYPE SOLUTIONS FROM DIFFERENT -! ALGORITHMS AND SUMS THEM UP TO GIVE A DOMINANT TYPE -! -! use params_mod -! use ctlblk_mod -!- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - implicit none -! -! INPUT: - integer,intent(in) :: NALG - REAL, intent(in) :: PREC,PTHRESH - real,intent(out) :: DOMS,DOMR,DOMZR,DOMIP - real,DIMENSION(NALG),intent(in) :: RAIN,SNOW,SLEET,FREEZR - integer L - real TOTSN,TOTIP,TOTR,TOTZR -!-------------------------------------------------------------------------- -! write(6,*) 'into dominant' -!!$omp parallel do - DOMR = 0. - DOMS = 0. - DOMZR = 0. - DOMIP = 0. -! -!!$omp parallel do -!!$omp& private(totsn,totip,totr,totzr) -! SKIP THIS POINT IF NO PRECIP THIS TIME STEP - IF (PREC.LE.PTHRESH) GOTO 800 - TOTSN = 0. - TOTIP = 0. - TOTR = 0. - TOTZR = 0. -! LOOP OVER THE NUMBER OF DIFFERENT ALGORITHMS THAT ARE USED - DO 820 L = 1, NALG - IF (RAIN(L).GT. 0) THEN - TOTR = TOTR + 1 - GOTO 830 - ENDIF - - IF (SNOW(L).GT. 0) THEN - TOTSN = TOTSN + 1 - GOTO 830 - ENDIF - - IF (SLEET(L).GT. 0) THEN - TOTIP = TOTIP + 1 - GOTO 830 - ENDIF - - IF (FREEZR(L).GT. 0) THEN - TOTZR = TOTZR + 1 - GOTO 830 - ENDIF - 830 CONTINUE - 820 CONTINUE -! print *, 'Calprecip Total Rain, snow, sleet, freeze= ', & -! TOTR,TOTSN,TOTIP,TOTZR - -! TIES ARE BROKEN TO FAVOR THE MOST DANGEROUS FORM OF PRECIP -! FREEZING RAIN > SNOW > SLEET > RAIN - IF (TOTSN .GT. TOTIP) THEN - IF (TOTSN .GT. TOTZR) THEN - IF (TOTSN .GE. TOTR) THEN - DOMS = 1 - GOTO 800 - ELSE - DOMR = 1 - GOTO 800 - ENDIF - ELSE IF (TOTZR .GE. TOTR) THEN - DOMZR = 1 - GOTO 800 - ELSE - DOMR = 1 - GOTO 800 - ENDIF - ELSE IF (TOTIP .GT. TOTZR) THEN - IF (TOTIP .GE. TOTR) THEN - DOMIP = 1 - GOTO 800 - ELSE - DOMR = 1 - GOTO 800 - ENDIF - ELSE IF (TOTZR .GE. TOTR) THEN - DOMZR = 1 - GOTO 800 - ELSE - DOMR = 1 - GOTO 800 - ENDIF - 800 CONTINUE - RETURN - END - - - - - diff --git a/sorc/gfs_bufr.fd/calwxt_gfs_baldwin.f b/sorc/gfs_bufr.fd/calwxt_gfs_baldwin.f deleted file mode 100755 index 217dbbcc0c..0000000000 --- a/sorc/gfs_bufr.fd/calwxt_gfs_baldwin.f +++ /dev/null @@ -1,294 +0,0 @@ - SUBROUTINE CALWXT(T,Q,td,twet,P,PINT,LMH,IWX,nd) -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . 
-C SUBPROGRAM: PROGRAM NAME (up to 20 characters) -C PRGMMR: YOUR NAME ORG: W/NMCXX DATE: YY-MM-DD -C -C ABSTRACT: START ABSTRACT HERE AND INDENT TO COLUMN 5 ON THE -C FOLLOWING LINES. PLEASE PROVIDE A BRIEF DESCRIPTION OF -C WHAT THE SUBPROGRAM DOES. -C -C PROGRAM HISTORY LOG: -C YY-MM-DD ORIGINAL PROGRAMMER'S NAME HERE -C YY-MM-DD MODIFIER1 DESCRIPTION OF CHANGE -C YY-MM-DD MODIFIER2 DESCRIPTION OF CHANGE -C -C USAGE: CALL PROGRAM-NAME(INARG1, INARG2, WRKARG, OUTARG1, ... ) -C INPUT ARGUMENT LIST: -C INARG1 - GENERIC DESCRIPTION, INCLUDING CONTENT, UNITS, -C INARG2 - TYPE. EXPLAIN FUNCTION IF CONTROL VARIABLE. -C -C OUTPUT ARGUMENT LIST: (INCLUDING WORK ARRAYS) -C WRKARG - GENERIC DESCRIPTION, ETC., AS ABOVE. -C OUTARG1 - EXPLAIN COMPLETELY IF ERROR RETURN -C ERRFLAG - EVEN IF MANY LINES ARE NEEDED -C -C INPUT FILES: (DELETE IF NO INPUT FILES IN SUBPROGRAM) -C -C OUTPUT FILES: (DELETE IF NO OUTPUT FILES IN SUBPROGRAM) -C -C REMARKS: LIST CAVEATS, OTHER HELPFUL HINTS OR INFORMATION -C -C ATTRIBUTES: -C LANGUAGE: INDICATE EXTENSIONS, COMPILER OPTIONS -C MACHINE: IBM SP -C -C$$$ -C -C FILE: CALWXT.f -C WRITTEN: 11 NOVEMBER 1993, MICHAEL BALDWIN -C REVISIONS: 4 April 94 - 1-d version intended for obs soundings -C 16 Sept 94 - compute all variables for possible -C future decsion tree modifications -C 14 Oct 94 - clean up 1-d version, use new -C decision tree -C -C ROUTINE TO COMPUTE PRECIPITATION TYPE USING A DECISION TREE -C APPROACH THAT USES VARIABLES SUCH AS INTEGRATED WET BULB TEMP -C BELOW FREEZING AND LOWEST LAYER TEMPERATURE -C -C SEE BALDWIN AND CONTORNO PREPRINT FROM 13TH WEATHER ANALYSIS -C AND FORECASTING CONFERENCE FOR MORE DETAILS -C - PARAMETER (LM=99) - PARAMETER (H1M12=1.E-12) -C -C LIST OF VARIABLES NEEDED -C PARAMETERS: -C D608,ROG,H1,D00 - PARAMETER(D608=0.608,ROG=287.04/9.8,H1=1.0,D00=0.0) -C -C INPUT: -C T,Q,td,twet,P,PINT,LMH -C -C T - Mid layer temp (K) -C Q - Mid layer spec hum (g/g) -C TD - Mid layer dew point temp (K) -C TWET - Mid layer wet bulb temp (K) -C P - Mid layer pressure (Pa) (linear average of interfacial -C pressures in log P) -C PINT - Interfacial pressure (Pa) -C LMH - Number of layers -c nd - 0 .. no print 1 .. print -C+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -C NOTE: VERTICAL ORDER OF ARRAYS MUST BE LAYER 1 = TOP -C ---- . -C . -C . -C LAYER LMH = BOTTOM -C (JUST LIKE IN THE ETA MODEL) -C+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -C -C INTERNAL: -C -C -C OUTPUT: -C IWX - INSTANTANEOUS WEATHER TYPE. -C ACTS LIKE A 4 BIT BINARY -C 1111 = RAIN/FREEZING RAIN/ICE PELLETS/SNOW -C WHERE THE ONE'S DIGIT IS FOR SNOW -C THE TWO'S DIGIT IS FOR ICE PELLETS -C THE FOUR'S DIGIT IS FOR FREEZING RAIN -C AND THE EIGHT'S DIGIT IS FOR RAIN -C -C------------------------------------------------------------- -C IN OTHER WORDS... -C -C IWX=1 SNOW -C IWX=2 ICE PELLETS/MIX WITH ICE PELLETS -C IWX=4 FREEZING RAIN/MIX WITH FREEZING RAIN -C IWX=8 RAIN -C------------------------------------------------------------- -C -C -C SUBROUTINES CALLED: -C WETBLB -C -C -C INITIALIZE WEATHER TYPE ARRAY TO ZERO (IE, OFF). -C WE DO THIS SINCE WE WANT IWX TO REPRESENT THE -C INSTANTANEOUS WEATHER TYPE ON RETURN. 
-C - DIMENSION T(LM+1),Q(LM),P(LM),PINT(LM+1),TWET(LM),TD(LM) -C - IWX = 0 - AREAS8=D00 - AREAN8=D00 - AREAPI=D00 - AREAP4=D00 - SURFW =D00 - SURFC =D00 -C -C NUMBER OF LEVELS -C - LMHK=LMH -C -C COMPUTE DEW POINTS, -C FIND COLDEST TEMP IN SATURATED LAYER BETWEEN -C 70 MB ABOVE GROUND AND 500 MB, -C AND FIND THE HIGHEST SAT LAYER, 'TIS THE ICE NUCL LEVEL. -C -C - PSFCK=PINT(LMHK+1) - TDCHK=2.0 - 1960 TCOLD=T(LMHK) - TWARM=T(LMHK) - LICE=LMHK - DO 1915 L=1,LMHK - QKL=Q(L) - QKL=AMAX1(H1M12,QKL) - TKL=T(L) - PKL=P(L) - tdkl = td(l) -C -C SKIP PAST THIS IF THE LAYER IS NOT BETWEEN 70 MB ABOVE GROUND -C AND 500 MB -C - IF (PKL.LT.50000.0.OR.PKL.GT.PSFCK-7000.0) GOTO 1915 - TDPRE=TKL-TDKL -C -C ALSO FIND THE HIGHEST SAT LAYER-USE FOR AREAPI,AREAP4 -C - IF (TDPRE.LT.TDCHK.AND.P(L).LT.P(LICE)) LICE=L - IF (TDPRE.LT.TDCHK.AND.TKL.GT.TWARM) TWARM=TKL - IF (TDPRE.LT.TDCHK.AND.TKL.LT.TCOLD) TCOLD=TKL - 1915 CONTINUE -C -C IF WE DONT HAVE A LAYER WITH DEW POINT DEP OF TDCHK OR LESS -C INCREASE TDCHK (UP TO 6 MAX) -C - IF (TCOLD.EQ.T(LMHK+1).AND.TDCHK.LT.6.0) THEN - TDCHK=TDCHK+2.0 - GOTO 1960 - ENDIF -C -C LOWEST LAYER T -C - TLMHK=T(LMHK+1) -C -C TWET AREA VARIABLES -C FROM GROUND TO 150 MB ABOVE SURFACE -C FROM GROUND TO TCOLD LAYER -C FROM GROUND TO 1ST LAYER WHERE T < 0.0 -C FROM GROUND TO TWARM LAYER -C -C PINTK1 IS THE PRESSURE AT THE BOTTOM OF THE LAYER -C PINTK2 IS THE PRESSURE AT THE TOP OF THE LAYER -C -C AREAPI IS THE AREA OF TWET ABOVE FREEZING BELOW TCOLD LYR -C AREAP4 IS THE AREA OF TWET ABOVE -4 C BELOW TCOLD LYR -C - PINTK1=PSFCK - DO 1945 L=LMHK,LICE,-1 - PINTK2=PINT(L) - DZKL=T(L)*(Q(L)*D608+H1)*ROG* - 1 ALOG(PINTK1/PINTK2) - AREA1=(TWET(L)-273.15)*DZKL - AREA2=(TWET(L)-269.15)*DZKL - IF (TWET(L).GE.273.15) AREAPI=AREAPI+AREA1 - IF (TWET(L).GE.269.15) AREAP4=AREAP4+AREA2 - PINTK1=PINTK2 - 1945 CONTINUE -C -C AREAS8 IS THE NET AREA OF TWET W.R.T. FREEZING IN LOWEST 150MB -C AREAN8 IS THE NET AREA OF TWET < FREEZING IN LOWEST 150MB -C - PINTK1=PSFCK - PM150=PSFCK-15000. - DO 1955 L=LMHK,1,-1 - PINTK2=PINT(L) - IF (PINTK1.LT.PM150) GOTO 1950 - DZKL=T(L)*(Q(L)*D608+H1)*ROG* - 1 ALOG(PINTK1/PINTK2) -C -C SUM PARTIAL LAYER IF IN 150 MB AGL LAYER -C - IF (PINTK2.LT.PM150) - & DZKL=T(L)*(Q(L)*D608+H1)*ROG* - 1 ALOG(PINTK1/PM150) - AREA1=(TWET(L)-273.15)*DZKL - AREAS8=AREAS8+AREA1 - IF(AREA1.LT.0.) 
AREAN8=AREAN8+AREA1 - 1950 PINTK1=PINTK2 - 1955 CONTINUE -C -C SURFW IS THE AREA OF TWET ABOVE FREEZING BETWEEN THE GROUND -C AND THE FIRST LAYER ABOVE GROUND BELOW FREEZING -C SURFC IS THE AREA OF TWET BELOW FREEZING BETWEEN THE GROUND -C AND THE TWARM LAYER -C - PINTK1=PSFCK - IFRZL=0 - IWRML=0 - DO 2050 L=LMHK,1,-1 - IF (IFRZL.EQ.0.AND.T(L).LE.273.15) IFRZL=1 - IF (IWRML.EQ.0.AND.T(L).GE.TWARM) IWRML=1 - PINTK2=PINT(L) - DZKL=T(L)*(Q(L)*D608+H1)*ROG* - 1 ALOG(PINTK1/PINTK2) - AREA1=(TWET(L)-273.15)*DZKL - IF (IFRZL.EQ.0) THEN - IF (TWET(L).GE.273.15) SURFW=SURFW+AREA1 - ENDIF - IF (IWRML.EQ.0) THEN - IF (TWET(L).LE.273.15) SURFC=SURFC+AREA1 - ENDIF - PINTK1=PINTK2 - 2050 CONTINUE -C -C DECISION TREE TIME -C - if(nd.eq.1) then - print *, ' tcold =', tcold - print *, ' tlmhk =', tlmhk - print *, ' areap4 =', areap4 - print *, ' areas8 =', areas8 - print *, ' surfw =', surfw - print *, ' surfc =', surfc -c print *, ' temp= ' -c print *, (t(k),k=1,lmhk+1) -c print *, ' tdew =' -c print *, (td(k),k=1,lmhk) -c print *, ' twet =' -c print *, (twet(k),k=1,lmhk) - endif - IF (TCOLD.GT.269.15) THEN - IF (TLMHK.LE.273.15) THEN -C TURN ON THE FLAG FOR -C FREEZING RAIN = 4 - IWX=4 - GOTO 1900 - ELSE -C TURN ON THE FLAG FOR -C RAIN = 8 - IWX=8 - GOTO 1900 - ENDIF - ENDIF -C - IF (AREAP4.LT.3000.0) THEN -C TURN ON THE FLAG FOR -C SNOW = 1 - IWX=1 - GOTO 1900 - ENDIF -C - IF (SURFC.LE.-3000.0.OR. - & (AREAS8.LE.-3000.0.AND.SURFW.LT.50.0)) THEN -C TURN ON THE FLAG FOR -C ICE PELLETS = 2 - IWX=2 - GOTO 1900 - ENDIF - IF (TLMHK.LT.273.15) THEN -C TURN ON THE FLAG FOR -C FREEZING RAIN = 4 - IWX=4 - ELSE -C TURN ON THE FLAG FOR -C RAIN = 8 - IWX=8 - ENDIF - 1900 CONTINUE - RETURN - END diff --git a/sorc/gfs_bufr.fd/calwxt_gfs_ramer.f b/sorc/gfs_bufr.fd/calwxt_gfs_ramer.f deleted file mode 100755 index 1faabf6214..0000000000 --- a/sorc/gfs_bufr.fd/calwxt_gfs_ramer.f +++ /dev/null @@ -1,364 +0,0 @@ -Cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc -C -C DoPhase is a subroutine written and provided by Jim Ramer at NOAA/FSL -C -C Ramer, J, 1993: An empirical technique for diagnosing precipitation -C type from model output. Preprints, 5th Conf. on Aviation -C Weather Systems, Vienna, VA, Amer. Meteor. Soc., 227-230. -C -Cccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc -C - SUBROUTINE CALWXT1(pq,tq,qq,twq,tdq,nq,lm,ppt,ptyp,trace) -c SUBROUTINE dophase(pq, ! input pressure sounding mb -c + tq, ! input temperature sounding K -c + pq, | input pressure -c + qq, ! input spec humidityfraction -c + twq, ! input wet-bulb temperature -c + nq, ! input number of levels in sounding -c + twq, ! output wet-bulb sounding K -c + icefrac, ! output ice fraction -c + ptyp) ! output(2) phase 2=Rain, 3=Frzg, 4=Solid, -C 6=IP JC 9/16/99 - LOGICAL trace -c PARAMETER (trace = .false.) - PARAMETER (A2=17.2693882,A3=273.16,A4=35.86,PQ0=379.90516) - PARAMETER (G=9.80665,CP=1004.686,RCP=0.2857141,LECP=1572.5) - PARAMETER (twice=266.55,rhprcp=0.80,deltag=1.02,prcpmin=0.3, - * emelt=0.045,rlim=0.04,slim=0.85) - PARAMETER (twmelt=273.15,tz=273.15,efac=1.0,PTHRES=0.25) -c pthres is in unit of mm and is equivalent to .01 inch -C - INTEGER*4 i, k1, lll, k2, toodry, iflag, nq -C - INTEGER ptyp -C - REAL rcp, flg, flag, xxx, pq(lm), tq(lm), twq(lm), rhq(lm), mye, - * qq(lm), icefrac, tqtmp(lm), pqtmp(lm), rhqtmp(lm) - * ,twtmp(lm),qqtmp(lm),tdqtmp(lm),tdq(lm) -C - COMMON /flagflg/ flag, flg - DATA iflag / -9/ -C -C Initialize. 
- IF (trace) print *, '******* NEW STATION ******' - IF (trace) print *, 'Twmelt,Twice,rhprcp,Emelt' - IF (trace) print *, twmelt, twice, rhprcp, emelt - icefrac = flag - ptyp = 0 -c IF (PPT.LE.PTHRES) RETURN -C -C GSM compute RH, convert pressure to mb, and reverse order - - DO 88 i = 1, nq - LEV=NQ-I+1 -c QC=PQ0/PQ(I) * EXP(A2*(TQ(I)-A3)/(TQ(I)-A4)) - call svp(qc,es,pq(i),tq(i)) - RHQTMP(LEV)=QQ(I)/QC - PQTMP(LEV)=PQ(I)/100. - TQTMP(LEV)=TQ(I) - TWTMP(LEV)=TWQ(I) - QQTMP(LEV)=QQ(I) - TDQTMP(LEV)=TDQ(I) - 88 CONTINUE - - do 92 i=1,nq - TQ(I)=TQTMP(I) - PQ(I)=PQTMP(I) - RHQ(I)=RHQTMP(I) - TWQ(I)=TWTMP(I) - QQ(I)=QQTMP(I) - TDQ(I)=TDQTMP(I) - 92 continue - - -C See if there was too little precip reported. -C -CCC RATE RESTRICTION REMOVED BY JOHN CORTINAS 3/16/99 -C -C Construct wet-bulb sounding, locate generating level. - twmax = -999.0 - rhmax = 0.0 - k1 = 0 ! top of precip generating layer - k2 = 0 ! layer of maximum rh -C - IF (trace) WRITE (20,*) 'rhq(1)', rhq(1) - IF (rhq(1).lt.rhprcp) THEN - toodry = 1 - ELSE - toodry = 0 - END IF -C -C toodry=((Rhq(1).lt.rhprcp).and.1) - pbot = pq(1) - DO 10 i = 1, nq -c xxx = tdofesat(esat(tq(i))*rhq(i)) -c call tdew(xxx,tq(i),qq(i),pq(i)*100.) - xxx = tdq(i) - IF (trace) print *, 'T,Rh,Td,P,nq ', tq(i), rhq(i), xxx, - + pq(i), nq -c twq(i) = xmytw(tq(i),xxx,pq(i)) - IF (trace) print *, 'Twq(i),i ', twq(i), i - twmax = amax1(twq(i),twmax) - IF (trace) print *, 'Tw,Rh,P ', twq(i) - 273.15, rhq(i), - + pq(i) - IF (pq(i).ge.400.0) THEN - IF (rhq(i).gt.rhmax) THEN - rhmax = rhq(i) - k2 = i - IF (trace) print *, 'rhmax,k2,i', rhmax, k2, i - END IF -C - IF (i.ne.1) THEN - IF (trace) print *, 'ME: toodry,i', toodry, i - IF (rhq(i).ge.rhprcp.or.toodry.eq.0) THEN - IF (toodry.ne.0) THEN - dpdrh = alog(pq(i)/pq(i-1)) / (rhq(i)- - + rhq(i-1)) - pbot = exp(alog(pq(i))+(rhprcp-rhq(i))*dpdrh) -C -Clin dpdrh=(Pq(i)-Pq(i-1))/(Rhq(i)-Rhq(i-1)) -Clin pbot=Pq(i)+(rhprcp-Rhq(i))*dpdrh - ptw = pq(i) - toodry = 0 - IF (trace) print *, 'dpdrh,pbot,rhprcp-rhq - +(i),i,ptw, toodry', dpdrh, pbot, rhprcp - rhq(i), i, ptw, - + toodry - ELSE IF (rhq(i).ge.rhprcp) THEN - ptw = pq(i) - IF (trace) print *, 'HERE1: ptw,toodry', - + ptw, toodry - ELSE - toodry = 1 - dpdrh = alog(pq(i)/pq(i-1)) / (rhq(i)- - + rhq(i-1)) - ptw = exp(alog(pq(i))+(rhprcp-rhq(i))*dpdrh) - IF (trace) print *, - + 'HERE2:dpdrh,pbot,i,ptw,toodry', dpdrh, - + pbot, i, ptw, toodry -Clin dpdrh=(Pq(i)-Pq(i-1))/(Rhq(i)-Rhq(i-1)) -Clin ptw=Pq(i)+(rhprcp-Rhq(i))*dpdrh -C - END IF -C - IF (trace) print *, 'HERE3:pbot,ptw,deltag', - + pbot, ptw, deltag - IF (pbot/ptw.ge.deltag) THEN -Clin If (pbot-ptw.lt.deltag) Goto 2003 - k1 = i - ptop = ptw - END IF - END IF - END IF - END IF -C - 10 CONTINUE -C -C Gross checks for liquid and solid precip which dont require generating level. -C -c print *, 'twq1 ', twq(1) - IF (twq(1).ge.273.15+2.0) THEN - ptyp = 8 ! liquid - IF (trace) PRINT *, 'liquid' - icefrac = 0.0 - RETURN - END IF -C - print *, 'twmax ', twmax - IF (twmax.le.twice) THEN - icefrac = 1.0 - ptyp = 1 ! solid - RETURN - END IF -C -C Check to see if we had no success with locating a generating level. 
-C - IF (trace) print *, 'HERE6: k1,ptyp', k1, ptyp - IF (k1.eq.0) THEN - rate = flag - RETURN - END IF -C - IF (ptop.eq.pq(k1)) THEN - twtop = twq(k1) - rhtop = rhq(k1) - k2 = k1 - k1 = k1 - 1 - ELSE - k2 = k1 - k1 = k1 - 1 - wgt1 = alog(ptop/pq(k2)) / alog(pq(k1)/pq(k2)) -Clin wgt1=(ptop-Pq(k2))/(Pq(k1)-Pq(k2)) - wgt2 = 1.0 - wgt1 - twtop = twq(k1) * wgt1 + twq(k2) * wgt2 - rhtop = rhq(k1) * wgt1 + rhq(k2) * wgt2 - END IF -C - IF (trace) print *, - + 'HERE7: ptop,k1,pq(k1),twtop,rhtop,k2,wgt1, wgt2', ptop, - + k1, pq(k1), twtop, rhtop, k2, wgt1, wgt2 -C -C Calculate temp and wet-bulb ranges below precip generating level. - DO 20 i = 1, k1 - twmax = amax1(twq(i),twmax) - 20 CONTINUE -C -C Gross check for solid precip, initialize ice fraction. - IF (trace) print *, twmax - IF (twtop.le.twice) THEN - icefrac = 1.0 - IF (twmax.le.twmelt) THEN ! gross check for solid precip. - IF (trace) PRINT *, 'solid' - ptyp = 1 ! solid precip - RETURN - END IF - lll = 0 - ELSE - icefrac = 0.0 - lll = 1 - END IF -C -C Loop downward through sounding from highest precip generating level. - 30 CONTINUE -C - IF (trace) PRINT *, ptop, twtop - 273.15, icefrac - IF (trace) print *, 'P,Tw,frac,twq(k1)', ptop, twtop - 273.15, - + icefrac, twq(k1) - IF (icefrac.ge.1.0) THEN ! starting as all ice - IF (trace) print *, 'ICEFRAC=1', icefrac - print *, 'twq twmwelt twtop ', twq(k1), twmelt, twtop - IF (twq(k1).lt.twmelt) GO TO 40 ! cannot commence melting - IF (twq(k1).eq.twtop) GO TO 40 ! both equal twmelt, nothing h - wgt1 = (twmelt-twq(k1)) / (twtop-twq(k1)) - rhavg = rhq(k1) + wgt1 * (rhtop-rhq(k1)) / 2 - dtavg = (twmelt-twq(k1)) / 2 - dpk = wgt1 * alog(pq(k1)/ptop) !lin dpk=wgt1*(Pq(k1)-Ptop) -C mye=emelt*(1.0-(1.0-Rhavg)*efac) - mye = emelt * rhavg ** efac - icefrac = icefrac + dpk * dtavg / mye - IF (trace) print *, - + 'HERE8: wgt1,rhavg,dtavg,dpk,mye,icefrac', wgt1, rhavg, - + dtavg, dpk, mye, icefrac - ELSE IF (icefrac.le.0.0) THEN ! starting as all liquid - IF (trace) print *, 'HERE9: twtop,twq(k1),k1,lll', twtop, - + twq(k1), k1, lll - lll = 1 -C If (Twq(k1).le.Twice) icefrac=1.0 ! autoconvert -C Goto 1020 - IF (twq(k1).gt.twice) GO TO 40 ! cannot commence freezing - IF (twq(k1).eq.twtop) THEN - wgt1 = 0.5 - ELSE - wgt1 = (twice-twq(k1)) / (twtop-twq(k1)) - END IF - rhavg = rhq(k1) + wgt1 * (rhtop-rhq(k1)) / 2 - dtavg = twmelt - (twq(k1)+twice) / 2 - dpk = wgt1 * alog(pq(k1)/ptop) !lin dpk=wgt1*(Pq(k1)-Ptop) -C mye=emelt*(1.0-(1.0-Rhavg)*efac) - mye = emelt * rhavg ** efac - icefrac = icefrac + dpk * dtavg / mye - IF (trace) print *, 'HERE10: wgt1,rhtop,rhq(k1),dtavg', - + wgt1, rhtop, rhq(k1), dtavg - ELSE IF ((twq(k1).le.twmelt).and.(twq(k1).lt.twmelt)) THEN ! mix - rhavg = (rhq(k1)+rhtop) / 2 - dtavg = twmelt - (twq(k1)+twtop) / 2 - dpk = alog(pq(k1)/ptop) !lin dpk=Pq(k1)-Ptop -C mye=emelt*(1.0-(1.0-Rhavg)*efac) - mye = emelt * rhavg ** efac - icefrac = icefrac + dpk * dtavg / mye - - IF (trace) print *, 'HERE11: twq(K1),twtop', twq(k1), - + twtop - ELSE ! mix where Tw curve crosses twmelt in layer - IF (twq(k1).eq.twtop) GO TO 40 ! 
both equal twmelt, nothing h - wgt1 = (twmelt-twq(k1)) / (twtop-twq(k1)) - wgt2 = 1.0 - wgt1 - rhavg = rhtop + wgt2 * (rhq(k1)-rhtop) / 2 - dtavg = (twmelt-twtop) / 2 - dpk = wgt2 * alog(pq(k1)/ptop) !lin dpk=wgt2*(Pq(k1)-Ptop) -C mye=emelt*(1.0-(1.0-Rhavg)*efac) - mye = emelt * rhavg ** efac - icefrac = icefrac + dpk * dtavg / mye - icefrac = amin1(1.0,amax1(icefrac,0.0)) - IF (trace) print *, 'HERE12: twq(k1),twtop,icefrac,wgt1,wg - +t2,rhavg,rhtop,rhq(k1),dtavg,k1', twq(k1), twtop, icefrac, wgt1, - + wgt2, rhavg, rhtop, rhq(k1), dtavg, k1 - IF (icefrac.le.0.0) THEN -C If (Twq(k1).le.Twice) icefrac=1.0 ! autoconvert -C Goto 1020 - IF (twq(k1).gt.twice) GO TO 40 ! cannot commence freezin - wgt1 = (twice-twq(k1)) / (twtop-twq(k1)) - dtavg = twmelt - (twq(k1)+twice) / 2 - IF (trace) WRITE (20,*) 'IN IF' - ELSE - dtavg = (twmelt-twq(k1)) / 2 - IF (trace) WRITE (20,*) 'IN ELSE' - END IF - IF (trace) print *, 'NEW ICE FRAC CALC' - rhavg = rhq(k1) + wgt1 * (rhtop-rhq(k1)) / 2 - dpk = wgt1 * alog(pq(k1)/ptop) !lin dpk=wgt1*(Pq(k1)-Ptop) -C mye=emelt*(1.0-(1.0-Rhavg)*efac) - mye = emelt * rhavg ** efac - icefrac = icefrac + dpk * dtavg / mye - IF (trace) print *, 'HERE13: icefrac,k1,dtavg,rhavg', - + icefrac, k1, dtavg, rhavg - END IF -C - icefrac = amin1(1.0,amax1(icefrac,0.0)) - IF (trace) print *, 'NEW ICEFRAC:', icefrac, icefrac -C -C Get next level down if there is one, loop back. - 40 IF (k1.gt.1) THEN - IF (trace) WRITE (20,*) 'LOOPING BACK' - twtop = twq(k1) - ptop = pq(k1) - rhtop = rhq(k1) - k1 = k1 - 1 - GO TO 30 - END IF -C -C -C Determine precip type based on snow fraction and surface wet-bulb. -C If (trace) Print *,Pq(k1),Twq(k1)-273.15,icefrac -C - IF (trace) print *, 'P,Tw,frac,lll', pq(k1), twq(k2) - 273.15, - + icefrac, lll -C -c print *, 'icefrac ', icefrac - IF (icefrac.ge.slim) THEN - IF (lll.ne.0) THEN - ptyp = 2 ! Ice Pellets JC 9/16/99 - IF (trace) print *, 'frozen' - ELSE - ptyp = 1 ! Snow - print *, 'snow' - IF (trace) print *, 'snow' - END IF - ELSE IF (icefrac.le.rlim) THEN - IF (twq(1).lt.tz) THEN - print *, 'aha! frz' - ptyp = 4 ! Freezing Precip - IF (trace) print *, 'freezing' - ELSE - ptyp = 8 ! Rain - print *, 'rain' - IF (trace) print *, 'liquid' - END IF - ELSE - IF (trace) print *, 'Mix' - IF (twq(1).lt.tz) THEN - IF (trace) print *, 'freezing' -cGSM not sure what to do when 'mix' is predicted; I chose sleet as -cGSK a shaky best option - - ptyp = 2 ! Ice Pellets -c ptyp = 5 ! Mix - ELSE -c ptyp = 5 ! Mix - ptyp = 2 ! Ice Pellets - END IF - END IF - IF (trace) print *, "Returned ptyp is:ptyp,lll ", ptyp, lll - IF (trace) print *, "Returned icefrac is: ", icefrac - RETURN -C - END diff --git a/sorc/gfs_bufr.fd/funcphys.f b/sorc/gfs_bufr.fd/funcphys.f deleted file mode 100755 index fd30d1568f..0000000000 --- a/sorc/gfs_bufr.fd/funcphys.f +++ /dev/null @@ -1,2899 +0,0 @@ -!------------------------------------------------------------------------------- -module funcphys -!$$$ Module Documentation Block -! -! Module: funcphys API for basic thermodynamic physics -! Author: Iredell Org: W/NX23 Date: 1999-03-01 -! -! Abstract: This module provides an Application Program Interface -! for computing basic thermodynamic physics functions, in particular -! (1) saturation vapor pressure as a function of temperature, -! (2) dewpoint temperature as a function of vapor pressure, -! (3) equivalent potential temperature as a function of temperature -! and scaled pressure to the kappa power, -! (4) temperature and specific humidity along a moist adiabat -! 
as functions of equivalent potential temperature and -! scaled pressure to the kappa power, -! (5) scaled pressure to the kappa power as a function of pressure, and -! (6) temperature at the lifting condensation level as a function -! of temperature and dewpoint depression. -! The entry points required to set up lookup tables start with a "g". -! All the other entry points are functions starting with an "f" or -! are subroutines starting with an "s". These other functions and -! subroutines are elemental; that is, they return a scalar if they -! are passed only scalars, but they return an array if they are passed -! an array. These other functions and subroutines can be inlined, too. -! -! Program History Log: -! 1999-03-01 Mark Iredell -! 1999-10-15 Mark Iredell SI unit for pressure (Pascals) -! 2001-02-26 Mark Iredell Ice phase changes of Hong and Moorthi -! -! Public Variables: -! krealfp Integer parameter kind or length of reals (=kind_phys) -! -! Public Subprograms: -! gpvsl Compute saturation vapor pressure over liquid table -! -! fpvsl Elementally compute saturation vapor pressure over liquid -! function result Real(krealfp) saturation vapor pressure in Pascals -! t Real(krealfp) temperature in Kelvin -! -! fpvslq Elementally compute saturation vapor pressure over liquid -! function result Real(krealfp) saturation vapor pressure in Pascals -! t Real(krealfp) temperature in Kelvin -! -! fpvslx Elementally compute saturation vapor pressure over liquid -! function result Real(krealfp) saturation vapor pressure in Pascals -! t Real(krealfp) temperature in Kelvin -! -! gpvsi Compute saturation vapor pressure over ice table -! -! fpvsi Elementally compute saturation vapor pressure over ice -! function result Real(krealfp) saturation vapor pressure in Pascals -! t Real(krealfp) temperature in Kelvin -! -! fpvsiq Elementally compute saturation vapor pressure over ice -! function result Real(krealfp) saturation vapor pressure in Pascals -! t Real(krealfp) temperature in Kelvin -! -! fpvsix Elementally compute saturation vapor pressure over ice -! function result Real(krealfp) saturation vapor pressure in Pascals -! t Real(krealfp) temperature in Kelvin -! -! gpvs Compute saturation vapor pressure table -! -! fpvs Elementally compute saturation vapor pressure -! function result Real(krealfp) saturation vapor pressure in Pascals -! t Real(krealfp) temperature in Kelvin -! -! fpvsq Elementally compute saturation vapor pressure -! function result Real(krealfp) saturation vapor pressure in Pascals -! t Real(krealfp) temperature in Kelvin -! -! fpvsx Elementally compute saturation vapor pressure -! function result Real(krealfp) saturation vapor pressure in Pascals -! t Real(krealfp) temperature in Kelvin -! -! gtdpl Compute dewpoint temperature over liquid table -! -! ftdpl Elementally compute dewpoint temperature over liquid -! function result Real(krealfp) dewpoint temperature in Kelvin -! pv Real(krealfp) vapor pressure in Pascals -! -! ftdplq Elementally compute dewpoint temperature over liquid -! function result Real(krealfp) dewpoint temperature in Kelvin -! pv Real(krealfp) vapor pressure in Pascals -! -! ftdplx Elementally compute dewpoint temperature over liquid -! function result Real(krealfp) dewpoint temperature in Kelvin -! pv Real(krealfp) vapor pressure in Pascals -! -! ftdplxg Elementally compute dewpoint temperature over liquid -! function result Real(krealfp) dewpoint temperature in Kelvin -! t Real(krealfp) guess dewpoint temperature in Kelvin -! 
pv Real(krealfp) vapor pressure in Pascals -! -! gtdpi Compute dewpoint temperature table over ice -! -! ftdpi Elementally compute dewpoint temperature over ice -! function result Real(krealfp) dewpoint temperature in Kelvin -! pv Real(krealfp) vapor pressure in Pascals -! -! ftdpiq Elementally compute dewpoint temperature over ice -! function result Real(krealfp) dewpoint temperature in Kelvin -! pv Real(krealfp) vapor pressure in Pascals -! -! ftdpix Elementally compute dewpoint temperature over ice -! function result Real(krealfp) dewpoint temperature in Kelvin -! pv Real(krealfp) vapor pressure in Pascals -! -! ftdpixg Elementally compute dewpoint temperature over ice -! function result Real(krealfp) dewpoint temperature in Kelvin -! t Real(krealfp) guess dewpoint temperature in Kelvin -! pv Real(krealfp) vapor pressure in Pascals -! -! gtdp Compute dewpoint temperature table -! -! ftdp Elementally compute dewpoint temperature -! function result Real(krealfp) dewpoint temperature in Kelvin -! pv Real(krealfp) vapor pressure in Pascals -! -! ftdpq Elementally compute dewpoint temperature -! function result Real(krealfp) dewpoint temperature in Kelvin -! pv Real(krealfp) vapor pressure in Pascals -! -! ftdpx Elementally compute dewpoint temperature -! function result Real(krealfp) dewpoint temperature in Kelvin -! pv Real(krealfp) vapor pressure in Pascals -! -! ftdpxg Elementally compute dewpoint temperature -! function result Real(krealfp) dewpoint temperature in Kelvin -! t Real(krealfp) guess dewpoint temperature in Kelvin -! pv Real(krealfp) vapor pressure in Pascals -! -! gthe Compute equivalent potential temperature table -! -! fthe Elementally compute equivalent potential temperature -! function result Real(krealfp) equivalent potential temperature in Kelvin -! t Real(krealfp) LCL temperature in Kelvin -! pk Real(krealfp) LCL pressure over 1e5 Pa to the kappa power -! -! ftheq Elementally compute equivalent potential temperature -! function result Real(krealfp) equivalent potential temperature in Kelvin -! t Real(krealfp) LCL temperature in Kelvin -! pk Real(krealfp) LCL pressure over 1e5 Pa to the kappa power -! -! fthex Elementally compute equivalent potential temperature -! function result Real(krealfp) equivalent potential temperature in Kelvin -! t Real(krealfp) LCL temperature in Kelvin -! pk Real(krealfp) LCL pressure over 1e5 Pa to the kappa power -! -! gtma Compute moist adiabat tables -! -! stma Elementally compute moist adiabat temperature and moisture -! the Real(krealfp) equivalent potential temperature in Kelvin -! pk Real(krealfp) pressure over 1e5 Pa to the kappa power -! tma Real(krealfp) parcel temperature in Kelvin -! qma Real(krealfp) parcel specific humidity in kg/kg -! -! stmaq Elementally compute moist adiabat temperature and moisture -! the Real(krealfp) equivalent potential temperature in Kelvin -! pk Real(krealfp) pressure over 1e5 Pa to the kappa power -! tma Real(krealfp) parcel temperature in Kelvin -! qma Real(krealfp) parcel specific humidity in kg/kg -! -! stmax Elementally compute moist adiabat temperature and moisture -! the Real(krealfp) equivalent potential temperature in Kelvin -! pk Real(krealfp) pressure over 1e5 Pa to the kappa power -! tma Real(krealfp) parcel temperature in Kelvin -! qma Real(krealfp) parcel specific humidity in kg/kg -! -! stmaxg Elementally compute moist adiabat temperature and moisture -! tg Real(krealfp) guess parcel temperature in Kelvin -! the Real(krealfp) equivalent potential temperature in Kelvin -! 
pk Real(krealfp) pressure over 1e5 Pa to the kappa power -! tma Real(krealfp) parcel temperature in Kelvin -! qma Real(krealfp) parcel specific humidity in kg/kg -! -! gpkap Compute pressure to the kappa table -! -! fpkap Elementally raise pressure to the kappa power. -! function result Real(krealfp) p over 1e5 Pa to the kappa power -! p Real(krealfp) pressure in Pascals -! -! fpkapq Elementally raise pressure to the kappa power. -! function result Real(krealfp) p over 1e5 Pa to the kappa power -! p Real(krealfp) pressure in Pascals -! -! fpkapo Elementally raise pressure to the kappa power. -! function result Real(krealfp) p over 1e5 Pa to the kappa power -! p Real(krealfp) surface pressure in Pascals -! -! fpkapx Elementally raise pressure to the kappa power. -! function result Real(krealfp) p over 1e5 Pa to the kappa power -! p Real(krealfp) pressure in Pascals -! -! grkap Compute pressure to the 1/kappa table -! -! frkap Elementally raise pressure to the 1/kappa power. -! function result Real(krealfp) pressure in Pascals -! pkap Real(krealfp) p over 1e5 Pa to the 1/kappa power -! -! frkapq Elementally raise pressure to the kappa power. -! function result Real(krealfp) pressure in Pascals -! pkap Real(krealfp) p over 1e5 Pa to the kappa power -! -! frkapx Elementally raise pressure to the kappa power. -! function result Real(krealfp) pressure in Pascals -! pkap Real(krealfp) p over 1e5 Pa to the kappa power -! -! gtlcl Compute LCL temperature table -! -! ftlcl Elementally compute LCL temperature. -! function result Real(krealfp) temperature at the LCL in Kelvin -! t Real(krealfp) temperature in Kelvin -! tdpd Real(krealfp) dewpoint depression in Kelvin -! -! ftlclq Elementally compute LCL temperature. -! function result Real(krealfp) temperature at the LCL in Kelvin -! t Real(krealfp) temperature in Kelvin -! tdpd Real(krealfp) dewpoint depression in Kelvin -! -! ftlclo Elementally compute LCL temperature. -! function result Real(krealfp) temperature at the LCL in Kelvin -! t Real(krealfp) temperature in Kelvin -! tdpd Real(krealfp) dewpoint depression in Kelvin -! -! ftlclx Elementally compute LCL temperature. -! function result Real(krealfp) temperature at the LCL in Kelvin -! t Real(krealfp) temperature in Kelvin -! tdpd Real(krealfp) dewpoint depression in Kelvin -! -! gfuncphys Compute all physics function tables -! -! Attributes: -! Language: Fortran 90 -! -!$$$ - use machine,only:kind_phys - use physcons - implicit none - private -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -! Public Variables -! integer,public,parameter:: krealfp=selected_real_kind(15,45) - integer,public,parameter:: krealfp=kind_phys -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -! Private Variables - real(krealfp),parameter:: psatb=con_psat*1.e-5 - integer,parameter:: nxpvsl=7501 - real(krealfp) c1xpvsl,c2xpvsl,tbpvsl(nxpvsl) - integer,parameter:: nxpvsi=7501 - real(krealfp) c1xpvsi,c2xpvsi,tbpvsi(nxpvsi) - integer,parameter:: nxpvs=7501 - real(krealfp) c1xpvs,c2xpvs,tbpvs(nxpvs) - integer,parameter:: nxtdpl=5001 - real(krealfp) c1xtdpl,c2xtdpl,tbtdpl(nxtdpl) - integer,parameter:: nxtdpi=5001 - real(krealfp) c1xtdpi,c2xtdpi,tbtdpi(nxtdpi) - integer,parameter:: nxtdp=5001 - real(krealfp) c1xtdp,c2xtdp,tbtdp(nxtdp) - integer,parameter:: nxthe=241,nythe=151 - real(krealfp) c1xthe,c2xthe,c1ythe,c2ythe,tbthe(nxthe,nythe) - integer,parameter:: nxma=151,nyma=121 - real(krealfp) c1xma,c2xma,c1yma,c2yma,tbtma(nxma,nyma),tbqma(nxma,nyma) -! 
integer,parameter:: nxpkap=5501 - integer,parameter:: nxpkap=11001 - real(krealfp) c1xpkap,c2xpkap,tbpkap(nxpkap) - integer,parameter:: nxrkap=5501 - real(krealfp) c1xrkap,c2xrkap,tbrkap(nxrkap) - integer,parameter:: nxtlcl=151,nytlcl=61 - real(krealfp) c1xtlcl,c2xtlcl,c1ytlcl,c2ytlcl,tbtlcl(nxtlcl,nytlcl) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -! Public Subprograms - public gpvsl,fpvsl,fpvslq,fpvslx - public gpvsi,fpvsi,fpvsiq,fpvsix - public gpvs,fpvs,fpvsq,fpvsx - public gtdpl,ftdpl,ftdplq,ftdplx,ftdplxg - public gtdpi,ftdpi,ftdpiq,ftdpix,ftdpixg - public gtdp,ftdp,ftdpq,ftdpx,ftdpxg - public gthe,fthe,ftheq,fthex - public gtma,stma,stmaq,stmax,stmaxg - public gpkap,fpkap,fpkapq,fpkapo,fpkapx - public grkap,frkap,frkapq,frkapx - public gtlcl,ftlcl,ftlclq,ftlclo,ftlclx - public gfuncphys -contains -!------------------------------------------------------------------------------- - subroutine gpvsl -!$$$ Subprogram Documentation Block -! -! Subprogram: gpvsl Compute saturation vapor pressure table over liquid -! Author: N Phillips W/NMC2X2 Date: 30 dec 82 -! -! Abstract: Computes saturation vapor pressure table as a function of -! temperature for the table lookup function fpvsl. -! Exact saturation vapor pressures are calculated in subprogram fpvslx. -! The current implementation computes a table with a length -! of 7501 for temperatures ranging from 180. to 330. Kelvin. -! -! Program History Log: -! 91-05-07 Iredell -! 94-12-30 Iredell expand table -! 1999-03-01 Iredell f90 module -! -! Usage: call gpvsl -! -! Subprograms called: -! (fpvslx) inlinable function to compute saturation vapor pressure -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - integer jx - real(krealfp) xmin,xmax,xinc,x,t -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xmin=180.0_krealfp - xmax=330.0_krealfp - xinc=(xmax-xmin)/(nxpvsl-1) -! c1xpvsl=1.-xmin/xinc - c2xpvsl=1./xinc - c1xpvsl=1.-xmin*c2xpvsl - do jx=1,nxpvsl - x=xmin+(jx-1)*xinc - t=x - tbpvsl(jx)=fpvslx(t) - enddo -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end subroutine -!------------------------------------------------------------------------------- - elemental function fpvsl(t) -!$$$ Subprogram Documentation Block -! -! Subprogram: fpvsl Compute saturation vapor pressure over liquid -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Compute saturation vapor pressure from the temperature. -! A linear interpolation is done between values in a lookup table -! computed in gpvsl. See documentation for fpvslx for details. -! Input values outside table range are reset to table extrema. -! The interpolation accuracy is almost 6 decimal places. -! On the Cray, fpvsl is about 4 times faster than exact calculation. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell expand table -! 1999-03-01 Iredell f90 module -! -! Usage: pvsl=fpvsl(t) -! -! Input argument list: -! t Real(krealfp) temperature in Kelvin -! -! Output argument list: -! fpvsl Real(krealfp) saturation vapor pressure in Pascals -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) fpvsl - real(krealfp),intent(in):: t - integer jx - real(krealfp) xj -! 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xj=min(max(c1xpvsl+c2xpvsl*t,1._krealfp),real(nxpvsl,krealfp)) - jx=min(xj,nxpvsl-1._krealfp) - fpvsl=tbpvsl(jx)+(xj-jx)*(tbpvsl(jx+1)-tbpvsl(jx)) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - elemental function fpvslq(t) -!$$$ Subprogram Documentation Block -! -! Subprogram: fpvslq Compute saturation vapor pressure over liquid -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Compute saturation vapor pressure from the temperature. -! A quadratic interpolation is done between values in a lookup table -! computed in gpvsl. See documentation for fpvslx for details. -! Input values outside table range are reset to table extrema. -! The interpolation accuracy is almost 9 decimal places. -! On the Cray, fpvslq is about 3 times faster than exact calculation. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell quadratic interpolation -! 1999-03-01 Iredell f90 module -! -! Usage: pvsl=fpvslq(t) -! -! Input argument list: -! t Real(krealfp) temperature in Kelvin -! -! Output argument list: -! fpvslq Real(krealfp) saturation vapor pressure in Pascals -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) fpvslq - real(krealfp),intent(in):: t - integer jx - real(krealfp) xj,dxj,fj1,fj2,fj3 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xj=min(max(c1xpvsl+c2xpvsl*t,1._krealfp),real(nxpvsl,krealfp)) - jx=min(max(nint(xj),2),nxpvsl-1) - dxj=xj-jx - fj1=tbpvsl(jx-1) - fj2=tbpvsl(jx) - fj3=tbpvsl(jx+1) - fpvslq=(((fj3+fj1)/2-fj2)*dxj+(fj3-fj1)/2)*dxj+fj2 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - elemental function fpvslx(t) -!$$$ Subprogram Documentation Block -! -! Subprogram: fpvslx Compute saturation vapor pressure over liquid -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Exactly compute saturation vapor pressure from temperature. -! The water model assumes a perfect gas, constant specific heats -! for gas and liquid, and neglects the volume of the liquid. -! The model does account for the variation of the latent heat -! of condensation with temperature. The ice option is not included. -! The Clausius-Clapeyron equation is integrated from the triple point -! to get the formula -! pvsl=con_psat*(tr**xa)*exp(xb*(1.-tr)) -! where tr is ttp/t and other values are physical constants. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell exact computation -! 1999-03-01 Iredell f90 module -! -! Usage: pvsl=fpvslx(t) -! -! Input argument list: -! t Real(krealfp) temperature in Kelvin -! -! Output argument list: -! fpvslx Real(krealfp) saturation vapor pressure in Pascals -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) fpvslx - real(krealfp),intent(in):: t - real(krealfp),parameter:: dldt=con_cvap-con_cliq - real(krealfp),parameter:: heat=con_hvap - real(krealfp),parameter:: xpona=-dldt/con_rv - real(krealfp),parameter:: xponb=-dldt/con_rv+heat/(con_rv*con_ttp) - real(krealfp) tr -! 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - tr=con_ttp/t - fpvslx=con_psat*(tr**xpona)*exp(xponb*(1.-tr)) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - subroutine gpvsi -!$$$ Subprogram Documentation Block -! -! Subprogram: gpvsi Compute saturation vapor pressure table over ice -! Author: N Phillips W/NMC2X2 Date: 30 dec 82 -! -! Abstract: Computes saturation vapor pressure table as a function of -! temperature for the table lookup function fpvsi. -! Exact saturation vapor pressures are calculated in subprogram fpvsix. -! The current implementation computes a table with a length -! of 7501 for temperatures ranging from 180. to 330. Kelvin. -! -! Program History Log: -! 91-05-07 Iredell -! 94-12-30 Iredell expand table -! 1999-03-01 Iredell f90 module -! 2001-02-26 Iredell ice phase -! -! Usage: call gpvsi -! -! Subprograms called: -! (fpvsix) inlinable function to compute saturation vapor pressure -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - integer jx - real(krealfp) xmin,xmax,xinc,x,t -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xmin=180.0_krealfp - xmax=330.0_krealfp - xinc=(xmax-xmin)/(nxpvsi-1) -! c1xpvsi=1.-xmin/xinc - c2xpvsi=1./xinc - c1xpvsi=1.-xmin*c2xpvsi - do jx=1,nxpvsi - x=xmin+(jx-1)*xinc - t=x - tbpvsi(jx)=fpvsix(t) - enddo -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end subroutine -!------------------------------------------------------------------------------- - elemental function fpvsi(t) -!$$$ Subprogram Documentation Block -! -! Subprogram: fpvsi Compute saturation vapor pressure over ice -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Compute saturation vapor pressure from the temperature. -! A linear interpolation is done between values in a lookup table -! computed in gpvsi. See documentation for fpvsix for details. -! Input values outside table range are reset to table extrema. -! The interpolation accuracy is almost 6 decimal places. -! On the Cray, fpvsi is about 4 times faster than exact calculation. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell expand table -! 1999-03-01 Iredell f90 module -! 2001-02-26 Iredell ice phase -! -! Usage: pvsi=fpvsi(t) -! -! Input argument list: -! t Real(krealfp) temperature in Kelvin -! -! Output argument list: -! fpvsi Real(krealfp) saturation vapor pressure in Pascals -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) fpvsi - real(krealfp),intent(in):: t - integer jx - real(krealfp) xj -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xj=min(max(c1xpvsi+c2xpvsi*t,1._krealfp),real(nxpvsi,krealfp)) - jx=min(xj,nxpvsi-1._krealfp) - fpvsi=tbpvsi(jx)+(xj-jx)*(tbpvsi(jx+1)-tbpvsi(jx)) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - elemental function fpvsiq(t) -!$$$ Subprogram Documentation Block -! -! Subprogram: fpvsiq Compute saturation vapor pressure over ice -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Compute saturation vapor pressure from the temperature. -! 
A quadratic interpolation is done between values in a lookup table -! computed in gpvsi. See documentation for fpvsix for details. -! Input values outside table range are reset to table extrema. -! The interpolation accuracy is almost 9 decimal places. -! On the Cray, fpvsiq is about 3 times faster than exact calculation. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell quadratic interpolation -! 1999-03-01 Iredell f90 module -! 2001-02-26 Iredell ice phase -! -! Usage: pvsi=fpvsiq(t) -! -! Input argument list: -! t Real(krealfp) temperature in Kelvin -! -! Output argument list: -! fpvsiq Real(krealfp) saturation vapor pressure in Pascals -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) fpvsiq - real(krealfp),intent(in):: t - integer jx - real(krealfp) xj,dxj,fj1,fj2,fj3 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xj=min(max(c1xpvsi+c2xpvsi*t,1._krealfp),real(nxpvsi,krealfp)) - jx=min(max(nint(xj),2),nxpvsi-1) - dxj=xj-jx - fj1=tbpvsi(jx-1) - fj2=tbpvsi(jx) - fj3=tbpvsi(jx+1) - fpvsiq=(((fj3+fj1)/2-fj2)*dxj+(fj3-fj1)/2)*dxj+fj2 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - elemental function fpvsix(t) -!$$$ Subprogram Documentation Block -! -! Subprogram: fpvsix Compute saturation vapor pressure over ice -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Exactly compute saturation vapor pressure from temperature. -! The water model assumes a perfect gas, constant specific heats -! for gas and ice, and neglects the volume of the ice. -! The model does account for the variation of the latent heat -! of condensation with temperature. The liquid option is not included. -! The Clausius-Clapeyron equation is integrated from the triple point -! to get the formula -! pvsi=con_psat*(tr**xa)*exp(xb*(1.-tr)) -! where tr is ttp/t and other values are physical constants. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell exact computation -! 1999-03-01 Iredell f90 module -! 2001-02-26 Iredell ice phase -! -! Usage: pvsi=fpvsix(t) -! -! Input argument list: -! t Real(krealfp) temperature in Kelvin -! -! Output argument list: -! fpvsix Real(krealfp) saturation vapor pressure in Pascals -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) fpvsix - real(krealfp),intent(in):: t - real(krealfp),parameter:: dldt=con_cvap-con_csol - real(krealfp),parameter:: heat=con_hvap+con_hfus - real(krealfp),parameter:: xpona=-dldt/con_rv - real(krealfp),parameter:: xponb=-dldt/con_rv+heat/(con_rv*con_ttp) - real(krealfp) tr -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - tr=con_ttp/t - fpvsix=con_psat*(tr**xpona)*exp(xponb*(1.-tr)) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - subroutine gpvs -!$$$ Subprogram Documentation Block -! -! Subprogram: gpvs Compute saturation vapor pressure table -! Author: N Phillips W/NMC2X2 Date: 30 dec 82 -! -! Abstract: Computes saturation vapor pressure table as a function of -! temperature for the table lookup function fpvs. -! 
Exact saturation vapor pressures are calculated in subprogram fpvsx. -! The current implementation computes a table with a length -! of 7501 for temperatures ranging from 180. to 330. Kelvin. -! -! Program History Log: -! 91-05-07 Iredell -! 94-12-30 Iredell expand table -! 1999-03-01 Iredell f90 module -! 2001-02-26 Iredell ice phase -! -! Usage: call gpvs -! -! Subprograms called: -! (fpvsx) inlinable function to compute saturation vapor pressure -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - integer jx - real(krealfp) xmin,xmax,xinc,x,t -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xmin=180.0_krealfp - xmax=330.0_krealfp - xinc=(xmax-xmin)/(nxpvs-1) -! c1xpvs=1.-xmin/xinc - c2xpvs=1./xinc - c1xpvs=1.-xmin*c2xpvs - do jx=1,nxpvs - x=xmin+(jx-1)*xinc - t=x - tbpvs(jx)=fpvsx(t) - enddo -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end subroutine -!------------------------------------------------------------------------------- - elemental function fpvs(t) -!$$$ Subprogram Documentation Block -! -! Subprogram: fpvs Compute saturation vapor pressure -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Compute saturation vapor pressure from the temperature. -! A linear interpolation is done between values in a lookup table -! computed in gpvs. See documentation for fpvsx for details. -! Input values outside table range are reset to table extrema. -! The interpolation accuracy is almost 6 decimal places. -! On the Cray, fpvs is about 4 times faster than exact calculation. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell expand table -! 1999-03-01 Iredell f90 module -! 2001-02-26 Iredell ice phase -! -! Usage: pvs=fpvs(t) -! -! Input argument list: -! t Real(krealfp) temperature in Kelvin -! -! Output argument list: -! fpvs Real(krealfp) saturation vapor pressure in Pascals -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) fpvs - real(krealfp),intent(in):: t - integer jx - real(krealfp) xj -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xj=min(max(c1xpvs+c2xpvs*t,1._krealfp),real(nxpvs,krealfp)) - jx=min(xj,nxpvs-1._krealfp) - fpvs=tbpvs(jx)+(xj-jx)*(tbpvs(jx+1)-tbpvs(jx)) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - elemental function fpvsq(t) -!$$$ Subprogram Documentation Block -! -! Subprogram: fpvsq Compute saturation vapor pressure -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Compute saturation vapor pressure from the temperature. -! A quadratic interpolation is done between values in a lookup table -! computed in gpvs. See documentation for fpvsx for details. -! Input values outside table range are reset to table extrema. -! The interpolation accuracy is almost 9 decimal places. -! On the Cray, fpvsq is about 3 times faster than exact calculation. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell quadratic interpolation -! 1999-03-01 Iredell f90 module -! 2001-02-26 Iredell ice phase -! -! Usage: pvs=fpvsq(t) -! -! Input argument list: -! t Real(krealfp) temperature in Kelvin -! -! Output argument list: -! 
fpvsq Real(krealfp) saturation vapor pressure in Pascals -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) fpvsq - real(krealfp),intent(in):: t - integer jx - real(krealfp) xj,dxj,fj1,fj2,fj3 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xj=min(max(c1xpvs+c2xpvs*t,1._krealfp),real(nxpvs,krealfp)) - jx=min(max(nint(xj),2),nxpvs-1) - dxj=xj-jx - fj1=tbpvs(jx-1) - fj2=tbpvs(jx) - fj3=tbpvs(jx+1) - fpvsq=(((fj3+fj1)/2-fj2)*dxj+(fj3-fj1)/2)*dxj+fj2 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - elemental function fpvsx(t) -!$$$ Subprogram Documentation Block -! -! Subprogram: fpvsx Compute saturation vapor pressure -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Exactly compute saturation vapor pressure from temperature. -! The saturation vapor pressure over either liquid and ice is computed -! over liquid for temperatures above the triple point, -! over ice for temperatures 20 degress below the triple point, -! and a linear combination of the two for temperatures in between. -! The water model assumes a perfect gas, constant specific heats -! for gas, liquid and ice, and neglects the volume of the condensate. -! The model does account for the variation of the latent heat -! of condensation and sublimation with temperature. -! The Clausius-Clapeyron equation is integrated from the triple point -! to get the formula -! pvsl=con_psat*(tr**xa)*exp(xb*(1.-tr)) -! where tr is ttp/t and other values are physical constants. -! The reference for this computation is Emanuel(1994), pages 116-117. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell exact computation -! 1999-03-01 Iredell f90 module -! 2001-02-26 Iredell ice phase -! -! Usage: pvs=fpvsx(t) -! -! Input argument list: -! t Real(krealfp) temperature in Kelvin -! -! Output argument list: -! fpvsx Real(krealfp) saturation vapor pressure in Pascals -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) fpvsx - real(krealfp),intent(in):: t - real(krealfp),parameter:: tliq=con_ttp - real(krealfp),parameter:: tice=con_ttp-20.0 - real(krealfp),parameter:: dldtl=con_cvap-con_cliq - real(krealfp),parameter:: heatl=con_hvap - real(krealfp),parameter:: xponal=-dldtl/con_rv - real(krealfp),parameter:: xponbl=-dldtl/con_rv+heatl/(con_rv*con_ttp) - real(krealfp),parameter:: dldti=con_cvap-con_csol - real(krealfp),parameter:: heati=con_hvap+con_hfus - real(krealfp),parameter:: xponai=-dldti/con_rv - real(krealfp),parameter:: xponbi=-dldti/con_rv+heati/(con_rv*con_ttp) - real(krealfp) tr,w,pvl,pvi -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - tr=con_ttp/t - if(t.ge.tliq) then - fpvsx=con_psat*(tr**xponal)*exp(xponbl*(1.-tr)) - elseif(t.lt.tice) then - fpvsx=con_psat*(tr**xponai)*exp(xponbi*(1.-tr)) - else - w=(t-tice)/(tliq-tice) - pvl=con_psat*(tr**xponal)*exp(xponbl*(1.-tr)) - pvi=con_psat*(tr**xponai)*exp(xponbi*(1.-tr)) - fpvsx=w*pvl+(1.-w)*pvi - endif -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - subroutine gtdpl -!$$$ Subprogram Documentation Block -! -! Subprogram: gtdpl Compute dewpoint temperature over liquid table -! 
Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Compute dewpoint temperature table as a function of -! vapor pressure for inlinable function ftdpl. -! Exact dewpoint temperatures are calculated in subprogram ftdplxg. -! The current implementation computes a table with a length -! of 5001 for vapor pressures ranging from 1 to 10001 Pascals -! giving a dewpoint temperature range of 208 to 319 Kelvin. -! -! Program History Log: -! 91-05-07 Iredell -! 94-12-30 Iredell expand table -! 1999-03-01 Iredell f90 module -! -! Usage: call gtdpl -! -! Subprograms called: -! (ftdplxg) inlinable function to compute dewpoint temperature over liquid -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - integer jx - real(krealfp) xmin,xmax,xinc,t,x,pv -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xmin=1 - xmax=10001 - xinc=(xmax-xmin)/(nxtdpl-1) - c1xtdpl=1.-xmin/xinc - c2xtdpl=1./xinc - t=208.0 - do jx=1,nxtdpl - x=xmin+(jx-1)*xinc - pv=x - t=ftdplxg(t,pv) - tbtdpl(jx)=t - enddo -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end subroutine -!------------------------------------------------------------------------------- - elemental function ftdpl(pv) -!$$$ Subprogram Documentation Block -! -! Subprogram: ftdpl Compute dewpoint temperature over liquid -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Compute dewpoint temperature from vapor pressure. -! A linear interpolation is done between values in a lookup table -! computed in gtdpl. See documentation for ftdplxg for details. -! Input values outside table range are reset to table extrema. -! The interpolation accuracy is better than 0.0005 Kelvin -! for dewpoint temperatures greater than 250 Kelvin, -! but decreases to 0.02 Kelvin for a dewpoint around 230 Kelvin. -! On the Cray, ftdpl is about 75 times faster than exact calculation. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell expand table -! 1999-03-01 Iredell f90 module -! -! Usage: tdpl=ftdpl(pv) -! -! Input argument list: -! pv Real(krealfp) vapor pressure in Pascals -! -! Output argument list: -! ftdpl Real(krealfp) dewpoint temperature in Kelvin -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) ftdpl - real(krealfp),intent(in):: pv - integer jx - real(krealfp) xj -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xj=min(max(c1xtdpl+c2xtdpl*pv,1._krealfp),real(nxtdpl,krealfp)) - jx=min(xj,nxtdpl-1._krealfp) - ftdpl=tbtdpl(jx)+(xj-jx)*(tbtdpl(jx+1)-tbtdpl(jx)) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - elemental function ftdplq(pv) -!$$$ Subprogram Documentation Block -! -! Subprogram: ftdplq Compute dewpoint temperature over liquid -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Compute dewpoint temperature from vapor pressure. -! A quadratic interpolation is done between values in a lookup table -! computed in gtdpl. see documentation for ftdplxg for details. -! Input values outside table range are reset to table extrema. -! the interpolation accuracy is better than 0.00001 Kelvin -! for dewpoint temperatures greater than 250 Kelvin, -! but decreases to 0.002 Kelvin for a dewpoint around 230 Kelvin. -! 
On the Cray, ftdplq is about 60 times faster than exact calculation. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell quadratic interpolation -! 1999-03-01 Iredell f90 module -! -! Usage: tdpl=ftdplq(pv) -! -! Input argument list: -! pv Real(krealfp) vapor pressure in Pascals -! -! Output argument list: -! ftdplq Real(krealfp) dewpoint temperature in Kelvin -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) ftdplq - real(krealfp),intent(in):: pv - integer jx - real(krealfp) xj,dxj,fj1,fj2,fj3 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xj=min(max(c1xtdpl+c2xtdpl*pv,1._krealfp),real(nxtdpl,krealfp)) - jx=min(max(nint(xj),2),nxtdpl-1) - dxj=xj-jx - fj1=tbtdpl(jx-1) - fj2=tbtdpl(jx) - fj3=tbtdpl(jx+1) - ftdplq=(((fj3+fj1)/2-fj2)*dxj+(fj3-fj1)/2)*dxj+fj2 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - elemental function ftdplx(pv) -!$$$ Subprogram Documentation Block -! -! Subprogram: ftdplx Compute dewpoint temperature over liquid -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: exactly compute dewpoint temperature from vapor pressure. -! An approximate dewpoint temperature for function ftdplxg -! is obtained using ftdpl so gtdpl must be already called. -! See documentation for ftdplxg for details. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell exact computation -! 1999-03-01 Iredell f90 module -! -! Usage: tdpl=ftdplx(pv) -! -! Input argument list: -! pv Real(krealfp) vapor pressure in Pascals -! -! Output argument list: -! ftdplx Real(krealfp) dewpoint temperature in Kelvin -! -! Subprograms called: -! (ftdpl) inlinable function to compute dewpoint temperature over liquid -! (ftdplxg) inlinable function to compute dewpoint temperature over liquid -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) ftdplx - real(krealfp),intent(in):: pv - real(krealfp) tg -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - tg=ftdpl(pv) - ftdplx=ftdplxg(tg,pv) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - elemental function ftdplxg(tg,pv) -!$$$ Subprogram Documentation Block -! -! Subprogram: ftdplxg Compute dewpoint temperature over liquid -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Exactly compute dewpoint temperature from vapor pressure. -! A guess dewpoint temperature must be provided. -! The water model assumes a perfect gas, constant specific heats -! for gas and liquid, and neglects the volume of the liquid. -! The model does account for the variation of the latent heat -! of condensation with temperature. The ice option is not included. -! The Clausius-Clapeyron equation is integrated from the triple point -! to get the formula -! pvs=con_psat*(tr**xa)*exp(xb*(1.-tr)) -! where tr is ttp/t and other values are physical constants. -! The formula is inverted by iterating Newtonian approximations -! for each pvs until t is found to within 1.e-6 Kelvin. -! This function can be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 
94-12-30 Iredell exact computation -! 1999-03-01 Iredell f90 module -! -! Usage: tdpl=ftdplxg(tg,pv) -! -! Input argument list: -! tg Real(krealfp) guess dewpoint temperature in Kelvin -! pv Real(krealfp) vapor pressure in Pascals -! -! Output argument list: -! ftdplxg Real(krealfp) dewpoint temperature in Kelvin -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) ftdplxg - real(krealfp),intent(in):: tg,pv - real(krealfp),parameter:: terrm=1.e-6 - real(krealfp),parameter:: dldt=con_cvap-con_cliq - real(krealfp),parameter:: heat=con_hvap - real(krealfp),parameter:: xpona=-dldt/con_rv - real(krealfp),parameter:: xponb=-dldt/con_rv+heat/(con_rv*con_ttp) - real(krealfp) t,tr,pvt,el,dpvt,terr - integer i -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - t=tg - do i=1,100 - tr=con_ttp/t - pvt=con_psat*(tr**xpona)*exp(xponb*(1.-tr)) - el=heat+dldt*(t-con_ttp) - dpvt=el*pvt/(con_rv*t**2) - terr=(pvt-pv)/dpvt - t=t-terr - if(abs(terr).le.terrm) exit - enddo - ftdplxg=t -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - subroutine gtdpi -!$$$ Subprogram Documentation Block -! -! Subprogram: gtdpi Compute dewpoint temperature over ice table -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Compute dewpoint temperature table as a function of -! vapor pressure for inlinable function ftdpi. -! Exact dewpoint temperatures are calculated in subprogram ftdpixg. -! The current implementation computes a table with a length -! of 5001 for vapor pressures ranging from 0.1 to 1000.1 Pascals -! giving a dewpoint temperature range of 197 to 279 Kelvin. -! -! Program History Log: -! 91-05-07 Iredell -! 94-12-30 Iredell expand table -! 1999-03-01 Iredell f90 module -! 2001-02-26 Iredell ice phase -! -! Usage: call gtdpi -! -! Subprograms called: -! (ftdpixg) inlinable function to compute dewpoint temperature over ice -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - integer jx - real(krealfp) xmin,xmax,xinc,t,x,pv -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xmin=0.1 - xmax=1000.1 - xinc=(xmax-xmin)/(nxtdpi-1) - c1xtdpi=1.-xmin/xinc - c2xtdpi=1./xinc - t=197.0 - do jx=1,nxtdpi - x=xmin+(jx-1)*xinc - pv=x - t=ftdpixg(t,pv) - tbtdpi(jx)=t - enddo -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end subroutine -!------------------------------------------------------------------------------- - elemental function ftdpi(pv) -!$$$ Subprogram Documentation Block -! -! Subprogram: ftdpi Compute dewpoint temperature over ice -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Compute dewpoint temperature from vapor pressure. -! A linear interpolation is done between values in a lookup table -! computed in gtdpi. See documentation for ftdpixg for details. -! Input values outside table range are reset to table extrema. -! The interpolation accuracy is better than 0.0005 Kelvin -! for dewpoint temperatures greater than 250 Kelvin, -! but decreases to 0.02 Kelvin for a dewpoint around 230 Kelvin. -! On the Cray, ftdpi is about 75 times faster than exact calculation. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell expand table -! 1999-03-01 Iredell f90 module -! 2001-02-26 Iredell ice phase -! -! 
Usage: tdpi=ftdpi(pv) -! -! Input argument list: -! pv Real(krealfp) vapor pressure in Pascals -! -! Output argument list: -! ftdpi Real(krealfp) dewpoint temperature in Kelvin -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) ftdpi - real(krealfp),intent(in):: pv - integer jx - real(krealfp) xj -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xj=min(max(c1xtdpi+c2xtdpi*pv,1._krealfp),real(nxtdpi,krealfp)) - jx=min(xj,nxtdpi-1._krealfp) - ftdpi=tbtdpi(jx)+(xj-jx)*(tbtdpi(jx+1)-tbtdpi(jx)) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - elemental function ftdpiq(pv) -!$$$ Subprogram Documentation Block -! -! Subprogram: ftdpiq Compute dewpoint temperature over ice -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Compute dewpoint temperature from vapor pressure. -! A quadratic interpolation is done between values in a lookup table -! computed in gtdpi. see documentation for ftdpixg for details. -! Input values outside table range are reset to table extrema. -! the interpolation accuracy is better than 0.00001 Kelvin -! for dewpoint temperatures greater than 250 Kelvin, -! but decreases to 0.002 Kelvin for a dewpoint around 230 Kelvin. -! On the Cray, ftdpiq is about 60 times faster than exact calculation. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell quadratic interpolation -! 1999-03-01 Iredell f90 module -! 2001-02-26 Iredell ice phase -! -! Usage: tdpi=ftdpiq(pv) -! -! Input argument list: -! pv Real(krealfp) vapor pressure in Pascals -! -! Output argument list: -! ftdpiq Real(krealfp) dewpoint temperature in Kelvin -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) ftdpiq - real(krealfp),intent(in):: pv - integer jx - real(krealfp) xj,dxj,fj1,fj2,fj3 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xj=min(max(c1xtdpi+c2xtdpi*pv,1._krealfp),real(nxtdpi,krealfp)) - jx=min(max(nint(xj),2),nxtdpi-1) - dxj=xj-jx - fj1=tbtdpi(jx-1) - fj2=tbtdpi(jx) - fj3=tbtdpi(jx+1) - ftdpiq=(((fj3+fj1)/2-fj2)*dxj+(fj3-fj1)/2)*dxj+fj2 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - elemental function ftdpix(pv) -!$$$ Subprogram Documentation Block -! -! Subprogram: ftdpix Compute dewpoint temperature over ice -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: exactly compute dewpoint temperature from vapor pressure. -! An approximate dewpoint temperature for function ftdpixg -! is obtained using ftdpi so gtdpi must be already called. -! See documentation for ftdpixg for details. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell exact computation -! 1999-03-01 Iredell f90 module -! 2001-02-26 Iredell ice phase -! -! Usage: tdpi=ftdpix(pv) -! -! Input argument list: -! pv Real(krealfp) vapor pressure in Pascals -! -! Output argument list: -! ftdpix Real(krealfp) dewpoint temperature in Kelvin -! -! Subprograms called: -! (ftdpi) inlinable function to compute dewpoint temperature over ice -! (ftdpixg) inlinable function to compute dewpoint temperature over ice -! -! Attributes: -! Language: Fortran 90. -! 
-!$$$ - implicit none - real(krealfp) ftdpix - real(krealfp),intent(in):: pv - real(krealfp) tg -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - tg=ftdpi(pv) - ftdpix=ftdpixg(tg,pv) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - elemental function ftdpixg(tg,pv) -!$$$ Subprogram Documentation Block -! -! Subprogram: ftdpixg Compute dewpoint temperature over ice -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Exactly compute dewpoint temperature from vapor pressure. -! A guess dewpoint temperature must be provided. -! The water model assumes a perfect gas, constant specific heats -! for gas and ice, and neglects the volume of the ice. -! The model does account for the variation of the latent heat -! of sublimation with temperature. The liquid option is not included. -! The Clausius-Clapeyron equation is integrated from the triple point -! to get the formula -! pvs=con_psat*(tr**xa)*exp(xb*(1.-tr)) -! where tr is ttp/t and other values are physical constants. -! The formula is inverted by iterating Newtonian approximations -! for each pvs until t is found to within 1.e-6 Kelvin. -! This function can be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell exact computation -! 1999-03-01 Iredell f90 module -! 2001-02-26 Iredell ice phase -! -! Usage: tdpi=ftdpixg(tg,pv) -! -! Input argument list: -! tg Real(krealfp) guess dewpoint temperature in Kelvin -! pv Real(krealfp) vapor pressure in Pascals -! -! Output argument list: -! ftdpixg Real(krealfp) dewpoint temperature in Kelvin -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) ftdpixg - real(krealfp),intent(in):: tg,pv - real(krealfp),parameter:: terrm=1.e-6 - real(krealfp),parameter:: dldt=con_cvap-con_csol - real(krealfp),parameter:: heat=con_hvap+con_hfus - real(krealfp),parameter:: xpona=-dldt/con_rv - real(krealfp),parameter:: xponb=-dldt/con_rv+heat/(con_rv*con_ttp) - real(krealfp) t,tr,pvt,el,dpvt,terr - integer i -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - t=tg - do i=1,100 - tr=con_ttp/t - pvt=con_psat*(tr**xpona)*exp(xponb*(1.-tr)) - el=heat+dldt*(t-con_ttp) - dpvt=el*pvt/(con_rv*t**2) - terr=(pvt-pv)/dpvt - t=t-terr - if(abs(terr).le.terrm) exit - enddo - ftdpixg=t -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - subroutine gtdp -!$$$ Subprogram Documentation Block -! -! Subprogram: gtdp Compute dewpoint temperature table -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Compute dewpoint temperature table as a function of -! vapor pressure for inlinable function ftdp. -! Exact dewpoint temperatures are calculated in subprogram ftdpxg. -! The current implementation computes a table with a length -! of 5001 for vapor pressures ranging from 0.5 to 1000.5 Pascals -! giving a dewpoint temperature range of 208 to 319 Kelvin. -! -! Program History Log: -! 91-05-07 Iredell -! 94-12-30 Iredell expand table -! 1999-03-01 Iredell f90 module -! 2001-02-26 Iredell ice phase -! -! Usage: call gtdp -! -! Subprograms called: -! (ftdpxg) inlinable function to compute dewpoint temperature -! -! Attributes: -! Language: Fortran 90. -! 
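The ftdpixg loop just above inverts the integrated Clausius-Clapeyron relation pvs = con_psat * tr**xpona * exp(xponb*(1-tr)), tr = ttp/t, by Newton iteration: each pass evaluates the saturation vapor pressure at the current temperature, divides the residual by dpvs/dt = L*pvs/(Rv*t**2), and stops once the correction falls below 1.e-6 Kelvin. A minimal Python sketch of the same iteration for the ice branch, using representative constants (the numerical values are standard approximations; the Fortran takes them from the physcons module, which is not shown here):

import math

# Representative constants (approximate; the deleted code reads these from physcons)
TTP  = 273.16        # triple-point temperature (K)
PSAT = 610.78        # saturation vapor pressure at the triple point (Pa)
RV   = 461.50        # gas constant for water vapor (J/kg/K)
CVAP = 1846.0        # specific heat of water vapor (J/kg/K)
CSOL = 2106.0        # specific heat of ice (J/kg/K)
HVAP = 2.5e6         # latent heat of vaporization at TTP (J/kg)
HFUS = 3.34e5        # latent heat of fusion at TTP (J/kg)

DLDT  = CVAP - CSOL
HEAT  = HVAP + HFUS
XPONA = -DLDT / RV
XPONB = -DLDT / RV + HEAT / (RV * TTP)

def dewpoint_over_ice(pv, tguess, tol=1e-6, maxit=100):
    """Newton inversion of pvs(t) = PSAT * tr**XPONA * exp(XPONB*(1-tr)), tr = TTP/t."""
    t = tguess
    for _ in range(maxit):
        tr = TTP / t
        pvt = PSAT * tr**XPONA * math.exp(XPONB * (1.0 - tr))  # saturation pressure at t
        el = HEAT + DLDT * (t - TTP)                            # latent heat at t
        dpvt = el * pvt / (RV * t**2)                           # d(pvs)/dt
        terr = (pvt - pv) / dpvt                                # Newton correction
        t -= terr
        if abs(terr) <= tol:
            break
    return t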
-!$$$ - implicit none - integer jx - real(krealfp) xmin,xmax,xinc,t,x,pv -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xmin=0.5 - xmax=10000.5 - xinc=(xmax-xmin)/(nxtdp-1) - c1xtdp=1.-xmin/xinc - c2xtdp=1./xinc - t=208.0 - do jx=1,nxtdp - x=xmin+(jx-1)*xinc - pv=x - t=ftdpxg(t,pv) - tbtdp(jx)=t - enddo -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end subroutine -!------------------------------------------------------------------------------- - elemental function ftdp(pv) -!$$$ Subprogram Documentation Block -! -! Subprogram: ftdp Compute dewpoint temperature -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Compute dewpoint temperature from vapor pressure. -! A linear interpolation is done between values in a lookup table -! computed in gtdp. See documentation for ftdpxg for details. -! Input values outside table range are reset to table extrema. -! The interpolation accuracy is better than 0.0005 Kelvin -! for dewpoint temperatures greater than 250 Kelvin, -! but decreases to 0.02 Kelvin for a dewpoint around 230 Kelvin. -! On the Cray, ftdp is about 75 times faster than exact calculation. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell expand table -! 1999-03-01 Iredell f90 module -! 2001-02-26 Iredell ice phase -! -! Usage: tdp=ftdp(pv) -! -! Input argument list: -! pv Real(krealfp) vapor pressure in Pascals -! -! Output argument list: -! ftdp Real(krealfp) dewpoint temperature in Kelvin -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) ftdp - real(krealfp),intent(in):: pv - integer jx - real(krealfp) xj -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xj=min(max(c1xtdp+c2xtdp*pv,1._krealfp),real(nxtdp,krealfp)) - jx=min(xj,nxtdp-1._krealfp) - ftdp=tbtdp(jx)+(xj-jx)*(tbtdp(jx+1)-tbtdp(jx)) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - elemental function ftdpq(pv) -!$$$ Subprogram Documentation Block -! -! Subprogram: ftdpq Compute dewpoint temperature -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Compute dewpoint temperature from vapor pressure. -! A quadratic interpolation is done between values in a lookup table -! computed in gtdp. see documentation for ftdpxg for details. -! Input values outside table range are reset to table extrema. -! the interpolation accuracy is better than 0.00001 Kelvin -! for dewpoint temperatures greater than 250 Kelvin, -! but decreases to 0.002 Kelvin for a dewpoint around 230 Kelvin. -! On the Cray, ftdpq is about 60 times faster than exact calculation. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell quadratic interpolation -! 1999-03-01 Iredell f90 module -! 2001-02-26 Iredell ice phase -! -! Usage: tdp=ftdpq(pv) -! -! Input argument list: -! pv Real(krealfp) vapor pressure in Pascals -! -! Output argument list: -! ftdpq Real(krealfp) dewpoint temperature in Kelvin -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) ftdpq - real(krealfp),intent(in):: pv - integer jx - real(krealfp) xj,dxj,fj1,fj2,fj3 -! 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xj=min(max(c1xtdp+c2xtdp*pv,1._krealfp),real(nxtdp,krealfp)) - jx=min(max(nint(xj),2),nxtdp-1) - dxj=xj-jx - fj1=tbtdp(jx-1) - fj2=tbtdp(jx) - fj3=tbtdp(jx+1) - ftdpq=(((fj3+fj1)/2-fj2)*dxj+(fj3-fj1)/2)*dxj+fj2 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - elemental function ftdpx(pv) -!$$$ Subprogram Documentation Block -! -! Subprogram: ftdpx Compute dewpoint temperature -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: exactly compute dewpoint temperature from vapor pressure. -! An approximate dewpoint temperature for function ftdpxg -! is obtained using ftdp so gtdp must be already called. -! See documentation for ftdpxg for details. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell exact computation -! 1999-03-01 Iredell f90 module -! 2001-02-26 Iredell ice phase -! -! Usage: tdp=ftdpx(pv) -! -! Input argument list: -! pv Real(krealfp) vapor pressure in Pascals -! -! Output argument list: -! ftdpx Real(krealfp) dewpoint temperature in Kelvin -! -! Subprograms called: -! (ftdp) inlinable function to compute dewpoint temperature -! (ftdpxg) inlinable function to compute dewpoint temperature -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) ftdpx - real(krealfp),intent(in):: pv - real(krealfp) tg -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - tg=ftdp(pv) - ftdpx=ftdpxg(tg,pv) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - elemental function ftdpxg(tg,pv) -!$$$ Subprogram Documentation Block -! -! Subprogram: ftdpxg Compute dewpoint temperature -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Exactly compute dewpoint temperature from vapor pressure. -! A guess dewpoint temperature must be provided. -! The saturation vapor pressure over either liquid and ice is computed -! over liquid for temperatures above the triple point, -! over ice for temperatures 20 degress below the triple point, -! and a linear combination of the two for temperatures in between. -! The water model assumes a perfect gas, constant specific heats -! for gas, liquid and ice, and neglects the volume of the condensate. -! The model does account for the variation of the latent heat -! of condensation and sublimation with temperature. -! The Clausius-Clapeyron equation is integrated from the triple point -! to get the formula -! pvsl=con_psat*(tr**xa)*exp(xb*(1.-tr)) -! where tr is ttp/t and other values are physical constants. -! The reference for this decision is Emanuel(1994), pages 116-117. -! The formula is inverted by iterating Newtonian approximations -! for each pvs until t is found to within 1.e-6 Kelvin. -! This function can be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell exact computation -! 1999-03-01 Iredell f90 module -! 2001-02-26 Iredell ice phase -! -! Usage: tdp=ftdpxg(tg,pv) -! -! Input argument list: -! tg Real(krealfp) guess dewpoint temperature in Kelvin -! pv Real(krealfp) vapor pressure in Pascals -! -! Output argument list: -! ftdpxg Real(krealfp) dewpoint temperature in Kelvin -! -! Attributes: -! Language: Fortran 90. -! 
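Every lookup function in this module maps its argument onto a fractional table index with the precomputed coefficients c1 and c2 (c2 = 1/xinc, c1 = 1 - xmin*c2), clamps it to the table range, and then interpolates: linearly for the plain functions (fpvs, ftdp, ...) and quadratically, through the three nearest table entries, for the q variants such as ftdpq above. A small Python sketch of both interpolation rules as they appear in the deleted code, rewritten for 0-based lists instead of 1-based Fortran arrays:

def table_index(x, xmin, xinc, n):
    """Fractional 1-based index, clamped to [1, n] (mirrors c1 + c2*x in the Fortran)."""
    c2 = 1.0 / xinc
    c1 = 1.0 - xmin * c2
    return min(max(c1 + c2 * x, 1.0), float(n))

def lookup_linear(tb, xj):
    """Linear interpolation between the two bracketing entries (fpvs/ftdp style)."""
    n = len(tb)
    jx = int(min(xj, n - 1.0))            # 1-based lower index
    return tb[jx - 1] + (xj - jx) * (tb[jx] - tb[jx - 1])

def lookup_quadratic(tb, xj):
    """Quadratic interpolation through the three nearest entries (fpvsq/ftdpq style)."""
    n = len(tb)
    jx = min(max(round(xj), 2), n - 1)    # 1-based center index
    dxj = xj - jx
    fj1, fj2, fj3 = tb[jx - 2], tb[jx - 1], tb[jx]
    return (((fj3 + fj1) / 2 - fj2) * dxj + (fj3 - fj1) / 2) * dxj + fj2

With a table built once by the companion g* routine (gtdp, for example, marches the exact Newton solver across the vapor-pressure range, reusing each solution as the first guess for the next entry), a lookup costs only a few memory reads and multiplies, which is why the tables are initialized in gfuncphys and the f* functions are meant to be expanded inline at their call sites.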
-!$$$ - implicit none - real(krealfp) ftdpxg - real(krealfp),intent(in):: tg,pv - real(krealfp),parameter:: terrm=1.e-6 - real(krealfp),parameter:: tliq=con_ttp - real(krealfp),parameter:: tice=con_ttp-20.0 - real(krealfp),parameter:: dldtl=con_cvap-con_cliq - real(krealfp),parameter:: heatl=con_hvap - real(krealfp),parameter:: xponal=-dldtl/con_rv - real(krealfp),parameter:: xponbl=-dldtl/con_rv+heatl/(con_rv*con_ttp) - real(krealfp),parameter:: dldti=con_cvap-con_csol - real(krealfp),parameter:: heati=con_hvap+con_hfus - real(krealfp),parameter:: xponai=-dldti/con_rv - real(krealfp),parameter:: xponbi=-dldti/con_rv+heati/(con_rv*con_ttp) - real(krealfp) t,tr,w,pvtl,pvti,pvt,ell,eli,el,dpvt,terr - integer i -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - t=tg - do i=1,100 - tr=con_ttp/t - if(t.ge.tliq) then - pvt=con_psat*(tr**xponal)*exp(xponbl*(1.-tr)) - el=heatl+dldtl*(t-con_ttp) - dpvt=el*pvt/(con_rv*t**2) - elseif(t.lt.tice) then - pvt=con_psat*(tr**xponai)*exp(xponbi*(1.-tr)) - el=heati+dldti*(t-con_ttp) - dpvt=el*pvt/(con_rv*t**2) - else - w=(t-tice)/(tliq-tice) - pvtl=con_psat*(tr**xponal)*exp(xponbl*(1.-tr)) - pvti=con_psat*(tr**xponai)*exp(xponbi*(1.-tr)) - pvt=w*pvtl+(1.-w)*pvti - ell=heatl+dldtl*(t-con_ttp) - eli=heati+dldti*(t-con_ttp) - dpvt=(w*ell*pvtl+(1.-w)*eli*pvti)/(con_rv*t**2) - endif - terr=(pvt-pv)/dpvt - t=t-terr - if(abs(terr).le.terrm) exit - enddo - ftdpxg=t -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - subroutine gthe -!$$$ Subprogram Documentation Block -! -! Subprogram: gthe Compute equivalent potential temperature table -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Compute equivalent potential temperature table -! as a function of LCL temperature and pressure over 1e5 Pa -! to the kappa power for function fthe. -! Equivalent potential temperatures are calculated in subprogram fthex -! the current implementation computes a table with a first dimension -! of 241 for temperatures ranging from 183.16 to 303.16 Kelvin -! and a second dimension of 151 for pressure over 1e5 Pa -! to the kappa power ranging from 0.04**rocp to 1.10**rocp. -! -! Program History Log: -! 91-05-07 Iredell -! 94-12-30 Iredell expand table -! 1999-03-01 Iredell f90 module -! -! Usage: call gthe -! -! Subprograms called: -! (fthex) inlinable function to compute equiv. pot. temperature -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - integer jx,jy - real(krealfp) xmin,xmax,ymin,ymax,xinc,yinc,x,y,pk,t -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xmin=con_ttp-90._krealfp - xmax=con_ttp+30._krealfp - ymin=0.04_krealfp**con_rocp - ymax=1.10_krealfp**con_rocp - xinc=(xmax-xmin)/(nxthe-1) - c1xthe=1.-xmin/xinc - c2xthe=1./xinc - yinc=(ymax-ymin)/(nythe-1) - c1ythe=1.-ymin/yinc - c2ythe=1./yinc - do jy=1,nythe - y=ymin+(jy-1)*yinc - pk=y - do jx=1,nxthe - x=xmin+(jx-1)*xinc - t=x - tbthe(jx,jy)=fthex(t,pk) - enddo - enddo -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end subroutine -!------------------------------------------------------------------------------- - elemental function fthe(t,pk) -!$$$ Subprogram Documentation Block -! -! Subprogram: fthe Compute equivalent potential temperature -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Compute equivalent potential temperature at the LCL -! 
from temperature and pressure over 1e5 Pa to the kappa power. -! A bilinear interpolation is done between values in a lookup table -! computed in gthe. see documentation for fthex for details. -! Input values outside table range are reset to table extrema, -! except zero is returned for too cold or high LCLs. -! The interpolation accuracy is better than 0.01 Kelvin. -! On the Cray, fthe is almost 6 times faster than exact calculation. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell expand table -! 1999-03-01 Iredell f90 module -! -! Usage: the=fthe(t,pk) -! -! Input argument list: -! t Real(krealfp) LCL temperature in Kelvin -! pk Real(krealfp) LCL pressure over 1e5 Pa to the kappa power -! -! Output argument list: -! fthe Real(krealfp) equivalent potential temperature in Kelvin -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) fthe - real(krealfp),intent(in):: t,pk - integer jx,jy - real(krealfp) xj,yj,ftx1,ftx2 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xj=min(c1xthe+c2xthe*t,real(nxthe,krealfp)) - yj=min(c1ythe+c2ythe*pk,real(nythe,krealfp)) - if(xj.ge.1..and.yj.ge.1.) then - jx=min(xj,nxthe-1._krealfp) - jy=min(yj,nythe-1._krealfp) - ftx1=tbthe(jx,jy)+(xj-jx)*(tbthe(jx+1,jy)-tbthe(jx,jy)) - ftx2=tbthe(jx,jy+1)+(xj-jx)*(tbthe(jx+1,jy+1)-tbthe(jx,jy+1)) - fthe=ftx1+(yj-jy)*(ftx2-ftx1) - else - fthe=0. - endif -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - elemental function ftheq(t,pk) -!$$$ Subprogram Documentation Block -! -! Subprogram: ftheq Compute equivalent potential temperature -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Compute equivalent potential temperature at the LCL -! from temperature and pressure over 1e5 Pa to the kappa power. -! A biquadratic interpolation is done between values in a lookup table -! computed in gthe. see documentation for fthex for details. -! Input values outside table range are reset to table extrema, -! except zero is returned for too cold or high LCLs. -! The interpolation accuracy is better than 0.0002 Kelvin. -! On the Cray, ftheq is almost 3 times faster than exact calculation. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell quadratic interpolation -! 1999-03-01 Iredell f90 module -! -! Usage: the=ftheq(t,pk) -! -! Input argument list: -! t Real(krealfp) LCL temperature in Kelvin -! pk Real(krealfp) LCL pressure over 1e5 Pa to the kappa power -! -! Output argument list: -! ftheq Real(krealfp) equivalent potential temperature in Kelvin -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) ftheq - real(krealfp),intent(in):: t,pk - integer jx,jy - real(krealfp) xj,yj,dxj,dyj - real(krealfp) ft11,ft12,ft13,ft21,ft22,ft23,ft31,ft32,ft33 - real(krealfp) ftx1,ftx2,ftx3 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xj=min(c1xthe+c2xthe*t,real(nxthe,krealfp)) - yj=min(c1ythe+c2ythe*pk,real(nythe,krealfp)) - if(xj.ge.1..and.yj.ge.1.) 
then - jx=min(max(nint(xj),2),nxthe-1) - jy=min(max(nint(yj),2),nythe-1) - dxj=xj-jx - dyj=yj-jy - ft11=tbthe(jx-1,jy-1) - ft12=tbthe(jx-1,jy) - ft13=tbthe(jx-1,jy+1) - ft21=tbthe(jx,jy-1) - ft22=tbthe(jx,jy) - ft23=tbthe(jx,jy+1) - ft31=tbthe(jx+1,jy-1) - ft32=tbthe(jx+1,jy) - ft33=tbthe(jx+1,jy+1) - ftx1=(((ft31+ft11)/2-ft21)*dxj+(ft31-ft11)/2)*dxj+ft21 - ftx2=(((ft32+ft12)/2-ft22)*dxj+(ft32-ft12)/2)*dxj+ft22 - ftx3=(((ft33+ft13)/2-ft23)*dxj+(ft33-ft13)/2)*dxj+ft23 - ftheq=(((ftx3+ftx1)/2-ftx2)*dyj+(ftx3-ftx1)/2)*dyj+ftx2 - else - ftheq=0. - endif -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- -! elemental function fthex(t,pk) - function fthex(t,pk) -!$$$ Subprogram Documentation Block -! -! Subprogram: fthex Compute equivalent potential temperature -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Exactly compute equivalent potential temperature at the LCL -! from temperature and pressure over 1e5 Pa to the kappa power. -! Equivalent potential temperature is constant for a saturated parcel -! rising adiabatically up a moist adiabat when the heat and mass -! of the condensed water are neglected. Ice is also neglected. -! The formula for equivalent potential temperature (Holton) is -! the=t*(pd**(-rocp))*exp(el*eps*pv/(cp*t*pd)) -! where t is the temperature, pv is the saturated vapor pressure, -! pd is the dry pressure p-pv, el is the temperature dependent -! latent heat of condensation hvap+dldt*(t-ttp), and other values -! are physical constants defined in parameter statements in the code. -! Zero is returned if the input values make saturation impossible. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell exact computation -! 1999-03-01 Iredell f90 module -! -! Usage: the=fthex(t,pk) -! -! Input argument list: -! t Real(krealfp) LCL temperature in Kelvin -! pk Real(krealfp) LCL pressure over 1e5 Pa to the kappa power -! -! Output argument list: -! fthex Real(krealfp) equivalent potential temperature in Kelvin -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) fthex - real(krealfp),intent(in):: t,pk - real(krealfp) p,tr,pv,pd,el,expo -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - p=pk**con_cpor - tr=con_ttp/t - pv=psatb*(tr**con_xpona)*exp(con_xponb*(1.-tr)) - pd=p-pv - if(pd.gt.pv) then - el=con_hvap+con_dldt*(t-con_ttp) - expo=el*con_eps*pv/(con_cp*t*pd) - fthex=t*pd**(-con_rocp)*exp(expo) - else - fthex=0. - endif -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - subroutine gtma -!$$$ Subprogram Documentation Block -! -! Subprogram: gtma Compute moist adiabat tables -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Compute temperature and specific humidity tables -! as a function of equivalent potential temperature and -! pressure over 1e5 Pa to the kappa power for subprogram stma. -! Exact parcel temperatures are calculated in subprogram stmaxg. -! The current implementation computes a table with a first dimension -! of 151 for equivalent potential temperatures ranging from 200 to 500 -! Kelvin and a second dimension of 121 for pressure over 1e5 Pa -! to the kappa power ranging from 0.01**rocp to 1.10**rocp. -! -! 
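The exact computation in fthex above follows the abstract: pressure is recovered from pk, saturation vapour pressure comes from the integrated Clausius-Clapeyron relation, and theta-e is t*pd**(-rocp)*exp(el*eps*pv/(cp*t*pd)). A Python sketch of the same steps; the numerical constants are representative values only (they live in physcons, which is not shown here), and pressures are handled in units of 1e5 Pa to mirror psatb:

    import math

    # Representative constants (assumed, not quoted from physcons).
    TTP, PSATB = 273.16, 610.78e-5            # triple point (K); psat in units of 1e5 Pa
    RD, RV, CP = 287.05, 461.50, 1004.6
    HVAP, DLDT = 2.5e6, 1846.0 - 4185.5       # latent heat and dl/dt = cvap - cliq
    ROCP, EPS = RD / CP, RD / RV
    XPONA = -DLDT / RV
    XPONB = XPONA + HVAP / (RV * TTP)

    def thetae_exact(t, pk):
        p = pk ** (1.0 / ROCP)                # back to pressure over 1e5 Pa
        tr = TTP / t
        pv = PSATB * tr ** XPONA * math.exp(XPONB * (1.0 - tr))
        pd = p - pv
        if pd <= pv:                          # saturation impossible: mirror the zero return
            return 0.0
        el = HVAP + DLDT * (t - TTP)
        return t * pd ** (-ROCP) * math.exp(el * EPS * pv / (CP * t * pd))

    print(thetae_exact(300.0, 1.0))           # roughly 360-365 K at 1000 hPa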
Program History Log: -! 91-05-07 Iredell -! 94-12-30 Iredell expand table -! 1999-03-01 Iredell f90 module -! -! Usage: call gtma -! -! Subprograms called: -! (stmaxg) inlinable subprogram to compute parcel temperature -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - integer jx,jy - real(krealfp) xmin,xmax,ymin,ymax,xinc,yinc,x,y,pk,the,t,q,tg -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xmin=200._krealfp - xmax=500._krealfp - ymin=0.01_krealfp**con_rocp - ymax=1.10_krealfp**con_rocp - xinc=(xmax-xmin)/(nxma-1) - c1xma=1.-xmin/xinc - c2xma=1./xinc - yinc=(ymax-ymin)/(nyma-1) - c1yma=1.-ymin/yinc - c2yma=1./yinc - do jy=1,nyma - y=ymin+(jy-1)*yinc - pk=y - tg=xmin*y - do jx=1,nxma - x=xmin+(jx-1)*xinc - the=x - call stmaxg(tg,the,pk,t,q) - tbtma(jx,jy)=t - tbqma(jx,jy)=q - tg=t - enddo - enddo -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end subroutine -!------------------------------------------------------------------------------- - elemental subroutine stma(the,pk,tma,qma) -!$$$ Subprogram Documentation Block -! -! Subprogram: stma Compute moist adiabat temperature -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Compute temperature and specific humidity of a parcel -! lifted up a moist adiabat from equivalent potential temperature -! at the LCL and pressure over 1e5 Pa to the kappa power. -! Bilinear interpolations are done between values in a lookup table -! computed in gtma. See documentation for stmaxg for details. -! Input values outside table range are reset to table extrema. -! The interpolation accuracy is better than 0.01 Kelvin -! and 5.e-6 kg/kg for temperature and humidity, respectively. -! On the Cray, stma is about 35 times faster than exact calculation. -! This subprogram should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell expand table -! 1999-03-01 Iredell f90 module -! -! Usage: call stma(the,pk,tma,qma) -! -! Input argument list: -! the Real(krealfp) equivalent potential temperature in Kelvin -! pk Real(krealfp) pressure over 1e5 Pa to the kappa power -! -! Output argument list: -! tma Real(krealfp) parcel temperature in Kelvin -! qma Real(krealfp) parcel specific humidity in kg/kg -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp),intent(in):: the,pk - real(krealfp),intent(out):: tma,qma - integer jx,jy - real(krealfp) xj,yj,ftx1,ftx2,qx1,qx2 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xj=min(max(c1xma+c2xma*the,1._krealfp),real(nxma,krealfp)) - yj=min(max(c1yma+c2yma*pk,1._krealfp),real(nyma,krealfp)) - jx=min(xj,nxma-1._krealfp) - jy=min(yj,nyma-1._krealfp) - ftx1=tbtma(jx,jy)+(xj-jx)*(tbtma(jx+1,jy)-tbtma(jx,jy)) - ftx2=tbtma(jx,jy+1)+(xj-jx)*(tbtma(jx+1,jy+1)-tbtma(jx,jy+1)) - tma=ftx1+(yj-jy)*(ftx2-ftx1) - qx1=tbqma(jx,jy)+(xj-jx)*(tbqma(jx+1,jy)-tbqma(jx,jy)) - qx2=tbqma(jx,jy+1)+(xj-jx)*(tbqma(jx+1,jy+1)-tbqma(jx,jy+1)) - qma=qx1+(yj-jy)*(qx2-qx1) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end subroutine -!------------------------------------------------------------------------------- - elemental subroutine stmaq(the,pk,tma,qma) -!$$$ Subprogram Documentation Block -! -! Subprogram: stmaq Compute moist adiabat temperature -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Compute temperature and specific humidity of a parcel -! 
lifted up a moist adiabat from equivalent potential temperature -! at the LCL and pressure over 1e5 Pa to the kappa power. -! Biquadratic interpolations are done between values in a lookup table -! computed in gtma. See documentation for stmaxg for details. -! Input values outside table range are reset to table extrema. -! the interpolation accuracy is better than 0.0005 Kelvin -! and 1.e-7 kg/kg for temperature and humidity, respectively. -! On the Cray, stmaq is about 25 times faster than exact calculation. -! This subprogram should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell quadratic interpolation -! 1999-03-01 Iredell f90 module -! -! Usage: call stmaq(the,pk,tma,qma) -! -! Input argument list: -! the Real(krealfp) equivalent potential temperature in Kelvin -! pk Real(krealfp) pressure over 1e5 Pa to the kappa power -! -! Output argument list: -! tmaq Real(krealfp) parcel temperature in Kelvin -! qma Real(krealfp) parcel specific humidity in kg/kg -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp),intent(in):: the,pk - real(krealfp),intent(out):: tma,qma - integer jx,jy - real(krealfp) xj,yj,dxj,dyj - real(krealfp) ft11,ft12,ft13,ft21,ft22,ft23,ft31,ft32,ft33 - real(krealfp) ftx1,ftx2,ftx3 - real(krealfp) q11,q12,q13,q21,q22,q23,q31,q32,q33,qx1,qx2,qx3 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xj=min(max(c1xma+c2xma*the,1._krealfp),real(nxma,krealfp)) - yj=min(max(c1yma+c2yma*pk,1._krealfp),real(nyma,krealfp)) - jx=min(max(nint(xj),2),nxma-1) - jy=min(max(nint(yj),2),nyma-1) - dxj=xj-jx - dyj=yj-jy - ft11=tbtma(jx-1,jy-1) - ft12=tbtma(jx-1,jy) - ft13=tbtma(jx-1,jy+1) - ft21=tbtma(jx,jy-1) - ft22=tbtma(jx,jy) - ft23=tbtma(jx,jy+1) - ft31=tbtma(jx+1,jy-1) - ft32=tbtma(jx+1,jy) - ft33=tbtma(jx+1,jy+1) - ftx1=(((ft31+ft11)/2-ft21)*dxj+(ft31-ft11)/2)*dxj+ft21 - ftx2=(((ft32+ft12)/2-ft22)*dxj+(ft32-ft12)/2)*dxj+ft22 - ftx3=(((ft33+ft13)/2-ft23)*dxj+(ft33-ft13)/2)*dxj+ft23 - tma=(((ftx3+ftx1)/2-ftx2)*dyj+(ftx3-ftx1)/2)*dyj+ftx2 - q11=tbqma(jx-1,jy-1) - q12=tbqma(jx-1,jy) - q13=tbqma(jx-1,jy+1) - q21=tbqma(jx,jy-1) - q22=tbqma(jx,jy) - q23=tbqma(jx,jy+1) - q31=tbqma(jx+1,jy-1) - q32=tbqma(jx+1,jy) - q33=tbqma(jx+1,jy+1) - qx1=(((q31+q11)/2-q21)*dxj+(q31-q11)/2)*dxj+q21 - qx2=(((q32+q12)/2-q22)*dxj+(q32-q12)/2)*dxj+q22 - qx3=(((q33+q13)/2-q23)*dxj+(q33-q13)/2)*dxj+q23 - qma=(((qx3+qx1)/2-qx2)*dyj+(qx3-qx1)/2)*dyj+qx2 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end subroutine -!------------------------------------------------------------------------------- - elemental subroutine stmax(the,pk,tma,qma) -!$$$ Subprogram Documentation Block -! -! Subprogram: stmax Compute moist adiabat temperature -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Exactly compute temperature and humidity of a parcel -! lifted up a moist adiabat from equivalent potential temperature -! at the LCL and pressure over 1e5 Pa to the kappa power. -! An approximate parcel temperature for subprogram stmaxg -! is obtained using stma so gtma must be already called. -! See documentation for stmaxg for details. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell exact computation -! 1999-03-01 Iredell f90 module -! -! Usage: call stmax(the,pk,tma,qma) -! -! Input argument list: -! the Real(krealfp) equivalent potential temperature in Kelvin -! 
pk Real(krealfp) pressure over 1e5 Pa to the kappa power -! -! Output argument list: -! tma Real(krealfp) parcel temperature in Kelvin -! qma Real(krealfp) parcel specific humidity in kg/kg -! -! Subprograms called: -! (stma) inlinable subprogram to compute parcel temperature -! (stmaxg) inlinable subprogram to compute parcel temperature -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp),intent(in):: the,pk - real(krealfp),intent(out):: tma,qma - real(krealfp) tg,qg -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - call stma(the,pk,tg,qg) - call stmaxg(tg,the,pk,tma,qma) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end subroutine -!------------------------------------------------------------------------------- - elemental subroutine stmaxg(tg,the,pk,tma,qma) -!$$$ Subprogram Documentation Block -! -! Subprogram: stmaxg Compute moist adiabat temperature -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: exactly compute temperature and humidity of a parcel -! lifted up a moist adiabat from equivalent potential temperature -! at the LCL and pressure over 1e5 Pa to the kappa power. -! A guess parcel temperature must be provided. -! Equivalent potential temperature is constant for a saturated parcel -! rising adiabatically up a moist adiabat when the heat and mass -! of the condensed water are neglected. Ice is also neglected. -! The formula for equivalent potential temperature (Holton) is -! the=t*(pd**(-rocp))*exp(el*eps*pv/(cp*t*pd)) -! where t is the temperature, pv is the saturated vapor pressure, -! pd is the dry pressure p-pv, el is the temperature dependent -! latent heat of condensation hvap+dldt*(t-ttp), and other values -! are physical constants defined in parameter statements in the code. -! The formula is inverted by iterating Newtonian approximations -! for each the and p until t is found to within 1.e-4 Kelvin. -! The specific humidity is then computed from pv and pd. -! This subprogram can be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell exact computation -! 1999-03-01 Iredell f90 module -! -! Usage: call stmaxg(tg,the,pk,tma,qma) -! -! Input argument list: -! tg Real(krealfp) guess parcel temperature in Kelvin -! the Real(krealfp) equivalent potential temperature in Kelvin -! pk Real(krealfp) pressure over 1e5 Pa to the kappa power -! -! Output argument list: -! tma Real(krealfp) parcel temperature in Kelvin -! qma Real(krealfp) parcel specific humidity in kg/kg -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp),intent(in):: tg,the,pk - real(krealfp),intent(out):: tma,qma - real(krealfp),parameter:: terrm=1.e-4 - real(krealfp) t,p,tr,pv,pd,el,expo,thet,dthet,terr - integer i -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - t=tg - p=pk**con_cpor - do i=1,100 - tr=con_ttp/t - pv=psatb*(tr**con_xpona)*exp(con_xponb*(1.-tr)) - pd=p-pv - el=con_hvap+con_dldt*(t-con_ttp) - expo=el*con_eps*pv/(con_cp*t*pd) - thet=t*pd**(-con_rocp)*exp(expo) - dthet=thet/t*(1.+expo*(con_dldt*t/el+el*p/(con_rv*t*pd))) - terr=(thet-the)/dthet - t=t-terr - if(abs(terr).le.terrm) exit - enddo - tma=t - tr=con_ttp/t - pv=psatb*(tr**con_xpona)*exp(con_xponb*(1.-tr)) - pd=p-pv - qma=con_eps*pv/(pd+con_eps*pv) -! 
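stmaxg above (like ftdpxg earlier) inverts a smooth, monotone thermodynamic relation by Newton iteration, stopping when the temperature increment drops below a tolerance. A generic Python sketch of that inversion pattern only; the operational routines use an analytic derivative of the exact formula, and the finite difference below is just to keep the sketch short:

    def newton_invert(f, target, t_guess, tol=1e-4, max_iter=100):
        """Refine t_guess until f(t) matches target to within tol (Newton steps)."""
        t = t_guess
        for _ in range(max_iter):
            dfdt = (f(t + 0.01) - f(t)) / 0.01      # one-sided numerical derivative
            terr = (f(t) - target) / dfdt
            t -= terr
            if abs(terr) <= tol:
                break
        return t

    # Stand-in monotone curve: recovering t from t**4 = 81 gives ~3.0.
    print(newton_invert(lambda t: t ** 4, 81.0, t_guess=2.0))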
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end subroutine -!------------------------------------------------------------------------------- - subroutine gpkap -!$$$ Subprogram documentation block -! -! Subprogram: gpkap Compute coefficients for p**kappa -! Author: Phillips org: w/NMC2X2 Date: 29 dec 82 -! -! Abstract: Computes pressure to the kappa table as a function of pressure -! for the table lookup function fpkap. -! Exact pressure to the kappa values are calculated in subprogram fpkapx. -! The current implementation computes a table with a length -! of 5501 for pressures ranging up to 110000 Pascals. -! -! Program History Log: -! 94-12-30 Iredell -! 1999-03-01 Iredell f90 module -! 1999-03-24 Iredell table lookup -! -! Usage: call gpkap -! -! Subprograms called: -! fpkapx function to compute exact pressure to the kappa -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - integer jx - real(krealfp) xmin,xmax,xinc,x,p -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xmin=0._krealfp - xmax=110000._krealfp - xinc=(xmax-xmin)/(nxpkap-1) - c1xpkap=1.-xmin/xinc - c2xpkap=1./xinc - do jx=1,nxpkap - x=xmin+(jx-1)*xinc - p=x - tbpkap(jx)=fpkapx(p) - enddo -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end subroutine -!------------------------------------------------------------------------------- - elemental function fpkap(p) -!$$$ Subprogram Documentation Block -! -! Subprogram: fpkap raise pressure to the kappa power. -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Raise pressure over 1e5 Pa to the kappa power. -! A linear interpolation is done between values in a lookup table -! computed in gpkap. See documentation for fpkapx for details. -! Input values outside table range are reset to table extrema. -! The interpolation accuracy ranges from 9 decimal places -! at 100000 Pascals to 5 decimal places at 1000 Pascals. -! On the Cray, fpkap is over 5 times faster than exact calculation. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell standardized kappa, -! increased range and accuracy -! 1999-03-01 Iredell f90 module -! 1999-03-24 Iredell table lookup -! -! Usage: pkap=fpkap(p) -! -! Input argument list: -! p Real(krealfp) pressure in Pascals -! -! Output argument list: -! fpkap Real(krealfp) p over 1e5 Pa to the kappa power -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) fpkap - real(krealfp),intent(in):: p - integer jx - real(krealfp) xj -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xj=min(max(c1xpkap+c2xpkap*p,1._krealfp),real(nxpkap,krealfp)) - jx=min(xj,nxpkap-1._krealfp) - fpkap=tbpkap(jx)+(xj-jx)*(tbpkap(jx+1)-tbpkap(jx)) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - elemental function fpkapq(p) -!$$$ Subprogram Documentation Block -! -! Subprogram: fpkapq raise pressure to the kappa power. -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Raise pressure over 1e5 Pa to the kappa power. -! A quadratic interpolation is done between values in a lookup table -! computed in gpkap. see documentation for fpkapx for details. -! Input values outside table range are reset to table extrema. -! 
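gpkap above builds its lookup table with the same two coefficients every g* routine here derives: c1 and c2 are chosen so the fractional index c1 + c2*x runs from 1 at xmin to n at xmax. A short Python sketch of that set-up; the exponent 0.2857 stands in for rd/cp and is an assumed value:

    # Table set-up shared by gtdp, gthe, gpkap, grkap and gtlcl (illustrative names).
    def build_table(fexact, xmin, xmax, n):
        xinc = (xmax - xmin) / (n - 1)
        c1 = 1.0 - xmin / xinc
        c2 = 1.0 / xinc
        table = [fexact(xmin + (j - 1) * xinc) for j in range(1, n + 1)]
        return table, c1, c2

    # e.g. the gpkap table: 5501 entries of (p/1e5)**kappa for p up to 110000 Pa.
    table, c1, c2 = build_table(lambda p: (p / 1.0e5) ** 0.2857, 0.0, 110000.0, 5501)
    assert abs(c1 + c2 * 0.0 - 1.0) < 1e-9 and abs(c1 + c2 * 110000.0 - 5501.0) < 1e-6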
The interpolation accuracy ranges from 12 decimal places -! at 100000 Pascals to 7 decimal places at 1000 Pascals. -! On the Cray, fpkap is over 4 times faster than exact calculation. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell standardized kappa, -! increased range and accuracy -! 1999-03-01 Iredell f90 module -! 1999-03-24 Iredell table lookup -! -! Usage: pkap=fpkapq(p) -! -! Input argument list: -! p Real(krealfp) pressure in Pascals -! -! Output argument list: -! fpkapq Real(krealfp) p over 1e5 Pa to the kappa power -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) fpkapq - real(krealfp),intent(in):: p - integer jx - real(krealfp) xj,dxj,fj1,fj2,fj3 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xj=min(max(c1xpkap+c2xpkap*p,1._krealfp),real(nxpkap,krealfp)) - jx=min(max(nint(xj),2),nxpkap-1) - dxj=xj-jx - fj1=tbpkap(jx-1) - fj2=tbpkap(jx) - fj3=tbpkap(jx+1) - fpkapq=(((fj3+fj1)/2-fj2)*dxj+(fj3-fj1)/2)*dxj+fj2 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - function fpkapo(p) -!$$$ Subprogram documentation block -! -! Subprogram: fpkapo raise surface pressure to the kappa power. -! Author: Phillips org: w/NMC2X2 Date: 29 dec 82 -! -! Abstract: Raise surface pressure over 1e5 Pa to the kappa power -! using a rational weighted chebyshev approximation. -! The numerator is of order 2 and the denominator is of order 4. -! The pressure range is 40000-110000 Pa and kappa is defined in fpkapx. -! The accuracy of this approximation is almost 8 decimal places. -! On the Cray, fpkap is over 10 times faster than exact calculation. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell standardized kappa, -! increased range and accuracy -! 1999-03-01 Iredell f90 module -! -! Usage: pkap=fpkapo(p) -! -! Input argument list: -! p Real(krealfp) surface pressure in Pascals -! p should be in the range 40000 to 110000 -! -! Output argument list: -! fpkapo Real(krealfp) p over 1e5 Pa to the kappa power -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) fpkapo - real(krealfp),intent(in):: p - integer,parameter:: nnpk=2,ndpk=4 - real(krealfp):: cnpk(0:nnpk)=(/3.13198449e-1,5.78544829e-2,& - 8.35491871e-4/) - real(krealfp):: cdpk(0:ndpk)=(/1.,8.15968401e-2,5.72839518e-4,& - -4.86959812e-7,5.24459889e-10/) - integer n - real(krealfp) pkpa,fnpk,fdpk -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - pkpa=p*1.e-3_krealfp - fnpk=cnpk(nnpk) - do n=nnpk-1,0,-1 - fnpk=pkpa*fnpk+cnpk(n) - enddo - fdpk=cdpk(ndpk) - do n=ndpk-1,0,-1 - fdpk=pkpa*fdpk+cdpk(n) - enddo - fpkapo=fnpk/fdpk -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - elemental function fpkapx(p) -!$$$ Subprogram documentation block -! -! Subprogram: fpkapx raise pressure to the kappa power. -! Author: Phillips org: w/NMC2X2 Date: 29 dec 82 -! -! Abstract: raise pressure over 1e5 Pa to the kappa power. -! Kappa is equal to rd/cp where rd and cp are physical constants. -! This function should be expanded inline in the calling routine. -! -! 
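fpkapo above evaluates a rational approximation: an order-2 numerator and an order-4 denominator in p/1000, each evaluated with Horner's rule, then divided. The coefficients below are copied from the block above; at p = 100000 Pa the ratio comes out essentially 1.0, as expected for (p/1e5)**kappa:

    CNPK = (3.13198449e-1, 5.78544829e-2, 8.35491871e-4)
    CDPK = (1.0, 8.15968401e-2, 5.72839518e-4, -4.86959812e-7, 5.24459889e-10)

    def fpkapo(p):
        pkpa = p * 1.0e-3
        fnpk = 0.0
        for c in reversed(CNPK):      # Horner's rule, highest order first
            fnpk = pkpa * fnpk + c
        fdpk = 0.0
        for c in reversed(CDPK):
            fdpk = pkpa * fdpk + c
        return fnpk / fdpk

    print(fpkapo(100000.0))           # ~1.0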
Program History Log: -! 94-12-30 Iredell made into inlinable function -! 1999-03-01 Iredell f90 module -! -! Usage: pkap=fpkapx(p) -! -! Input argument list: -! p Real(krealfp) pressure in Pascals -! -! Output argument list: -! fpkapx Real(krealfp) p over 1e5 Pa to the kappa power -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) fpkapx - real(krealfp),intent(in):: p -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - fpkapx=(p/1.e5_krealfp)**con_rocp -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - subroutine grkap -!$$$ Subprogram documentation block -! -! Subprogram: grkap Compute coefficients for p**(1/kappa) -! Author: Phillips org: w/NMC2X2 Date: 29 dec 82 -! -! Abstract: Computes pressure to the 1/kappa table as a function of pressure -! for the table lookup function frkap. -! Exact pressure to the 1/kappa values are calculated in subprogram frkapx. -! The current implementation computes a table with a length -! of 5501 for pressures ranging up to 110000 Pascals. -! -! Program History Log: -! 94-12-30 Iredell -! 1999-03-01 Iredell f90 module -! 1999-03-24 Iredell table lookup -! -! Usage: call grkap -! -! Subprograms called: -! frkapx function to compute exact pressure to the 1/kappa -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - integer jx - real(krealfp) xmin,xmax,xinc,x,p -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xmin=0._krealfp - xmax=fpkapx(110000._krealfp) - xinc=(xmax-xmin)/(nxrkap-1) - c1xrkap=1.-xmin/xinc - c2xrkap=1./xinc - do jx=1,nxrkap - x=xmin+(jx-1)*xinc - p=x - tbrkap(jx)=frkapx(p) - enddo -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end subroutine -!------------------------------------------------------------------------------- - elemental function frkap(pkap) -!$$$ Subprogram Documentation Block -! -! Subprogram: frkap raise pressure to the 1/kappa power. -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Raise pressure over 1e5 Pa to the 1/kappa power. -! A linear interpolation is done between values in a lookup table -! computed in grkap. See documentation for frkapx for details. -! Input values outside table range are reset to table extrema. -! The interpolation accuracy is better than 7 decimal places. -! On the IBM, fpkap is about 4 times faster than exact calculation. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell standardized kappa, -! increased range and accuracy -! 1999-03-01 Iredell f90 module -! 1999-03-24 Iredell table lookup -! -! Usage: p=frkap(pkap) -! -! Input argument list: -! pkap Real(krealfp) p over 1e5 Pa to the kappa power -! -! Output argument list: -! frkap Real(krealfp) pressure in Pascals -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) frkap - real(krealfp),intent(in):: pkap - integer jx - real(krealfp) xj -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xj=min(max(c1xrkap+c2xrkap*pkap,1._krealfp),real(nxrkap,krealfp)) - jx=min(xj,nxrkap-1._krealfp) - frkap=tbrkap(jx)+(xj-jx)*(tbrkap(jx+1)-tbrkap(jx)) -! 
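fpkapx and frkapx (defined just below) are exact inverses of one another, which is also why grkap can set its table range with xmax = fpkapx(110000). A quick round-trip check in Python, using a representative rd/cp:

    ROCP = 287.05 / 1004.6            # assumed value for con_rocp

    def fpkapx(p):
        return (p / 1.0e5) ** ROCP

    def frkapx(pkap):
        return pkap ** (1.0 / ROCP) * 1.0e5

    p = 85000.0
    assert abs(frkapx(fpkapx(p)) - p) < 1e-6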
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - elemental function frkapq(pkap) -!$$$ Subprogram Documentation Block -! -! Subprogram: frkapq raise pressure to the 1/kappa power. -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Raise pressure over 1e5 Pa to the 1/kappa power. -! A quadratic interpolation is done between values in a lookup table -! computed in grkap. see documentation for frkapx for details. -! Input values outside table range are reset to table extrema. -! The interpolation accuracy is better than 11 decimal places. -! On the IBM, fpkap is almost 4 times faster than exact calculation. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 94-12-30 Iredell standardized kappa, -! increased range and accuracy -! 1999-03-01 Iredell f90 module -! 1999-03-24 Iredell table lookup -! -! Usage: p=frkapq(pkap) -! -! Input argument list: -! pkap Real(krealfp) p over 1e5 Pa to the kappa power -! -! Output argument list: -! frkapq Real(krealfp) pressure in Pascals -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) frkapq - real(krealfp),intent(in):: pkap - integer jx - real(krealfp) xj,dxj,fj1,fj2,fj3 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xj=min(max(c1xrkap+c2xrkap*pkap,1._krealfp),real(nxrkap,krealfp)) - jx=min(max(nint(xj),2),nxrkap-1) - dxj=xj-jx - fj1=tbrkap(jx-1) - fj2=tbrkap(jx) - fj3=tbrkap(jx+1) - frkapq=(((fj3+fj1)/2-fj2)*dxj+(fj3-fj1)/2)*dxj+fj2 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - elemental function frkapx(pkap) -!$$$ Subprogram documentation block -! -! Subprogram: frkapx raise pressure to the 1/kappa power. -! Author: Phillips org: w/NMC2X2 Date: 29 dec 82 -! -! Abstract: raise pressure over 1e5 Pa to the 1/kappa power. -! Kappa is equal to rd/cp where rd and cp are physical constants. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 94-12-30 Iredell made into inlinable function -! 1999-03-01 Iredell f90 module -! -! Usage: p=frkapx(pkap) -! -! Input argument list: -! pkap Real(krealfp) p over 1e5 Pa to the kappa power -! -! Output argument list: -! frkapx Real(krealfp) pressure in Pascals -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) frkapx - real(krealfp),intent(in):: pkap -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - frkapx=pkap**(1/con_rocp)*1.e5_krealfp -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - subroutine gtlcl -!$$$ Subprogram Documentation Block -! -! Subprogram: gtlcl Compute equivalent potential temperature table -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Compute lifting condensation level temperature table -! as a function of temperature and dewpoint depression for function ftlcl. -! Lifting condensation level temperature is calculated in subprogram ftlclx -! The current implementation computes a table with a first dimension -! of 151 for temperatures ranging from 180.0 to 330.0 Kelvin -! 
and a second dimension of 61 for dewpoint depression ranging from -! 0 to 60 Kelvin. -! -! Program History Log: -! 1999-03-01 Iredell f90 module -! -! Usage: call gtlcl -! -! Subprograms called: -! (ftlclx) inlinable function to compute LCL temperature -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - integer jx,jy - real(krealfp) xmin,xmax,ymin,ymax,xinc,yinc,x,y,tdpd,t -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xmin=180._krealfp - xmax=330._krealfp - ymin=0._krealfp - ymax=60._krealfp - xinc=(xmax-xmin)/(nxtlcl-1) - c1xtlcl=1.-xmin/xinc - c2xtlcl=1./xinc - yinc=(ymax-ymin)/(nytlcl-1) - c1ytlcl=1.-ymin/yinc - c2ytlcl=1./yinc - do jy=1,nytlcl - y=ymin+(jy-1)*yinc - tdpd=y - do jx=1,nxtlcl - x=xmin+(jx-1)*xinc - t=x - tbtlcl(jx,jy)=ftlclx(t,tdpd) - enddo - enddo -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end subroutine -!------------------------------------------------------------------------------- - elemental function ftlcl(t,tdpd) -!$$$ Subprogram Documentation Block -! -! Subprogram: ftlcl Compute LCL temperature -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Compute temperature at the lifting condensation level -! from temperature and dewpoint depression. -! A bilinear interpolation is done between values in a lookup table -! computed in gtlcl. See documentation for ftlclx for details. -! Input values outside table range are reset to table extrema. -! The interpolation accuracy is better than 0.0005 Kelvin. -! On the Cray, ftlcl is ? times faster than exact calculation. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 1999-03-01 Iredell f90 module -! -! Usage: tlcl=ftlcl(t,tdpd) -! -! Input argument list: -! t Real(krealfp) LCL temperature in Kelvin -! tdpd Real(krealfp) dewpoint depression in Kelvin -! -! Output argument list: -! ftlcl Real(krealfp) temperature at the LCL in Kelvin -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) ftlcl - real(krealfp),intent(in):: t,tdpd - integer jx,jy - real(krealfp) xj,yj,ftx1,ftx2 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xj=min(max(c1xtlcl+c2xtlcl*t,1._krealfp),real(nxtlcl,krealfp)) - yj=min(max(c1ytlcl+c2ytlcl*tdpd,1._krealfp),real(nytlcl,krealfp)) - jx=min(xj,nxtlcl-1._krealfp) - jy=min(yj,nytlcl-1._krealfp) - ftx1=tbtlcl(jx,jy)+(xj-jx)*(tbtlcl(jx+1,jy)-tbtlcl(jx,jy)) - ftx2=tbtlcl(jx,jy+1)+(xj-jx)*(tbtlcl(jx+1,jy+1)-tbtlcl(jx,jy+1)) - ftlcl=ftx1+(yj-jy)*(ftx2-ftx1) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - elemental function ftlclq(t,tdpd) -!$$$ Subprogram Documentation Block -! -! Subprogram: ftlclq Compute LCL temperature -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Compute temperature at the lifting condensation level -! from temperature and dewpoint depression. -! A biquadratic interpolation is done between values in a lookup table -! computed in gtlcl. see documentation for ftlclx for details. -! Input values outside table range are reset to table extrema. -! The interpolation accuracy is better than 0.000003 Kelvin. -! On the Cray, ftlclq is ? times faster than exact calculation. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 1999-03-01 Iredell f90 module -! -! Usage: tlcl=ftlclq(t,tdpd) -! -! 
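ftlcl above (like fthe and stma) does a bilinear lookup: interpolate along the first index at the two neighbouring values of the second index, then interpolate between those two results along the second index. A Python sketch with illustrative names:

    # Bilinear 2-D table lookup as in fthe, stma and ftlcl (illustrative names).
    def lookup_bilinear(x, y, tb, c1x, c2x, c1y, c2y):
        nx, ny = len(tb), len(tb[0])
        xj = min(max(c1x + c2x * x, 1.0), float(nx))
        yj = min(max(c1y + c2y * y, 1.0), float(ny))
        jx, jy = min(int(xj), nx - 1), min(int(yj), ny - 1)
        f1 = tb[jx - 1][jy - 1] + (xj - jx) * (tb[jx][jy - 1] - tb[jx - 1][jy - 1])
        f2 = tb[jx - 1][jy] + (xj - jx) * (tb[jx][jy] - tb[jx - 1][jy])
        return f1 + (yj - jy) * (f2 - f1)

    # 2x2 table of f(x,y) = x + y on x, y in {0, 1}: bilinear is exact here.
    tb = [[0.0, 1.0], [1.0, 2.0]]
    print(lookup_bilinear(0.25, 0.75, tb, 1.0, 1.0, 1.0, 1.0))   # -> 1.0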
Input argument list: -! t Real(krealfp) LCL temperature in Kelvin -! tdpd Real(krealfp) dewpoint depression in Kelvin -! -! Output argument list: -! ftlcl Real(krealfp) temperature at the LCL in Kelvin -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) ftlclq - real(krealfp),intent(in):: t,tdpd - integer jx,jy - real(krealfp) xj,yj,dxj,dyj - real(krealfp) ft11,ft12,ft13,ft21,ft22,ft23,ft31,ft32,ft33 - real(krealfp) ftx1,ftx2,ftx3 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - xj=min(max(c1xtlcl+c2xtlcl*t,1._krealfp),real(nxtlcl,krealfp)) - yj=min(max(c1ytlcl+c2ytlcl*tdpd,1._krealfp),real(nytlcl,krealfp)) - jx=min(max(nint(xj),2),nxtlcl-1) - jy=min(max(nint(yj),2),nytlcl-1) - dxj=xj-jx - dyj=yj-jy - ft11=tbtlcl(jx-1,jy-1) - ft12=tbtlcl(jx-1,jy) - ft13=tbtlcl(jx-1,jy+1) - ft21=tbtlcl(jx,jy-1) - ft22=tbtlcl(jx,jy) - ft23=tbtlcl(jx,jy+1) - ft31=tbtlcl(jx+1,jy-1) - ft32=tbtlcl(jx+1,jy) - ft33=tbtlcl(jx+1,jy+1) - ftx1=(((ft31+ft11)/2-ft21)*dxj+(ft31-ft11)/2)*dxj+ft21 - ftx2=(((ft32+ft12)/2-ft22)*dxj+(ft32-ft12)/2)*dxj+ft22 - ftx3=(((ft33+ft13)/2-ft23)*dxj+(ft33-ft13)/2)*dxj+ft23 - ftlclq=(((ftx3+ftx1)/2-ftx2)*dyj+(ftx3-ftx1)/2)*dyj+ftx2 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - function ftlclo(t,tdpd) -!$$$ Subprogram documentation block -! -! Subprogram: ftlclo Compute LCL temperature. -! Author: Phillips org: w/NMC2X2 Date: 29 dec 82 -! -! Abstract: Compute temperature at the lifting condensation level -! from temperature and dewpoint depression. the formula used is -! a polynomial taken from Phillips mstadb routine which empirically -! approximates the original exact implicit relationship. -! (This kind of approximation is customary (inman, 1969), but -! the original source for this particular one is not yet known. -MI) -! Its accuracy is about 0.03 Kelvin for a dewpoint depression of 30. -! This function should be expanded inline in the calling routine. -! -! Program History Log: -! 91-05-07 Iredell made into inlinable function -! 1999-03-01 Iredell f90 module -! -! Usage: tlcl=ftlclo(t,tdpd) -! -! Input argument list: -! t Real(krealfp) temperature in Kelvin -! tdpd Real(krealfp) dewpoint depression in Kelvin -! -! Output argument list: -! ftlclo Real(krealfp) temperature at the LCL in Kelvin -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) ftlclo - real(krealfp),intent(in):: t,tdpd - real(krealfp),parameter:: clcl1= 0.954442e+0,clcl2= 0.967772e-3,& - clcl3=-0.710321e-3,clcl4=-0.270742e-5 -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ftlclo=t-tdpd*(clcl1+clcl2*t+tdpd*(clcl3+clcl4*t)) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - elemental function ftlclx(t,tdpd) -!$$$ Subprogram documentation block -! -! Subprogram: ftlclx Compute LCL temperature. -! Author: Iredell org: w/NMC2X2 Date: 25 March 1999 -! -! Abstract: Compute temperature at the lifting condensation level -! from temperature and dewpoint depression. A parcel lifted -! adiabatically becomes saturated at the lifting condensation level. -! The water model assumes a perfect gas, constant specific heats -! for gas and liquid, and neglects the volume of the liquid. -! 
The model does account for the variation of the latent heat -! of condensation with temperature. The ice option is not included. -! The Clausius-Clapeyron equation is integrated from the triple point -! to get the formulas -! pvlcl=con_psat*(trlcl**xa)*exp(xb*(1.-trlcl)) -! pvdew=con_psat*(trdew**xa)*exp(xb*(1.-trdew)) -! where pvlcl is the saturated parcel vapor pressure at the LCL, -! pvdew is the unsaturated parcel vapor pressure initially, -! trlcl is ttp/tlcl and trdew is ttp/tdew. The adiabatic lifting -! of the parcel is represented by the following formula -! pvdew=pvlcl*(t/tlcl)**(1/kappa) -! This formula is inverted by iterating Newtonian approximations -! until tlcl is found to within 1.e-6 Kelvin. Note that the minimum -! returned temperature is 180 Kelvin. -! -! Program History Log: -! 1999-03-25 Iredell -! -! Usage: tlcl=ftlclx(t,tdpd) -! -! Input argument list: -! t Real(krealfp) temperature in Kelvin -! tdpd Real(krealfp) dewpoint depression in Kelvin -! -! Output argument list: -! ftlclx Real(krealfp) temperature at the LCL in Kelvin -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none - real(krealfp) ftlclx - real(krealfp),intent(in):: t,tdpd - real(krealfp),parameter:: terrm=1.e-4,tlmin=180.,tlminx=tlmin-5. - real(krealfp) tr,pvdew,tlcl,ta,pvlcl,el,dpvlcl,terr - integer i -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - tr=con_ttp/(t-tdpd) - pvdew=con_psat*(tr**con_xpona)*exp(con_xponb*(1.-tr)) - tlcl=t-tdpd - do i=1,100 - tr=con_ttp/tlcl - ta=t/tlcl - pvlcl=con_psat*(tr**con_xpona)*exp(con_xponb*(1.-tr))*ta**(1/con_rocp) - el=con_hvap+con_dldt*(tlcl-con_ttp) - dpvlcl=(el/(con_rv*t**2)+1/(con_rocp*tlcl))*pvlcl - terr=(pvlcl-pvdew)/dpvlcl - tlcl=tlcl-terr - if(abs(terr).le.terrm.or.tlcl.lt.tlminx) exit - enddo - ftlclx=max(tlcl,tlmin) -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end function -!------------------------------------------------------------------------------- - subroutine gfuncphys -!$$$ Subprogram Documentation Block -! -! Subprogram: gfuncphys Compute all physics function tables -! Author: N Phillips w/NMC2X2 Date: 30 dec 82 -! -! Abstract: Compute all physics function tables. Lookup tables are -! set up for computing saturation vapor pressure, dewpoint temperature, -! equivalent potential temperature, moist adiabatic temperature and humidity, -! pressure to the kappa, and lifting condensation level temperature. -! -! Program History Log: -! 1999-03-01 Iredell f90 module -! -! Usage: call gfuncphys -! -! Subprograms called: -! gpvsl compute saturation vapor pressure over liquid table -! gpvsi compute saturation vapor pressure over ice table -! gpvs compute saturation vapor pressure table -! gtdpl compute dewpoint temperature over liquid table -! gtdpi compute dewpoint temperature over ice table -! gtdp compute dewpoint temperature table -! gthe compute equivalent potential temperature table -! gtma compute moist adiabat tables -! gpkap compute pressure to the kappa table -! grkap compute pressure to the 1/kappa table -! gtlcl compute LCL temperature table -! -! Attributes: -! Language: Fortran 90. -! -!$$$ - implicit none -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - call gpvsl - call gpvsi - call gpvs - call gtdpl - call gtdpi - call gtdp - call gthe - call gtma - call gpkap - call grkap - call gtlcl -! 
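The ftlclx iteration above holds the parcel's initial vapour pressure fixed and Newton-iterates on the LCL temperature, where the saturated vapour pressure along the dry adiabat picks up the factor (t/tlcl)**(1/kappa). A Python sketch of the same loop; the constants are representative values only (physcons is not shown here):

    import math

    TTP, PSAT = 273.16, 610.78        # assumed: triple point (K), saturation pressure (Pa)
    RD, RV, CP = 287.05, 461.50, 1004.6
    HVAP, DLDT = 2.5e6, 1846.0 - 4185.5
    ROCP = RD / CP
    XPONA = -DLDT / RV
    XPONB = XPONA + HVAP / (RV * TTP)

    def pvs_liquid(t):
        tr = TTP / t
        return PSAT * tr ** XPONA * math.exp(XPONB * (1.0 - tr))

    def tlcl_exact(t, tdpd, tol=1e-4):
        pvdew = pvs_liquid(t - tdpd)              # parcel vapour pressure (fixed)
        tlcl = t - tdpd                           # first guess: the dewpoint
        for _ in range(100):
            pvlcl = pvs_liquid(tlcl) * (t / tlcl) ** (1.0 / ROCP)
            el = HVAP + DLDT * (tlcl - TTP)
            dpvlcl = (el / (RV * t ** 2) + 1.0 / (ROCP * tlcl)) * pvlcl
            terr = (pvlcl - pvdew) / dpvlcl
            tlcl -= terr
            if abs(terr) <= tol:
                break
        return max(tlcl, 180.0)                   # floor used by the original routine

    print(tlcl_exact(300.0, 10.0))                # roughly 287-288 K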
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - end subroutine -!------------------------------------------------------------------------------- -end module diff --git a/sorc/gfs_bufr.fd/gfsbufr.f b/sorc/gfs_bufr.fd/gfsbufr.f deleted file mode 100755 index e6e3d06517..0000000000 --- a/sorc/gfs_bufr.fd/gfsbufr.f +++ /dev/null @@ -1,276 +0,0 @@ - program meteormrf -C$$$ MAIN PROGRAM DOCUMENTATION BLOCK -C -C MAIN PROGRAM: METEOMRF -C PRGMMR: PAN ORG: NP23 DATE: 1999-07-21 -C -C ABSTRACT: Creates BUFR meteogram files for the AVN and MRF. -C -C PROGRAM HISTORY LOG: -C 99-07-21 Hualu Pan -C 16-09-27 HUIYA CHUANG MODIFY TO READ GFS NEMS OUTPUT ON GRID SPACE -C 16-10-15 HUIYA CHUANG: CONSOLIDATE TO READ FLUX FIELDS IN THIS -C PACKAGE TOO AND THIS SPEEDS UP BFS BUFR BY 3X -C 17-02-27 GUANG PING LOU: CHANGE MODEL OUTPUT READ-IN TO HOURLY -C TO 120 HOURS AND 3 HOURLY TO 180 HOURS. -C 19-07-16 GUANG PING LOU: CHANGE FROM NEMSIO TO GRIB2. -C -C -C USAGE: -C INPUT FILES: -C FTxxF001 - UNITS 11 THRU 49 -C PARM - UNIT 5 (STANDARD READ) -C -C OUTPUT FILES: (INCLUDING SCRATCH FILES) -C FTxxF001 - UNITS 51 THRU 79 -C FTxxF001 - UNIT 6 (STANDARD PRINTFILE) -C -C SUBPROGRAMS CALLED: (LIST ALL CALLED FROM ANYWHERE IN CODES) -C UNIQUE: - ROUTINES THAT ACCOMPANY SOURCE FOR COMPILE -C LIBRARY: -C W3LIB - -C -C EXIT STATES: -C COND = 0 - SUCCESSFUL RUN -C =NNNN - TROUBLE OR SPECIAL FLAG - SPECIFY NATURE -C -C REMARKS: LIST CAVEATS, OTHER HELPFUL HINTS OR INFORMATION -C -C ATTRIBUTES: -C LANGUAGE: INDICATE EXTENSIONS, COMPILER OPTIONS -C MACHINE: IBM SP -C -C$$$ - use netcdf - use mpi - use nemsio_module - use sigio_module - implicit none -!! include 'mpif.h' - integer,parameter:: nsta=3000 - integer,parameter:: ifile=11 - integer,parameter:: levso=64 - integer(sigio_intkind):: irets - type(nemsio_gfile) :: gfile - integer ncfsig, nsig - integer istat(nsta), idate(4), jdate - integer :: levs,nstart,nend,nint,nsfc,levsi,im,jm - integer :: npoint,np,ist,is,iret,lss,nss,nf,nsk,nfile - integer :: ielev - integer :: lsfc - real :: alat,alon,rla,rlo - real :: wrkd(1),dummy - real rlat(nsta), rlon(nsta), elevstn(nsta) - integer iidum(nsta),jjdum(nsta) - integer nint1, nend1, nint3, nend3, np1 - integer landwater(nsta) - character*1 ns, ew - character*4 t3 - character*4 cstat(nsta) - character*32 desc - character*150 dird, fnsig - logical f00, makebufr - CHARACTER*150 FILESEQ - CHARACTER*8 SBSET - LOGICAL SEQFLG(4) - CHARACTER*80 CLIST(4) - INTEGER NPP(4) - CHARACTER*8 SEQNAM(4) - integer ierr, mrank, msize,ntask - integer n0, ntot - integer :: error, ncid, id_var,dimid - character(len=10) :: dim_nam - character(len=6) :: fformat - !added from Cory - integer :: iope, ionproc - integer, allocatable :: iocomms(:) -C - DATA SBSET / 'ABCD1234' / -C - DATA SEQFLG / .FALSE., .TRUE., .FALSE., .FALSE. 
/ -C - DATA SEQNAM / 'HEADR', 'PROFILE', 'CLS1' ,'D10M' / -c DATA SEQNAM / 'HEADR', 'PRES TMDB UWND VWND SPFH OMEG', -c & 'CLS1' ,'D10M' / -C - namelist /nammet/ levs, makebufr, dird, - & nstart, nend, nint, nend1, nint1, - & nint3, nsfc, f00, fformat, np1 - - call mpi_init(ierr) - call mpi_comm_rank(MPI_COMM_WORLD,mrank,ierr) - call mpi_comm_size(MPI_COMM_WORLD,msize,ierr) - if(mrank.eq.0) then - CALL W3TAGB('METEOMRF',1999,0202,0087,'NP23') - endif - open(5,file='gfsparm') - read(5,nammet) - write(6,nammet) - npoint = 0 - 99 FORMAT (I6, F6.2,A1, F7.2,A1,1X,A4,1X,I2, A28, I4) - do np = 1, nsta+2 - read(8,99,end=200) IST,ALAT,NS,ALON,EW,T3,lsfc,DESC,IELEV -CC print*," IST,ALAT,NS,ALON,EW,T3,lsfc,DESC,IELEV= " -CC print*, IST,ALAT,NS,ALON,EW,T3,lsfc,DESC,IELEV - if(alat.lt.95.) then - npoint = npoint + 1 - RLA = 9999. - IF (NS .EQ. 'N') RLA = ALAT - IF (NS .EQ. 'S') RLA = -ALAT - RLO = 9999. - IF (EW .EQ. 'E') RLO = ALON - IF (EW .EQ. 'W') RLO = -ALON - rlat(npoint) = rla - rlon(npoint) = rlo - istat(npoint) = ist - cstat(npoint) = T3 - elevstn(npoint) = ielev - - if(lsfc .le. 9) then - landwater(npoint) = 2 !!nearest - else if(lsfc .le. 19) then - landwater(npoint) = 1 !!land - else if(lsfc .ge. 20) then - landwater(npoint) = 0 !!water - endif - endif - enddo - 200 continue - if(npoint.le.0) then - print *, ' station list file is empty, abort program' - call abort - elseif(npoint.gt.nsta) then - print *, ' number of station exceeds nsta, abort program' - call abort - endif -! print*,'npoint= ', npoint -! print*,'np,IST,idum,jdum,rlat(np),rlon(np)= ' - if(np1 == 0) then - do np = 1, npoint - read(7,98) IST, iidum(np), jjdum(np), ALAT, ALON - enddo - endif - 98 FORMAT (3I6, 2F9.2) - if (mrank.eq.0.and.makebufr) then - REWIND 1 - READ (1,100) SBSET - 100 FORMAT ( ////// 2X, A8 ) - PRINT 120, SBSET - 120 FORMAT ( ' SBSET=#', A8, '#' ) - REWIND 1 -C -C READ PARM NAMES AND NUMBER OF PARM NAMES FROM BUFR TABLE. - DO IS = 1,4 - CALL BFRHDR ( 1, SEQNAM(IS), SEQFLG(IS), - X CLIST(IS), NPP(IS), IRET ) - IF ( IRET .NE. 0 ) THEN - PRINT*, ' CALL BFRHDR IRET=', IRET - ENDIF - ENDDO - lss = len ( dird ) - DO WHILE ( dird (lss:lss) .eq. ' ' ) - lss = lss - 1 - END DO -C - endif - nsig = 11 - nss = nstart + nint - if(f00) nss = nstart -c do nf = nss, nend, nint - ntot = (nend - nss) / nint + 1 - ntask = mrank/(float(msize)/float(ntot)) - nf = ntask * nint + nss - print*,'n0 ntot nint nss mrank msize' - print*, n0,ntot,nint,nss,mrank,msize - print*,'nf, ntask= ', nf, ntask - if(nf .le. nend1) then - nfile = 21 + (nf / nint1) - else - nfile = 21 + (nend1/nint1) + (nf-nend1)/nint3 - endif - print*, 'nf,nint,nfile = ',nf,nint,nfile - if(nf.le.nend) then - if(nf.lt.10) then - fnsig = 'sigf0' - write(fnsig(6:6),'(i1)') nf - ncfsig = 6 - elseif(nf.lt.100) then - fnsig = 'sigf' - write(fnsig(5:6),'(i2)') nf - ncfsig = 6 - else - fnsig = 'sigf' - write(fnsig(5:7),'(i3)') nf - ncfsig = 7 - endif - print *, 'Opening file : ',fnsig - -!! 
read in either nemsio or NetCDF files - if (fformat == 'netcdf') then - error=nf90_open(trim(fnsig),nf90_nowrite,ncid) - error=nf90_inq_dimid(ncid,"grid_xt",dimid) - error=nf90_inquire_dimension(ncid,dimid,dim_nam,im) - error=nf90_inq_dimid(ncid,"grid_yt",dimid) - error=nf90_inquire_dimension(ncid,dimid,dim_nam,jm) - error=nf90_inq_dimid(ncid,"pfull",dimid) - error=nf90_inquire_dimension(ncid,dimid,dim_nam,levsi) - error=nf90_close(ncid) - print*,'NetCDF file im,jm,lm= ',im,jm,levs,levsi - - else - call nemsio_init(iret=irets) - print *,'nemsio_init, iret=',irets - call nemsio_open(gfile,trim(fnsig),'read',iret=irets) - if ( irets /= 0 ) then - print*,"fail to open nems atmos file";stop - endif - - call nemsio_getfilehead(gfile,iret=irets - & ,dimx=im,dimy=jm,dimz=levsi) - if( irets /= 0 ) then - print*,'error finding model dimensions '; stop - endif - print*,'nemsio file im,jm,lm= ',im,jm,levsi - call nemsio_close(gfile,iret=irets) - endif - allocate (iocomms(0:ntot)) - if (fformat == 'netcdf') then - print*,'iocomms= ', iocomms - call mpi_comm_split(MPI_COMM_WORLD,ntask,0,iocomms(ntask),ierr) - call mpi_comm_rank(iocomms(ntask), iope, ierr) - call mpi_comm_size(iocomms(ntask), ionproc, ierr) - - call meteorg(npoint,rlat,rlon,istat,cstat,elevstn, - & nf,nfile,fnsig,jdate,idate, - & levsi,im,jm,nsfc, - & landwater,nend1, nint1, nint3, iidum,jjdum,np1, - & fformat,iocomms(ntask),iope,ionproc) - call mpi_barrier(iocomms(ntask), ierr) - call mpi_comm_free(iocomms(ntask), ierr) - else -!! For nemsio input - call meteorg(npoint,rlat,rlon,istat,cstat,elevstn, - & nf,nfile,fnsig,jdate,idate, - & levs,im,jm,nsfc, - & landwater,nend1, nint1, nint3, iidum,jjdum,np1, - & fformat,iocomms(ntask),iope,ionproc) - endif - endif - call mpi_barrier(mpi_comm_world,ierr) - call mpi_finalize(ierr) - if(mrank.eq.0) then - print *, ' starting to make bufr files' - print *, ' makebufr= ', makebufr - print *, 'nint1,nend1,nint3,nend= ',nint1,nend1,nint3,nend -!! idate = 0 7 1 2019 -!! jdate = 2019070100 - - if(makebufr) then - nend3 = nend - call buff(nint1,nend1,nint3,nend3, - & npoint,idate,jdate,levso, - & dird,lss,istat,sbset,seqflg,clist,npp,wrkd) - CALL W3TAGE('METEOMRF') - endif - endif - end diff --git a/sorc/gfs_bufr.fd/gslp.f b/sorc/gfs_bufr.fd/gslp.f deleted file mode 100755 index 5b0eca1f51..0000000000 --- a/sorc/gfs_bufr.fd/gslp.f +++ /dev/null @@ -1,92 +0,0 @@ -!$$$ Subprogram documentation block -! -! Subprogram: gslp Compute sea level pressure as in the GFS -! Prgmmr: Iredell Org: np23 Date: 1999-10-18 -! -! Abstract: This subprogram computes sea level pressure from profile data -! using the Shuell method in the GFS. -! -! Program history log: -! 1999-10-18 Mark Iredell -! -! Usage: call gslp(km,hs,ps,p,t,sh,prmsl,h,ho) -! Input argument list: -! km integer number of levels -! hs real surface height (m) -! ps real surface pressure (Pa) -! p real (km) profile pressures (Pa) -! t real (km) profile temperatures (K) -! sh real (km) profile specific humidities (kg/kg) -! Output argument list: -! prmsl real sea level pressure (Pa) -! h real integer-layer height (m) -! ho real integer-layer height at 1000hPa and 500hPa (m) -! -! Modules used: -! physcons physical constants -! -! Attributes: -! Language: Fortran 90 -! 
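Before the Shuell reduction, gslp (below) builds layer heights hydrostatically from the surface upward using virtual temperature and differences of log pressure. A Python sketch of that integration step; the constants are representative values, with fvirt = rv/rd - 1:

    import math

    RD, G = 287.05, 9.80665           # assumed gas constant and gravity
    FVIRT = 461.50 / 287.05 - 1.0
    ROG = RD / G

    def layer_heights(hs, ps, p, t, sh):
        """Heights of the model layers given surface height/pressure and profiles."""
        tv = [tk * (1.0 + FVIRT * q) for tk, q in zip(t, sh)]
        h = [hs - ROG * tv[0] * (math.log(p[0]) - math.log(ps))]
        for k in range(1, len(p)):
            h.append(h[k - 1] - ROG * 0.5 * (tv[k - 1] + tv[k])
                     * (math.log(p[k]) - math.log(p[k - 1])))
        return h

    # Two layers of an isothermal, dry 280 K column above a 250 m surface.
    print(layer_heights(250.0, 98000.0, [95000.0, 90000.0], [280.0, 280.0], [0.0, 0.0]))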
-!$$$ -subroutine gslp(km,hs,ps,p,t,sh,prmsl,h,ho) - use physcons - implicit none - integer,intent(in):: km - real,intent(in):: hs,ps - real,intent(in),dimension(km):: p,t,sh - real,intent(out):: prmsl - real,intent(out),dimension(km):: h - real,intent(out),dimension(2):: ho - real,parameter:: gammam=-6.5e-3,zshul=75.,tvshul=290.66 - real,parameter:: pm1=1.e5,tm1=287.45,hm1=113.,hm2=5572.,& - fslp=con_g*(hm2-hm1)/(con_rd*tm1) - integer k,i - real aps,ap(km),tv(km) - real apo(2) - real tvu,tvd,gammas,part - real hfac -! compute model heights - aps=log(ps) - ap(1)=log(p(1)) - tv(1)=t(1)*(1+con_fvirt*sh(1)) - h(1)=hs-con_rog*tv(1)*(ap(1)-aps) - do k=2,km - ap(k)=log(p(k)) - tv(k)=t(k)*(1+con_fvirt*sh(k)) - h(k)=h(k-1)-con_rog*0.5*(tv(k-1)+tv(k))*(ap(k)-ap(k-1)) - enddo -! compute 1000 and 500 mb heights - apo(1)=log(1000.e2) - apo(2)=log(500.e2) - do i=1,2 - if(aps.lt.apo(i)) then - tvu=tv(1) - if(h(1).gt.zshul) then - tvd=tvu-gammam*h(1) - if(tvd.gt.tvshul) then - if(tvu.gt.tvshul) then - tvd=tvshul-5.e-3*(tvu-tvshul)**2 - else - tvd=tvshul - endif - endif - gammas=(tvu-tvd)/h(1) - else - gammas=0. - endif - part=con_rog*(apo(i)-ap(1)) - ho(i)=h(1)-tvu*part/(1.+0.5*gammas*part) - else - do k=1,km - if(ap(k).lt.apo(i)) then - ho(i)=h(k)-con_rog*tv(k)*(apo(i)-ap(k)) - exit - endif - enddo - endif - enddo -! compute sea level pressure - hfac=ho(1)/(ho(2)-ho(1)) - prmsl=pm1*exp(fslp*hfac) -end subroutine diff --git a/sorc/gfs_bufr.fd/lcl.f b/sorc/gfs_bufr.fd/lcl.f deleted file mode 100755 index 5fa4c4719e..0000000000 --- a/sorc/gfs_bufr.fd/lcl.f +++ /dev/null @@ -1,45 +0,0 @@ - SUBROUTINE LCL(TLCL,PLCL,T,P,Q) -C -C LIFTING CONDENSATION LEVEL ROUTINE -C - REAL L0, KAPPA - parameter (dtdp=4.5e-4,kappa=.286,g=9.81) - parameter (cp=1004.6,cl=4185.5,cpv=1846.0) - parameter (rv=461.5,l0=2.500e6,t0=273.16,es0=610.78) - parameter (cps=2106.0,hfus=3.3358e5,rd=287.05) - parameter (fact1=(CPV - CL) / RV,fact1i=(cps-cl)/rv) - parameter (fact2=(L0 + (CL - CPV) * T0) / RV) - parameter (fact2i=(L0 + hfus + (CL - cps) * T0) / RV) - parameter (fact3=1. / T0,eps=rd/rv,tmix=t0-20.) - DESDT(ES,T) = ES * (FACT1 / T + FACT2 / T ** 2) - DESDTi(ES,T) = ES * (FACT1i / T + FACT2i / T ** 2) - ITER = 0 - CALL TDEW(TG,T,Q,P) - 5 CALL SVP(QS,ES,P,TG) - DES = DESDT(ES,TG) - if(tg.ge.t0) then - des = desdt(es,tg) - elseif(tg.lt.tmix) then - des = desdti(es,tg) - else - w = (tg - tmix) / (t0 - tmix) - des = w * desdt(es,tg) + (1.-w) * desdti(es,tg) - endif - FT = P * (TG / T) ** KAPPA - DFT = KAPPA * FT / TG - GT = (EPS + Q * (1. - EPS)) * ES - Q * FT - DGT = (EPS + Q * (1. - EPS)) * DES - Q * DFT - DTG = GT / DGT -c WRITE(6,*) ' ITER, DTG =', ITER, DTG - TG = TG - DTG - IF(ABS(DTG).LT..1) GOTO 10 - ITER = ITER + 1 - IF(ITER.GT.30) THEN - WRITE(6,*) ' LCL ITERATION DIVERGES' - STOP 'ABEND 101' - ENDIF - GOTO 5 - 10 TLCL = TG - PLCL = P * (TLCL / T) ** KAPPA - RETURN - END diff --git a/sorc/gfs_bufr.fd/machine.f b/sorc/gfs_bufr.fd/machine.f deleted file mode 100755 index bec00028ad..0000000000 --- a/sorc/gfs_bufr.fd/machine.f +++ /dev/null @@ -1,15 +0,0 @@ - MODULE MACHINE - - IMPLICIT NONE - SAVE -! Machine dependant constants - integer kind_io4,kind_io8,kind_phys,kind_rad - parameter (kind_rad = selected_real_kind(13,60)) ! the '60' maps to 64-bit real - parameter (kind_phys = selected_real_kind(13,60)) ! the '60' maps to 64-bit real - parameter (kind_io4 = 4) -! 
parameter (kind_io8 = 8) - parameter (kind_io8 = 4) - integer kint_mpi - parameter (kint_mpi = 4) - - END MODULE MACHINE diff --git a/sorc/gfs_bufr.fd/makefile_module b/sorc/gfs_bufr.fd/makefile_module deleted file mode 100755 index d9d5374a7a..0000000000 --- a/sorc/gfs_bufr.fd/makefile_module +++ /dev/null @@ -1,79 +0,0 @@ -##################################################################################### -# gfs_bufr using module compile standard -# # 11/08/2019 guang.ping.lou@noaa.gov: Create NetCDF version -# ##################################################################################### -# set -eux -# - -FC = $(myFC) $(myFCFLAGS) -CPP = $(myCPP) $(myCPPFLAGS) - -FFLAGS = -I$(NETCDF_INCLUDES) \ - -I$(NEMSIO_INC) \ - -I$(SIGIO_INC) \ - -I$(W3EMC_INC4) - -LIBS = -L$(NETCDF_LIBRARIES) -lnetcdff -lnetcdf \ - -L$(HDF5_LIBRARIES) -lhdf5_hl -lhdf5 -lz \ - $(NEMSIO_LIB) \ - $(W3EMC_LIB4) \ - $(W3NCO_LIB4) \ - $(BUFR_LIB4) \ - $(BACIO_LIB4) \ - $(SP_LIB4) \ - $(SIGIO_LIB) - -SRCM = gfsbufr.f -OBJS = physcons.o funcphys.o meteorg.o bfrhdr.o newsig1.o terp3.o\ - bfrize.o vintg.o buff.o rsearch.o \ - svp.o calpreciptype.o lcl.o mstadb.o tdew.o\ - machine.o gslp.o modstuff1.o read_nemsio.o read_netcdf_p.o - -CMD = ../../exec/gfs_bufr - -$(CMD): $(SRCM) $(OBJS) - $(FC) $(FFLAGS) $(SRCM) $(OBJS) $(LIBS) -o $(CMD) - -machine.o: machine.f - $(FC) $(FFLAGS) -free -c machine.f -physcons.o: physcons.f machine.o - $(FC) $(FFLAGS) -free -c physcons.f -funcphys.o: funcphys.f physcons.o - $(FC) $(FFLAGS) -free -c funcphys.f -gslp.o: gslp.f - $(FC) $(FFLAGS) -free -c gslp.f -modstuff1.o: modstuff1.f - $(FC) $(INC) $(FFLAGS) -free -c modstuff1.f -meteorg.o: meteorg.f physcons.o funcphys.o - $(FC) $(INC) $(FFLAGS) -c meteorg.f -read_netcdf_p.o: read_netcdf_p.f - $(FC) $(INC) $(FFLAGS) -c read_netcdf_p.f -read_nemsio.o: read_nemsio.f - $(FC) $(INC) $(FFLAGS) -c read_nemsio.f -bfrhdr.o: bfrhdr.f - $(FC) $(FFLAGS) -c bfrhdr.f -newsig1.o: newsig1.f - $(FC) $(FFLAGS) -c newsig1.f -terp3.o: terp3.f - $(FC) $(FFLAGS) -c terp3.f -bfrize.o: bfrize.f - $(FC) $(FFLAGS) -c bfrize.f -vintg.o: vintg.f - $(FC) $(FFLAGS) -c vintg.f -buff.o: buff.f - $(FC) $(FFLAGS) -c buff.f -rsearch.o: rsearch.f - $(FC) $(FFLAGS) -c rsearch.f -svp.o: svp.f - $(FC) $(FFLAGS) -c svp.f -calpreciptype.o: calpreciptype.f physcons.o funcphys.o - $(FC) $(FFLAGS) -FR -c calpreciptype.f -lcl.o: lcl.f - $(FC) $(FFLAGS) -c lcl.f -mstadb.o: mstadb.f - $(FC) $(FFLAGS) -c mstadb.f -tdew.o: tdew.f - $(FC) $(FFLAGS) -c tdew.f - -clean: - /bin/rm -f $(OBJS) *.mod gfs_bufr diff --git a/sorc/gfs_bufr.fd/meteorg.f b/sorc/gfs_bufr.fd/meteorg.f deleted file mode 100755 index 6b7c2c7db4..0000000000 --- a/sorc/gfs_bufr.fd/meteorg.f +++ /dev/null @@ -1,1326 +0,0 @@ - subroutine meteorg(npoint,rlat,rlon,istat,cstat,elevstn, - & nf,nfile,fnsig,jdate,idate, - & levs,im,jm,kdim, - & landwater,nend1,nint1,nint3,iidum,jjdum,np1, - & fformat,iocomms,iope,ionproc) - -!$$$ SUBPROGRAM DOCUMENTATION BLOCK -! . . . . -! SUBPROGRAM: meteorg -! PRGMMR: HUALU PAN ORG: W/NMC23 DATE: 1999-07-21 -! -! ABSTRACT: Creates BUFR meteogram files for the AVN and MRF. -! -! PROGRAM HISTORY LOG: -! 1999-07-21 HUALU PAN -! 2007-02-02 FANGLIN YANG EXPAND FOR HYBRID COORDINATES USING SIGIO -! 2009-07-24 FANGLIN YANG CHANGE OUTPUT PRESSURE TO INTEGER-LAYER -! PRESSURE (line 290) -! CORRECT THE TEMPERATURE ADJUSTMENT (line 238) -! 2014-03-27 DANA CARLIS UNIFY CODE WITH GFS FORECAST MODEL PRECIP -! TYPE CALCULATION -! 2016-09-27 HUIYA CHUANG MODIFY TO READ GFS NEMS OUTPUT ON GRID SPACE -! 
2017-02-27 GUANG PING LOU CHANGE OUTPUT PRECIPITATION TO HOURLY AMOUNT -! TO 120 HOURS AND 3 HOURLY TO 180 HOURS. -! 2018-02-01 GUANG PING LOU INGEST FV3GFS NEMSIO ACCUMULATED PRECIPITATION -! AND RECALCULATE HOURLY AND 3 HOURLY OUTPUT DEPENDING -! ON LOGICAL VALUE OF precip_accu. -! 2018-02-08 GUANG PING LOU ADDED READING IN AND USING DZDT AS VERTICAL VELOCITY -! 2018-02-16 GUANG PING LOU ADDED READING IN AND USING MODEL DELP AND DELZ -! 2018-02-21 GUANG PING LOU THIS VERSION IS BACKWARD COMPATIBLE TO GFS MODEL -! 2018-03-27 GUANG PING LOU CHANGE STATION ELEVATION CORRECTION LAPSE RATE FROM 0.01 TO 0.0065 -! 2018-03-28 GUANG PING LOU GENERALIZE TIME INTERVAL -! 2019-07-08 GUANG PING LOU ADDED STATION CHARACTER IDS -! 2019-10-08 GUANG PING LOU MODIFY TO READ IN NetCDF FILES. RETAIN NEMSIO -! RELATED CALLS AND CLEAN UP THE CODE. -! 2020-04-24 GUANG PING LOU Clean up code and remove station height -! adjustment -! -! USAGE: CALL PROGRAM meteorg -! INPUT: -! npoint - number of points -! rlat(npint) - latitude -! rlon(npoint) - longtitude -! istat(npoint) - station id -! elevstn(npoint) - station elevation (m) -! nf - forecast cycle -! fnsig - sigma file name -! idate(4) - date -! levs - input vertical layers -! kdim - sfc file dimension -! -! OUTPUT: -! nfile - output data file channel -! jdate - date YYYYMMDDHH -! -! ATTRIBUTES: -! LANGUAGE: -! MACHINE: IBM SP -! -!$$$ - use netcdf - use nemsio_module - use sigio_module - use physcons - use mersenne_twister - use funcphys - implicit none - include 'mpif.h' - type(nemsio_gfile) :: gfile - type(nemsio_gfile) :: ffile - type(nemsio_gfile) :: ffile2 - integer :: nfile,npoint,levs,kdim - integer :: nfile1 - integer :: i,j,im,jm,kk,idum,jdum,idvc,idsl -! idsl Integer(sigio_intkind) semi-lagrangian id -! idvc Integer(sigio_intkind) vertical coordinate id -! (=1 for sigma, =2 for ec-hybrid, =3 for ncep hybrid) - integer,parameter :: nvcoord=2 - integer,parameter :: levso=64 - integer :: idate(4),nij,nflx2,np,k,l,nf,nfhour,np1 - integer :: idate_nems(7) - integer :: iret,jdate,leveta,lm,lp1 - character*150 :: fnsig,fngrib -!! real*8 :: data(6*levs+25) - real*8 :: data2(6*levso+25) - real*8 :: rstat1 - character*8 :: cstat1 - character*4 :: cstat(npoint) - real :: fhour,pp,ppn,qs,qsn,esn,es,psfc,ppi,dtemp,nd - real :: t,q,u,v,td,tlcl,plcl,qw,tw,xlat,xlon - integer,dimension(npoint):: landwater - integer,dimension(im,jm):: lwmask - real,dimension(im,jm):: apcp, cpcp - real,dimension(npoint,2+levs*3):: grids - real,dimension(npoint) :: rlat,rlon,pmsl,ps,psn,elevstn - real,dimension(1) :: psone - real,dimension(im*jm) :: dum1d,dum1d2 - real,dimension(im,jm) :: gdlat, hgt, gdlon - real,dimension(im,jm,15) :: dum2d - real,dimension(im,jm,levs) :: t3d, q3d, uh, vh,omega3d - real,dimension(im,jm,levs) :: delpz - real,dimension(im,jm,levs+1) :: pint, zint - real,dimension(npoint,levs) :: gridu,gridv,omega,qnew,zp - real,dimension(npoint,levs) :: p1,pd3,ttnew - real,dimension(npoint,levs) :: z1 - real,dimension(npoint,levs+1) :: pi3 - real :: zp2(2) - real,dimension(kdim,npoint) :: sfc - real,dimension(1,levs+1) :: prsi,phii - real,dimension(1,levs) :: gt0,gq0,prsl,phy_f3d - real :: PREC,TSKIN,SR,randomno(1,2) - real :: DOMR,DOMZR,DOMIP,DOMS - real :: vcoord(levs+1,nvcoord),vdummy(levs+1) - real :: vcoordnems(levs+1,3,2) - real :: rdum - integer :: n3dfercld,iseedl - integer :: istat(npoint) - logical :: trace -!! logical, parameter :: debugprint=.true. - logical, parameter :: debugprint=.false. 
- character lprecip_accu*3 - real, parameter :: ERAD=6.371E6 - real, parameter :: DTR=3.1415926/180. - real :: ap - integer :: nf1, fint - integer :: nend1, nint1, nint3 - character*150 :: fngrib2 - integer recn_dpres,recn_delz,recn_dzdt - integer :: jrec - equivalence (cstat1,rstat1) - integer iidum(npoint),jjdum(npoint) - integer :: error, ncid, ncid2, id_var,dimid - character(len=100) :: long_name - character(len=6) :: fformat - integer,dimension(8) :: clocking - character(10) :: date - character(12) :: time - character(7) :: zone - character(3) :: Zreverse - character(20) :: VarName,LayName - integer iocomms,iope,ionproc - - nij = 12 -!! nflx = 6 * levs - nflx2 = 6 * levso - recn_dpres = 0 - recn_delz = 0 - recn_dzdt = 0 - jrec = 0 - lprecip_accu='yes' - - idvc=2 - idsl=1 -!read in NetCDF file header info - print*,"fformat= ", fformat -! print*,'meteorg.f, idum,jdum= ' -! do np = 1, npoint -! print*, iidum(np), jjdum(np) -! enddo - - if(fformat .eq. "netcdf") then - print*,'iocomms inside meteorg.f=', iocomms - error=nf90_open(trim(fnsig),ior(nf90_nowrite,nf90_mpiio), - & ncid,comm=iocomms, info = mpi_info_null) - error=nf90_get_att(ncid,nf90_global,"ak",vdummy) - do k = 1, levs+1 - vcoord(k,1)=vdummy(levs-k+1) - enddo - error=nf90_get_att(ncid,nf90_global,"bk",vdummy) - do k = 1, levs+1 - vcoord(k,2)=vdummy(levs-k+1) - enddo - error=nf90_inq_varid(ncid, "time", id_var) - error=nf90_get_var(ncid, id_var, nfhour) - print*, "nfhour:",nfhour - error=nf90_get_att(ncid,id_var,"units",long_name) -!! print*,'time units',' -- ',trim(long_name) - read(long_name(13:16),"(i4)")idate(4) - read(long_name(18:19),"(i2)")idate(2) - read(long_name(21:22),"(i2)")idate(3) - read(long_name(24:25),"(i2)")idate(1) - fhour=float(nfhour) - print*,'date= ', idate - jdate = idate(4)*1000000 + idate(2)*10000+ - & idate(3)*100 + idate(1) - print *, 'jdate = ', jdate - error=nf90_inq_varid(ncid, "lon", id_var) - error=nf90_get_var(ncid, id_var, gdlon) - error=nf90_inq_varid(ncid, "lat", id_var) - error=nf90_get_var(ncid, id_var, gdlat) -!!end read NetCDF hearder info, read nemsio below if necessary - else - - call nemsio_open(gfile,trim(fnsig),'read',iret=iret) - call nemsio_getfilehead(gfile,iret=iret - + ,idate=idate_nems(1:7),nfhour=nfhour - + ,idvc=idvc,idsl=idsl,lat=dum1d,lon=dum1d2 - + ,vcoord=vcoordnems) - - do k=1,levs+1 - vcoord(k,1)=vcoordnems(k,1,1) - vcoord(k,2)=vcoordnems(k,2,1) - end do - idate(1)=idate_nems(4) - idate(2)=idate_nems(2) - idate(3)=idate_nems(3) - idate(4)=idate_nems(1) - fhour=float(nfhour) - print *, ' processing forecast hour ', fhour - print *, ' idate =', idate - jdate = idate(4)*1000000 + idate(2)*10000+ - & idate(3)*100 + idate(1) - print *, 'jdate = ', jdate - print *, 'Total number of stations = ', npoint - ap = 0.0 - do j=1,jm - do i=1,im - gdlat(i,j)=dum1d((j-1)*im+i) - gdlon(i,j)=dum1d2((j-1)*im+i) - end do - end do - - endif !end read in nemsio hearder - - if(debugprint) then - do k=1,levs+1 - print*,'vcoord(k,1)= ', k, vcoord(k,1) - end do - do k=1,levs+1 - print*,'vcoord(k,2)= ', k, vcoord(k,2) - end do - print*,'sample lat= ',gdlat(im/5,jm/4) - + ,gdlat(im/5,jm/3),gdlat(im/5,jm/2) - print*,'sample lon= ',gdlon(im/5,jm/4) - + ,gdlon(im/5,jm/3),gdlon(im/5,jm/2) - endif -! 
topography - if (fformat == 'netcdf') then - VarName='hgtsfc' - Zreverse='yes' - call read_netcdf_p(ncid,im,jm,1,VarName,hgt,Zreverse, - & iope,ionproc,iocomms,error) - if (error /= 0) print*,'surface hgt not found' - else - VarName='hgt' - LayName='sfc' - call read_nemsio(gfile,im,jm,1,VarName,LayName,hgt, - & iope,ionproc,iocomms,error) - if (error /= 0) print*,'surface hgt not found' - endif - if(debugprint)print*,'sample sfc h= ',hgt(im/5,jm/4) - + ,hgt(im/5,jm/3),hgt(im/5,jm/2) - -! surface pressure (Pa) - if (fformat == 'netcdf') then - VarName='pressfc' - Zreverse='yes' - call read_netcdf_p(ncid,im,jm,1,VarName,pint(:,:,1), - & Zreverse, - & iope,ionproc,iocomms,error) - if (error /= 0) print*,'surface pressure not found' - else - VarName='pres' - LayName='sfc' - call read_nemsio(gfile,im,jm,1,VarName, - & LayName,pint(:,:,1),error) - if (error /= 0) print*,'surface pressure not found' - endif - if(debugprint)print*,'sample sfc P= ',pint(im/2,jm/4,1), - + pint(im/2,jm/3,1),pint(im/2,jm/2,1) - -! temperature using NetCDF - if (fformat == 'netcdf') then - VarName='tmp' - Zreverse='yes' - call read_netcdf_p(ncid,im,jm,levs,VarName,t3d,Zreverse, - & iope,ionproc,iocomms,error) - if (error /= 0) print*,'temp not found' - else - VarName='tmp' - LayName='mid layer' - call read_nemsio(gfile,im,jm,levs,VarName,LayName,t3d,error) - if (error /= 0) print*,'temp not found' - endif - if(debugprint) then - print*,'sample T at lev=1 to levs ' - do k = 1, levs - print*,k, t3d(im/2,jm/3,k) - enddo - endif -! specific humidity - if (fformat == 'netcdf') then - VarName='spfh' - Zreverse='yes' - call read_netcdf_p(ncid,im,jm,levs,VarName,q3d,Zreverse, - & iope,ionproc,iocomms,error) - if (error /= 0) print*,'spfh not found' - else - VarName='spfh' - LayName='mid layer' - call read_nemsio(gfile,im,jm,levs,VarName,LayName,q3d,error) - if (error /= 0) print*,'spfh not found' - endif - if(debugprint) then - print*,'sample Q at lev=1 to levs ' - do k = 1, levs - print*,k, q3d(im/2,jm/3,k) - enddo - endif -! U wind - if (fformat == 'netcdf') then - VarName='ugrd' - Zreverse='yes' - call read_netcdf_p(ncid,im,jm,levs,VarName,uh,Zreverse, - & iope,ionproc,iocomms,error) - if (error /= 0) print*,'ugrd not found' - else - VarName='ugrd' - LayName='mid layer' - call read_nemsio(gfile,im,jm,levs,VarName,LayName,uh,error) - if (error /= 0) print*,'ugrd not found' - endif - if(debugprint) then - print*,'sample U at lev=1 to levs ' - do k = 1, levs - print*,k, uh(im/2,jm/3,k) - enddo - endif -! V wind - if (fformat == 'netcdf') then - VarName='vgrd' - Zreverse='yes' - call read_netcdf_p(ncid,im,jm,levs,VarName,vh,Zreverse, - & iope,ionproc,iocomms,error) - if (error /= 0) print*,'vgrd not found' - else - VarName='vgrd' - LayName='mid layer' - call read_nemsio(gfile,im,jm,levs,VarName,LayName,vh,error) - if (error /= 0) print*,'vgrd not found' - endif - if(debugprint) then - print*,'sample V at lev=1 to levs ' - do k = 1, levs - print*,k, vh(im/2,jm/3,k) - enddo - endif -! dzdt !added by Guang Ping Lou for FV3GFS - if (fformat == 'netcdf') then - VarName='dzdt' - Zreverse='yes' - call read_netcdf_p(ncid,im,jm,levs,VarName,omega3d,Zreverse, - & iope,ionproc,iocomms,error) - if (error /= 0) print*,'dzdt not found' - else - VarName='dzdt' - LayName='mid layer' - call read_nemsio(gfile,im,jm,levs,VarName,LayName, - & omega3d,error) - if (error /= 0) print*,'dzdt not found' - endif - if(debugprint) then - print*,'sample dzdt at lev=1 to levs ' - do k = 1, levs - print*,k, omega3d(im/2,jm/3,k) - enddo - endif -! 
dpres !added by Guang Ping Lou for FV3GFS (interface pressure delta) - if (fformat == 'netcdf') then - VarName='dpres' - Zreverse='no' - call read_netcdf_p(ncid,im,jm,levs,VarName,delpz,Zreverse, - & iope,ionproc,iocomms,error) - if (error /= 0) print*,'dpres not found' - else - VarName='dpres' - LayName='mid layer' - call read_nemsio(gfile,im,jm,levs,VarName,LayName, - & delpz,error) - if (error /= 0) print*,'dpres not found' - endif - if(debugprint) then - print*,'sample delp at lev=1 to levs ' - do k = 1, levs - print*,k, delpz(im/2,jm/3,k) - enddo - endif -! compute interface pressure - if(recn_dpres == -9999) then - do k=2,levs+1 - do j=1,jm - do i=1,im - pint(i,j,k)=vcoord(k,1) - + +vcoord(k,2)*pint(i,j,1) - end do - end do - end do - else -! compute pint using dpres from top down if DZDT is used - if (fformat == 'netcdf') then - do j=1,jm - do i=1,im - pint(i,j,levs+1) = delpz(i,j,1) - end do - end do - do k=levs,2,-1 - kk=levs-k+2 - do j=1,jm - do i=1,im - pint(i,j,k) = pint(i,j,k+1) + delpz(i,j,kk) - end do - end do - end do - else - do k=2,levs+1 - do j=1,jm - do i=1,im - pint(i,j,k) = pint(i,j,k-1) - delpz(i,j,k-1) - end do - end do - end do - endif - if(debugprint) then - print*,'sample interface pressure pint at lev =1 to levs ' - do k = 1, levs+1 - print*,k, pint(im/2,jm/3,k),pint(im/3,jm/8,k) - enddo - endif - endif -! delz !added by Guang Ping Lou for FV3GFS ("height thickness" with unit "meters" bottom up) - if (fformat == 'netcdf') then - VarName='delz' - Zreverse='no' - call read_netcdf_p(ncid,im,jm,levs,VarName,delpz,Zreverse, - & iope,ionproc,iocomms,error) - if (error /= 0) print*,'delz not found' - else - VarName='delz' - LayName='mid layer' - call read_nemsio(gfile,im,jm,levs,VarName,LayName,delpz,error) - if (error /= 0) print*,'delz not found' - endif - if(debugprint) then - print*,'sample delz at lev=1 to levs ' - do k = 1, levs - print*,k, delpz(im/2,jm/3,k) - enddo - endif - -! compute interface height (meter) - if(recn_delz == -9999) then - print*, 'using calculated height' - else -! compute zint using delz from bot up if DZDT is used - if (fformat == 'netcdf') then - do j=1,jm - do i=1,im - zint(i,j,1) = 0.0 - end do - end do - do k=2,levs+1 - kk=levs-k+1 - do j=1,jm - do i=1,im - zint(i,j,k) = zint(i,j,k-1) - delpz(i,j,kk) - end do - end do - end do - else - do k=2,levs+1 - do j=1,jm - do i=1,im - zint(i,j,k) = zint(i,j,k-1) + delpz(i,j,k-1) - end do - end do - end do - endif - if(debugprint) then - print*,'sample interface height zint at lev =1 to levs ' - do k = 1, levs+1 - print*,k, zint(im/2,jm/3,k),zint(im/3,jm/8,k) - enddo - endif - endif - -! close up this NetCDF file - error=nf90_close(ncid) - -! Now open up NetCDF surface files - if ( nf .le. 
nend1 ) then - nf1 = nf - nint1 - else - nf1 = nf - nint3 - endif - if ( nf == 0 ) nf1=0 - if(nf==0) then - fngrib='flxf00' - elseif(nf.lt.10) then - fngrib='flxf0' - write(fngrib(6:6),'(i1)') nf - elseif(nf.lt.100) then - fngrib='flxf' - write(fngrib(5:6),'(i2)') nf - else - fngrib='flxf' - write(fngrib(5:7),'(i3)') nf - endif - if(nf1==0) then - fngrib2='flxf00' - elseif(nf1.lt.10) then - fngrib2='flxf0' - write(fngrib2(6:6),'(i1)') nf1 - elseif(nf1.lt.100) then - fngrib2='flxf' - write(fngrib2(5:6),'(i2)') nf1 - else - fngrib2='flxf' - write(fngrib2(5:7),'(i3)') nf1 - endif - if (fformat == 'netcdf') then - error=nf90_open(trim(fngrib),nf90_nowrite,ncid) -!open T-nint below - error=nf90_open(trim(fngrib2),nf90_nowrite,ncid2) - if(error /= 0)print*,'file not open',trim(fngrib), trim(fngrib2) - else - call nemsio_open(ffile,trim(fngrib),'read',iret=error) - call nemsio_open(ffile2,trim(fngrib2),'read',iret=error) - if(error /= 0)print*,'file not open',trim(fngrib), trim(fngrib2) - endif -! land water mask - if (fformat == 'netcdf') then - VarName='land' - Zreverse='no' - call read_netcdf_p(ncid,im,jm,1,VarName,lwmask,Zreverse, - & iope,ionproc,iocomms,error) - if (error /= 0) print*,'lwmask not found' - else - VarName='land' - LayName='sfc' - call read_nemsio(ffile,im,jm,1,VarName,LayName,lwmask,error) - if (error /= 0) print*,'lwmask not found' - endif - if(debugprint) - + print*,'sample land mask= ',lwmask(im/2,jm/4), - + lwmask(im/2,jm/3) - -! surface T - if (fformat == 'netcdf') then - VarName='tmpsfc' - Zreverse='no' - call read_netcdf_p(ncid,im,jm,1,VarName,dum2d(:,:,1), - & Zreverse, - & iope,ionproc,iocomms,error) - if (error /= 0) print*,'tmpsfc not found' - else - VarName='tmp' - LayName='sfc' - call read_nemsio(ffile,im,jm,1,VarName,LayName, - & dum2d(:,:,1),error) - if (error /= 0) print*,'tmpsfc not found' - endif - if(debugprint) - + print*,'sample sfc T= ',dum2d(im/2,jm/4,1),dum2d(im/2,jm/3,1), - + dum2d(im/2,jm/2,1) -! 2m T - if (fformat == 'netcdf') then - VarName='tmp2m' - Zreverse='no' - call read_netcdf_p(ncid,im,jm,1,VarName,dum2d(:,:,2), - & Zreverse, - & iope,ionproc,iocomms,error) - if (error /= 0) print*,'tmp2m not found' - else - VarName='tmp' - LayName='2 m above gnd' - call read_nemsio(ffile,im,jm,1,VarName,LayName, - + dum2d(:,:,2),error) - if (error /= 0) print*,'tmp2m not found' - endif - if(debugprint) - + print*,'sample 2m T= ',dum2d(im/2,jm/4,2),dum2d(im/2,jm/3,2), - + dum2d(im/2,jm/2,2) - -! 2m Q - if (fformat == 'netcdf') then - VarName='spfh2m' - Zreverse='no' - call read_netcdf_p(ncid,im,jm,1,VarName,dum2d(:,:,3), - & Zreverse, - & iope,ionproc,iocomms,error) - if (error /= 0) print*,'spfh2m not found' - else - VarName='spfh' - LayName='2 m above gnd' - call read_nemsio(ffile,im,jm,1,VarName,LayName, - + dum2d(:,:,3),error) - if (error /= 0) print*,'spfh2m not found' - endif - if(debugprint) - + print*,'sample 2m Q= ',dum2d(im/2,jm/4,3),dum2d(im/2,jm/3,3), - + dum2d(im/2,jm/2,3) - -! U10 - if (fformat == 'netcdf') then - VarName='ugrd10m' - Zreverse='no' - call read_netcdf_p(ncid,im,jm,1,VarName,dum2d(:,:,4), - & Zreverse, - & iope,ionproc,iocomms,error) - if (error /= 0) print*,'ugrd10m not found' - else - VarName='ugrd' - LayName='10 m above gnd' - call read_nemsio(ffile,im,jm,1,VarName,LayName, - + dum2d(:,:,4),error) - if (error /= 0) print*,'ugrd10m not found' - endif - -! 
V10 - if (fformat == 'netcdf') then - VarName='vgrd10m' - Zreverse='no' - call read_netcdf_p(ncid,im,jm,1,VarName,dum2d(:,:,5), - & Zreverse, - & iope,ionproc,iocomms,error) - if (error /= 0) print*,'vgrd10m not found' - else - VarName='vgrd' - LayName='10 m above gnd' - call read_nemsio(ffile,im,jm,1,VarName,LayName, - + dum2d(:,:,5),error) - if (error /= 0) print*,'vgrd10m not found' - endif - -! soil T - if (fformat == 'netcdf') then - VarName='soilt1' - Zreverse='no' - call read_netcdf_p(ncid,im,jm,1,VarName,dum2d(:,:,6), - & Zreverse, - & iope,ionproc,iocomms,error) - if (error /= 0) print*,'soilt1 not found' - else - VarName='tmp' - LayName='0-10 cm down' - call read_nemsio(ffile,im,jm,1,VarName,LayName, - + dum2d(:,:,6),error) - if (error /= 0) print*,'soil T not found' - endif - if(debugprint) - + print*,'sample soil T= ',dum2d(im/2,jm/4,6),dum2d(im/2,jm/3,6), - + dum2d(im/2,jm/2,6) - -! snow depth - if (fformat == 'netcdf') then - VarName='snod' - Zreverse='no' - call read_netcdf_p(ncid,im,jm,1,VarName,dum2d(:,:,7), - & Zreverse, - & iope,ionproc,iocomms,error) - if (error /= 0) print*,'snod not found' - else - VarName='snod' - LayName='sfc' - call read_nemsio(ffile,im,jm,1,VarName,LayName, - + dum2d(:,:,7),error) - if (error /= 0) print*,'snod not found' - endif - -! evaporation -!instantaneous surface latent heat net flux - if (fformat == 'netcdf') then - VarName='lhtfl' - Zreverse='no' - call read_netcdf_p(ncid,im,jm,1,VarName,dum2d(:,:,8), - & Zreverse, - & iope,ionproc,iocomms,error) - if (error /= 0) print*,'lhtfl not found' - else - VarName='lhtfl' - LayName='sfc' - call read_nemsio(ffile,im,jm,1,VarName,LayName, - + dum2d(:,:,8),error) - if (error /= 0) print*,'lhtfl not found' - endif - if(debugprint) - + print*,'evaporation latent heat net flux= ', - + dum2d(im/2,jm/4,8),dum2d(im/2,jm/3,8) - if(debugprint) - + print*,'evaporation latent heat net flux stn 000692)= ', - + dum2d(2239,441,8) - -! total precip - if ( nf .le. nend1 ) then - fint = nint1 - else - fint = nint3 - endif -! for accumulated precipitation: - if (fformat == 'netcdf') then - VarName='prate_ave' - Zreverse='no' -!! call read_netcdf_p(ncid,im,jm,1,VarName,apcp,Zreverse,error) !current hour - call read_netcdf_p(ncid,im,jm,1,VarName,apcp,Zreverse, - & iope,ionproc,iocomms,error) -!! call read_netcdf_p(ncid2,im,jm,1,VarName,cpcp,Zreverse,error) !earlier hour - call read_netcdf_p(ncid2,im,jm,1,VarName,cpcp,Zreverse, - & iope,ionproc,iocomms,error) - if (error /= 0) print*,'prate_ave not found' - else - VarName='prate_ave' - LayName='sfc' - call read_nemsio(ffile,im,jm,1,VarName,LayName, - + apcp,error) - call read_nemsio(ffile2,im,jm,1,VarName,LayName, - + cpcp,error) - if (error /= 0) print*,'prate_ave2 not found' - endif - if(debugprint) - & print*,'sample fhour ,3= ', fhour, - & '1sample precip rate= ',apcp(im/2,jm/3),cpcp(im/2,jm/3) - ap=fhour-fint - do j=1,jm - do i=1,im - dum2d(i,j,9) =(apcp(i,j)*fhour-cpcp(i,j)*ap)*3600.0 - end do - end do - - if(debugprint) - & print*,'sample fhour ,5= ', fhour, - & 'sample total precip= ',dum2d(im/2,jm/4,9), - + dum2d(im/2,jm/3,9),dum2d(im/2,jm/2,9) - -! 
convective precip - if (fformat == 'netcdf') then - VarName='cprat_ave' - Zreverse='no' - call read_netcdf_p(ncid,im,jm,1,VarName,apcp,Zreverse, - & iope,ionproc,iocomms,error) - call read_netcdf_p(ncid2,im,jm,1,VarName,cpcp,Zreverse, - & iope,ionproc,iocomms,error) - if (error /= 0) print*,'cprat_ave not found' - else - VarName='cprat_ave' - LayName='sfc' - call read_nemsio(ffile,im,jm,1,VarName,LayName, - + apcp,error) - call read_nemsio(ffile2,im,jm,1,VarName,LayName, - + cpcp,error) - if (error /= 0) print*,'cprat_ave2 not found' - endif - ap=fhour-fint - do j=1,jm - do i=1,im - dum2d(i,j,10)=(apcp(i,j)*fhour-cpcp(i,j)*ap)*3600.0 - & - end do - end do - -! water equi - if (fformat == 'netcdf') then - VarName='weasd' - Zreverse='no' - call read_netcdf_p(ncid,im,jm,1,VarName,dum2d(:,:,11), - & Zreverse, - & iope,ionproc,iocomms,error) - if (error /= 0) print*,'weasd not found' - else - VarName='weasd' - LayName='sfc' - call read_nemsio(ffile,im,jm,1,VarName,LayName, - + dum2d(:,:,11),error) - if (error /= 0) print*,'weasd not found' - endif - -! low cloud fraction - if (fformat == 'netcdf') then - VarName='tcdc_avelcl' - Zreverse='no' - call read_netcdf_p(ncid,im,jm,1,VarName, - & dum2d(:,:,12),Zreverse, - & iope,ionproc,iocomms,error) - if (error /= 0) print*,'tcdc_avelcl not found' - else - VarName='tcdc_ave' - LayName='low cld lay' - call read_nemsio(ffile,im,jm,1,VarName,LayName, - + dum2d(:,:,12),error) - if (error /= 0) print*,'low cld lay not found' - endif - -! mid cloud fraction - if (fformat == 'netcdf') then - VarName='tcdc_avemcl' - Zreverse='no' - call read_netcdf_p(ncid,im,jm,1,VarName, - & dum2d(:,:,13),Zreverse, - & iope,ionproc,iocomms,error) - if (error /= 0) print*,'tcdc_avemcl not found' - else - VarName='tcdc_ave' - LayName='mid cld lay' - call read_nemsio(ffile,im,jm,1,VarName,LayName, - + dum2d(:,:,13),error) - if (error /= 0) print*,'mid cld lay not found' - endif - -! high cloud fraction - if (fformat == 'netcdf') then - VarName='tcdc_avehcl' - Zreverse='no' - call read_netcdf_p(ncid,im,jm,1,VarName, - & dum2d(:,:,14),Zreverse, - & iope,ionproc,iocomms,error) - if (error /= 0) print*,'tcdc_avehcl not found' - else - VarName='tcdc_ave' - LayName='high cld lay' - call read_nemsio(ffile,im,jm,1,VarName,LayName, - + dum2d(:,:,14),error) - if (error /= 0) print*,'high cld lay not found' - endif - - if(debugprint) - + print*,'sample high cloud frac= ',dum2d(im/2,jm/4,14), - + dum2d(im/2,jm/3,14),dum2d(im/2,jm/2,14) - - if (fformat == 'netcdf') then - error=nf90_close(ncid) - error=nf90_close(ncid2) - else - call nemsio_close(ffile,iret=error) - call nemsio_close(ffile2,iret=error) - endif - call date_and_time(date,time,zone,clocking) -! print *,'10reading surface data end= ', clocking - print *,'10date, time, zone',date, time, zone -! -! get the nearest neighbor i,j from the table -! - do np=1, npoint -! use read in predetermined i,j - if (np1==0) then - idum=iidum(np) - jdum=jjdum(np) - - else -! find nearest neighbor - rdum=rlon(np) - if(rdum<0.)rdum=rdum+360. - - do j=1,jm-1 - do i=1,im-1 - if((rdum>=gdlon(i,j) .and. rdum<=gdlon(i+1,j)) .and. - + (rlat(np)<=gdlat(i,j).and.rlat(np)>=gdlat(i,j+1)) ) then - if(landwater(np) == 2)then - idum=i - jdum=j - exit - else if(landwater(np) == lwmask(i,j))then - idum=i - jdum=j !1 - exit - else if(landwater(np) == lwmask(i+1,j))then - idum=i+1 - jdum=j ! 2 - exit - else if(landwater(np) == lwmask(i-1,j))then - idum=i-1 - jdum=j ! 3 - exit - else if(landwater(np) == lwmask(i,j+1))then - idum=i - jdum=j+1 ! 
4 - exit - else if(landwater(np) == lwmask(i,j-1))then - idum=i - jdum=j-1 ! 5 - exit - else if(landwater(np) == lwmask(i+1,j-1))then - idum=i+1 - jdum=j-1 ! 6 - exit - else if(landwater(np) == lwmask(i+1,j+1))then - idum=i+1 - jdum=j+1 ! 7 - exit - else if(landwater(np) == lwmask(i-1,j+1))then - idum=i-1 - jdum=j+1 ! 8 - exit - else if(landwater(np) == lwmask(i-1,j-1))then - idum=i-1 - jdum=j-1 ! 9 - exit - else if(landwater(np) == lwmask(i,j+2))then - idum=i - jdum=j+2 ! 10 - exit - else if(landwater(np) == lwmask(i+2,j))then - idum=i+2 - jdum=j !11 - exit - else if(landwater(np) == lwmask(i,j-2))then - idum=i - jdum=j-2 ! 12 - exit - else if(landwater(np) == lwmask(i-2,j))then - idum=i-2 - jdum=j !13 - exit - else if(landwater(np) == lwmask(i-2,j+1))then - idum=i-2 - jdum=j+1 ! 14 - exit - else if(landwater(np) == lwmask(i-1,j+2))then - idum=i-1 - jdum=j+2 !15 - exit - else if(landwater(np) == lwmask(i+1,j+2))then - idum=i+1 - jdum=j+2 !16 - exit - else if(landwater(np) == lwmask(i+2,j+1))then - idum=i+2 - jdum=j+1 !17 - exit - else if(landwater(np) == lwmask(i+2,j-1))then - idum=i+2 - jdum=j-1 !18 - exit - else if(landwater(np) == lwmask(i+1,j-2))then - idum=i+1 - jdum=j-2 !19 - exit - else if(landwater(np) == lwmask(i-1,j-2))then - idum=i-1 - jdum=j-2 !20 - exit - else if(landwater(np) == lwmask(i-2,j-1))then - idum=i-2 - jdum=j-1 !21 - exit - else if(landwater(np) == lwmask(i-2,j-2))then - idum=i-2 - jdum=j-2 !22 - exit - else if(landwater(np) == lwmask(i+2,j-2))then - idum=i+2 - jdum=j-2 !23 - exit - else if(landwater(np) == lwmask(i+2,j+2))then - idum=i+2 - jdum=j+2 !24 - exit - else if(landwater(np) == lwmask(i-2,j+2))then - idum=i-2 - jdum=j+2 !25 - exit - else if(landwater(np) == lwmask(i+3,j))then - idum=i+3 - jdum=j !26 - exit - else if(landwater(np) == lwmask(i-3,j))then - idum=i-3 - jdum=j !27 - exit - else if(landwater(np) == lwmask(i,j+3))then - idum=i - jdum=j+3 !28 - exit - else if(landwater(np) == lwmask(i,j-3))then - idum=i - jdum=j-3 !29 - exit - else -CC print*,'no matching land sea mask np,landwater,i,j,mask= ' -CC print*, np,landwater(np),i,j,lwmask(i,j) -CC print*, ' So it takes i,j ' - idum=i - jdum=j - exit - end if - end if - end do - end do - - idum=max0(min0(idum,im),1) - jdum=max0(min0(jdum,jm),1) - endif !! read in i,j ends here - if (fhour==0.0) then - if(debugprint) then - write(nij,98) np,idum,jdum,rlat(np),rlon(np) - 98 FORMAT (3I6, 2F9.2) - if(elevstn(np)==-999.) elevstn(np)=hgt(idum,jdum) - write(9,99) np,rlat(np),rlon(np),elevstn(np),hgt(idum,jdum) - 99 FORMAT (I6, 4F9.2) - if(np==1 .or.np==100)print*,'nearest neighbor for station ',np - + ,idum,jdum,rlon(np),rlat(np),lwmask(i,j),landwater(np) - endif - endif - - grids(np,1)=hgt(idum,jdum) - grids(np,2)=pint(idum,jdum,1) - - sfc(5,np)=dum2d(idum,jdum,1) - sfc(6,np)=dum2d(idum,jdum,6) - sfc(17,np)=dum2d(idum,jdum,8) - sfc(12,np)=dum2d(idum,jdum,9) - sfc(11,np)=dum2d(idum,jdum,10) - sfc(10,np)=dum2d(idum,jdum,11) - sfc(27,np)=dum2d(idum,jdum,12) - sfc(26,np)=dum2d(idum,jdum,13) - sfc(25,np)=dum2d(idum,jdum,14) - sfc(34,np)=dum2d(idum,jdum,4) - sfc(35,np)=dum2d(idum,jdum,5) - sfc(30,np)=dum2d(idum,jdum,2) - sfc(31,np)=dum2d(idum,jdum,3) - -CC There may be cases where convective precip is greater than total precip -CC due to rounding and interpolation errors, correct it here -G.P. Lou: - if(sfc(11,np) .gt. 
sfc(12,np)) sfc(11,np)=sfc(12,np) - - do k=1,levs - grids(np,k+2)=t3d(idum,jdum,k) - grids(np,k+2+levs)=q3d(idum,jdum,k) - grids(np,k+2+2*levs)=omega3d(idum,jdum,k) - gridu(np,k)=uh(idum,jdum,k) - gridv(np,k)=vh(idum,jdum,k) - p1(np,k)=pint(idum,jdum,k+1) - z1(np,k)=zint(idum,jdum,k+1) -!! p1(np,k)=0.5*(pint(idum,jdum,k)+pint(idum,jdum,k+1)) -!! z1(np,k)=0.5*(zint(idum,jdum,k)+zint(idum,jdum,k+1)) - - end do - end do - - print*,'finish finding nearest neighbor for each station' - - do np = 1, npoint -! !ps in kPa - ps(np) = grids(np,2)/1000. !! surface pressure - enddo - -! -! ----------------- -! Put topo(1),surf press(2),vir temp(3:66),and specifi hum(67:130) in grids -! for each station -!! if(recn_dzdt == 0 ) then !!DZDT - do k = 1, levs - do np = 1, npoint - omega(np,k) = grids(np,2+levs*2+k) - enddo - enddo - if(debugprint) - + print*,'sample (omega) dzdt ', (omega(3,k),k=1,levs) -! -! move surface pressure to the station surface from the model surface -! - do np = 1, npoint -! -! when the station elevation information in the table says missing, -! use the model elevation -! -! print *, "elevstn = ", elevstn(np) - if(elevstn(np)==-999.) elevstn(np) = grids(np,1) - psn(np) = ps(np) - psone = ps(np) - call sigio_modpr(1,1,levs,nvcoord,idvc, - & idsl,vcoord,iret, - & ps=psone*1000,pd=pd3(np,1:levs)) - grids(np,2) = log(psn(np)) - if(np==11)print*,'station H,grud H,psn,ps,new pm', - & elevstn(np),grids(np,1),psn(np),ps(np) - if(np==11)print*,'pd3= ', pd3(np,1:levs) - enddo -! -!! test removing height adjustments - print*, 'do not do height adjustments' -! -! get sea-level pressure (Pa) and layer geopotential height -! - do k = 1, levs - do np = 1, npoint - ttnew(np,k) = grids(np,k+2) - qnew(np,k) = grids(np,k+levs+2) - enddo - enddo - - do np=1,npoint -!! call gslp(levs,elevstn(np),ps(np)*1000, - call gslp(levs,grids(np,1),ps(np)*1000, - & p1(np,1:levs),ttnew(np,1:levs),qnew(np,1:levs), - & pmsl(np),zp(np,1:levs),zp2(1:2)) - enddo - print *, 'call gslp pmsl= ', (pmsl(np),np=1,20) - if(recn_delz == -9999) then - print*, 'using calculated height ' - else - print*, 'using model height m' - do k = 1, levs - do np=1, npoint - zp(np,k) = z1(np,k) - enddo - enddo - endif - print*,'finish computing MSLP' - print*,'finish computing zp ', (zp(11,k),k=1,levs) - print*,'finish computing zp2(11-12) ', zp2(11),zp2(12) -! -! prepare buffer data -! - if(iope == 0) then - do np = 1, npoint - pi3(np,1)=psn(np)*1000 - do k=1,levs - pi3(np,k+1)=pi3(np,k)-pd3(np,k) !layer pressure (Pa) - enddo -!! ==ivalence (cstat1,rstat1) - cstat1=cstat(np) -!! data(1) = ifix(fhour+.2) * 3600 ! FORECAST TIME (SEC) -!! data(2) = istat(np) ! STATION NUMBER -!! data(3) = rstat1 ! STATION CHARACTER ID -!! data(4) = rlat(np) ! LATITUDE (DEG N) -!! data(5) = rlon(np) ! LONGITUDE (DEG E) -!! data(6) = elevstn(np) ! STATION ELEVATION (M) - data2(1) = ifix(fhour+.2) * 3600 ! FORECAST TIME (SEC) - data2(2) = istat(np) ! STATION NUMBER - data2(3) = rstat1 ! STATION CHARACTER ID - data2(4) = rlat(np) ! LATITUDE (DEG N) - data2(5) = rlon(np) ! LONGITUDE (DEG E) - data2(6) = elevstn(np) ! STATION ELEVATION (M) - psfc = 10. * psn(np) ! convert to MB - leveta = 1 - do k = 1, levs - kk= k/2 + 1 -! -! look for the layer above 500 mb for precip type computation -! - if(pi3(np,k).ge.50000.) leveta = k - ppi = pi3(np,k) - t = grids(np,k+2) - q = max(1.e-8,grids(np,2+k+levs)) - u = gridu(np,k) - v = gridv(np,k) -!! data((k-1)*6+7) = p1(np,k) ! PRESSURE (PA) at integer layer -!! data((k-1)*6+8) = t ! TEMPERATURE (K) -!! data((k-1)*6+9) = u ! 
U WIND (M/S) -!! data((k-1)*6+10) = v ! V WIND (M/S) -!! data((k-1)*6+11) = q ! HUMIDITY (KG/KG) -!! data((k-1)*6+12) = omega(np,k)*100. ! Omega (pa/sec) !changed to dzdt(cm/s) if available - if (mod(k,2)>0) then - data2((kk-1)*6+7) = p1(np,k) - data2((kk-1)*6+8) = t - data2((kk-1)*6+9) = u - data2((kk-1)*6+10) = v - data2((kk-1)*6+11) = q - data2((kk-1)*6+12) = omega(np,k)*100. - endif -!changed to dzdt(cm/s) if available - enddo -! -! process surface flux file fields -! -!! data(8+nflx) = psfc * 100. ! SURFACE PRESSURE (PA) -!! data(7+nflx) = pmsl(np) - data2(8+nflx2) = psfc * 100. ! SURFACE PRESSURE (PA) - data2(7+nflx2) = pmsl(np) -!! dtemp = .0065 * (grids(np,1) - elevstn(np)) -!! dtemp = .0100 * (grids(np,1) - elevstn(np)) -!! sfc(37,np) = data(6+nflx) * .01 -!! sfc(37,np) = data(7+nflx) * .01 -!! sfc(39,np) = zp2(2) !500 hPa height - sfc(37,np) = data2(7+nflx2) * .01 - sfc(39,np) = zp2(2) !500 hPa height -! -! do height correction if there is no snow or if the temp is less than 0 -! G.P.LOU: -! It was decided that no corrctions were needed due to higher model -! resolution. -! -! if(sfc(10,np)==0.) then -! sfc(30,np) = sfc(30,np) + dtemp -! sfc(5,np) = sfc(5,np) + dtemp -! endif -! if(sfc(10,np).gt.0..and.sfc(5,np).lt.273.16) then -! sfc(5,np) = sfc(5,np) + dtemp -! if(sfc(5,np).gt.273.16) then -! dtemp = sfc(5,np) - 273.16 -! sfc(5,np) = 273.16 -! endif -! sfc(30,np) = sfc(30,np) + dtemp -! endif -! -!G.P. Lou 20200501: -!convert instantaneous surface latent heat net flux to surface -!evapolation 1 W m-2 = 0.0864 MJ m-2 day-1 -! and 1 mm day-1 = 2.45 MJ m-2 day-1 -! equivament to 0.0864/2.54 = 0.035265 -! equivament to 2.54/0.0864 = 28.3565 - if(debugprint) - + print*,'evaporation (stn 000692)= ',sfc(17,np) -!! data(9+nflx) = sfc(5,np) ! tsfc (K) -!! data(10+nflx) = sfc(6,np) ! 10cm soil temp (K) -!!! data(11+nflx) = sfc(17,np)/28.3565 ! evaporation (kg/m**2) from (W m-2) -!! data(11+nflx) = sfc(17,np)*0.035265 ! evaporation (kg/m**2) from (W m-2) -!! data(12+nflx) = sfc(12,np) ! total precip (m) -!! data(13+nflx) = sfc(11,np) ! convective precip (m) -!! data(14+nflx) = sfc(10,np) ! water equi. snow (m) -!! data(15+nflx) = sfc(27,np) ! low cloud (%) -!! data(16+nflx) = sfc(26,np) ! mid cloud -!! data(17+nflx) = sfc(25,np) ! high cloud -!! data(18+nflx) = sfc(34,np) ! U10 (m/s) -!! data(19+nflx) = sfc(35,np) ! V10 (m/s) -!! data(20+nflx) = sfc(30,np) ! T2 (K) -!! data(21+nflx) = sfc(31,np) ! Q2 (K) - -!! data(22+nflx) = 0. -!! data(23+nflx) = 0. -!! data(24+nflx) = 0. -!! data(25+nflx) = 0. -!! create 64 level bufr files - data2(9+nflx2) = sfc(5,np) ! tsfc (K) - data2(10+nflx2) = sfc(6,np) ! 10cm soil temp (K) -!! data2(11+nflx2) = sfc(17,np)/28.3565 ! evaporation (kg/m**2) from (W m-2) - data2(11+nflx2) = sfc(17,np)*0.035265 ! evaporation (kg/m**2) from (W m-2) - data2(12+nflx2) = sfc(12,np) ! total precip (m) - data2(13+nflx2) = sfc(11,np) ! convective precip (m) - data2(14+nflx2) = sfc(10,np) ! water equi. snow (m) - data2(15+nflx2) = sfc(27,np) ! low cloud (%) - data2(16+nflx2) = sfc(26,np) ! mid cloud - data2(17+nflx2) = sfc(25,np) ! high cloud - data2(18+nflx2) = sfc(34,np) ! U10 (m/s) - data2(19+nflx2) = sfc(35,np) ! V10 (m/s) - data2(20+nflx2) = sfc(30,np) ! T2 (K) - data2(21+nflx2) = sfc(31,np) ! Q2 (K) - - data2(22+nflx2) = 0. - data2(23+nflx2) = 0. - data2(24+nflx2) = 0. - data2(25+nflx2) = 0. - nd = 0 - trace = .false. - DOMS=0. - DOMR=0. - DOMIP=0. - DOMZR=0. - if(np==1.or.np==2) nd = 1 - if(np==1.or.np==2) trace = .true. - - if(sfc(12,np).gt.0.) 
then !check for precip then calc precip type - do k = 1, leveta+1 - pp = p1(np,k) - ppi = pi3(np,k) - t = grids(np,k+2) - q = max(0.,grids(np,2+k+levs)) - u = gridu(np,k) - v = gridv(np,k) - if(q.gt.1.e-6.and.pp.ge.20000.) then - call tdew(td,t,q,pp) - call lcl(tlcl,plcl,t,pp,q) - call mstadb(qw,tw,pp,q,tlcl,plcl) - else - td = t - 30. - tw = t - 30. - endif -! Calpreciptype input variables - gt0(1,k)= t - gq0(1,k) = q - prsl(1,k) = pp - prsi(1,k)=ppi - phii(1,k)=zp(np,k) !height in meters - enddo -! Use GFS routine calpreciptype.f to calculate precip type - xlat=rlat(np) - xlon=rlon(np) - lm=leveta - lp1=leveta+1 -!! PREC=data(12+nflx) - PREC=data2(12+nflx2) - n3dfercld=1 !if =3 then use Ferriers Explicit Precip Type - TSKIN=1. !used in Ferriers Explicit Precip Scheme - SR=1. !used in Ferriers Explicit Precip Scheme - iseedl=jdate - call random_setseed(iseedl) - call random_number(randomno) - call calpreciptype(1,1,1,1,lm,lp1,randomno,xlat,xlon, !input - & gt0,gq0,prsl,prsi,PREC,phii,n3dfercld,TSKIN,SR,phy_f3d, !input - & DOMR,DOMZR,DOMIP,DOMS) ! Output vars - endif -!! data(nflx + 22) = DOMS -!! data(nflx + 23) = DOMIP -!! data(nflx + 24) = DOMZR -!! data(nflx + 25) = DOMR - data2(nflx2 + 22) = DOMS - data2(nflx2 + 23) = DOMIP - data2(nflx2 + 24) = DOMZR - data2(nflx2 + 25) = DOMR - if(np==1.or.np==100) then - print *, ' surface fields for hour', nf, 'np =', np - print *, (data2(l+nflx2),l=1,25) - print *, ' temperature sounding' - print 6101, (data2((k-1)*6+8),k=1,levso) - print *, ' omega sounding' - print *, (data2((k-1)*6+12),k=1,levso) - endif -C print *, 'in meteorg nfile1= ', nfile1 -!! write(nfile) data - write(nfile) data2 - enddo !End loop over stations np - endif - call date_and_time(date,time,zone,clocking) -! print *,'13reading write data end= ', clocking - print *,'13date, time, zone',date, time, zone - print *, 'in meteorg nf,nfile,nfhour= ', nf,nfile,nfhour - print *, 'Finished writing bufr data file' - 6101 format(2x,6f12.3) - 6102 format(2x,6f12.5) - 6103 format(2x,6f12.5) -! - close(unit=nfile) - return - 910 print *, ' error reading surface flux file' - end - -!----------------------------------------------------------------------- diff --git a/sorc/gfs_bufr.fd/modstuff1.f b/sorc/gfs_bufr.fd/modstuff1.f deleted file mode 100755 index 95d4138334..0000000000 --- a/sorc/gfs_bufr.fd/modstuff1.f +++ /dev/null @@ -1,75 +0,0 @@ - subroutine modstuff(km,idvc,idsl,nvcoord,vcoord,ps,psx,psy,d,u,v,& - pd,pm,om) -! pd,pi,pm,aps,apm,os,om,px,py) -!$$$ Subprogram documentation block -! -! Subprogram: modstuff Compute model coordinate dependent functions -! Prgmmr: Iredell Org: np23 Date: 1999-10-18 -! -! Abstract: This subprogram computes fields which depend on the model coordinate -! such as pressure thickness and vertical velocity. -! -! Program history log: -! 1999-10-18 Mark Iredell -! -! Usage: call modstuff(km,idvc,idsl,nvcoord,vcoord,ps,psx,psy,d,u,v,& -! pd,pi,pm,aps,apm,os,om,px,py) -! Input argument list: -! km integer number of levels -! idvc integer vertical coordinate id (1 for sigma and 2 for hybrid) -! idsl integer type of sigma structure (1 for phillips or 2 for mean) -! nvcoord integer number of vertical coordinates -! vcoord real (km+1,nvcoord) vertical coordinates -! ps real surface pressure (Pa) -! psx real log surface pressure x-gradient (1/m) -! psy real log surface pressure y-gradient (1/m) -! d real (km) wind divergence (1/s) -! u real (km) x-component wind (m/s) -! v real (km) y-component wind (m/s) -! Output argument list: -! 
pd real (km) pressure thickness (Pa) -! pi real (km+1) interface pressure (Pa) -! pm real (km) mid-layer pressure (Pa) -! aps real log surface pressure () -! apm real (km+1) log mid-layer pressure () -! os real (km) surface pressure tendency (Pa/s) -! om real (km) vertical velocity (Pa/s) -! px real (km) mid-layer pressure x-gradient (Pa/m) -! py real (km) mid-layer pressure y-gradient (Pa/m) -! -! Attributes: -! Language: Fortran 90 -! -!$$$ - use sigio_module - implicit none - integer,intent(in):: km,idvc,idsl,nvcoord - real,intent(in):: vcoord(km+1,nvcoord) - real,intent(in):: ps,psx,psy - real,intent(in):: u(km),v(km),d(km) - real,intent(out) :: pd(km),pm(km),om(km) - real aps,apm(km),os,pi(km+1),px(km),py(km) - real dpmdps(km),dpddps(km),dpidps(km+1),vgradp - integer k,iret -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - call sigio_modpr(1,1,km,nvcoord,idvc,idsl,vcoord,iret,& - ps=(/ps/),& - pm=pm,pd=pd,dpmdps=dpmdps,dpddps=dpddps) - pi(1)=ps - dpidps(1)=1. - do k=1,km - pi(k+1)=pi(k)-pd(k) - dpidps(k+1)=dpidps(k)-dpddps(k) - enddo - aps=log(ps) - apm=log(pm) - os=0 - do k=km,1,-1 - vgradp=u(k)*psx+v(k)*psy - os=os-vgradp*ps*(dpmdps(k)-dpidps(k+1))-d(k)*(pm(k)-pi(k+1)) - om(k)=vgradp*ps*dpmdps(k)+os - os=os-vgradp*ps*(dpidps(k)-dpmdps(k))-d(k)*(pi(k)-pm(k)) - enddo - px=ps*dpmdps*psx - py=ps*dpmdps*psy - end subroutine diff --git a/sorc/gfs_bufr.fd/mstadb.f b/sorc/gfs_bufr.fd/mstadb.f deleted file mode 100755 index e9b01e09c6..0000000000 --- a/sorc/gfs_bufr.fd/mstadb.f +++ /dev/null @@ -1,49 +0,0 @@ - SUBROUTINE MSTADB(Q2,T2,P2,Q1,T1,P1) -C -C THIS ROUTINE PROVIDES T2 AND QSAT AT T2 AT PRESSUE P2 THAT -C GIVES THE SAME EQUIVALENT POTENTIAL TEMPERATURE AS THE POINT -C ( T1, P1). FOR EASE OF COMPUTATION, Q1 IS REQUESTED -C - REAL L0, KAPPA - parameter (dtdp=4.5e-4,kappa=.286,g=9.81) - parameter (cp=1004.6,cl=4185.5,cpv=1846.0) - parameter (rv=461.5,l0=2.500e6,t0=273.16,es0=610.78) - parameter (cps=2106.0,hfus=3.3358e5,rd=287.05) - parameter (fact1=(CPV - CL) / RV,fact1i=(cps-cl)/rv) - parameter (fact2=(L0 + (CL - CPV) * T0) / RV) - parameter (fact2i=(L0 + hfus + (CL - cps) * T0) / RV) - parameter (fact3=1. / T0,eps=rd/rv,tmix=t0-20.) 
- FUNC(QS,T) = EXP(L0 * QS / (CP * T)) - DESDT(ES,T) = ES * (FACT1 / T + FACT2 / T ** 2) - DESDTi(ES,T) = ES * (FACT1i / T + FACT2i / T ** 2) -C FIRST GUESS OF T2 - T2 = T1 + DTDP * (P2 - P1) - PFACT = (1.E5 / P2) ** KAPPA - CONST = T1 * (1.E5 / P1) ** KAPPA * FUNC(Q1,T1) - ITER = 0 -C ITERATION STARTS - 10 CALL SVP(Q2,E2,P2,T2) - FACT4 = FUNC(Q2,T2) - F = T2 * PFACT * FACT4 - CONST - if(t2.ge.t0) then - desdt2 = desdt(e2,t2) - elseif(t2.lt.tmix) then - desdt2 = desdti(e2,t2) - else - w = (t2 - tmix) / (t0 - tmix) - desdt2 = w * desdt(e2,t2) + (1.-w) * desdti(e2,t2) - endif - DQSDT = (Q2 / E2) * (P2 / (P2 - (1.-EPS) * E2)) * DESDT2 - DFDT = PFACT * FACT4 + PFACT * FACT4 * (L0 * DQSDT / CP - & - L0 * Q2 / (CP * T2)) - DT = - F / DFDT - T2 = T2 + DT - IF(ABS(DT).LT..1) GOTO 100 - ITER = ITER + 1 - IF(ITER.LT.50) GOTO 10 - WRITE(6,*) ' MSTADB ITERATION DIVERGED, PROGRAM STOPPED' - STOP 'ABEND 240' - 100 CONTINUE - CALL SVP(Q2,E2,P2,T2) - RETURN - END diff --git a/sorc/gfs_bufr.fd/newsig1.f b/sorc/gfs_bufr.fd/newsig1.f deleted file mode 100755 index 2b0b9ccb99..0000000000 --- a/sorc/gfs_bufr.fd/newsig1.f +++ /dev/null @@ -1,65 +0,0 @@ -C----------------------------------------------------------------------- - SUBROUTINE NEWSIG(NSIL,IDVC,LEVS,NVCOORD,VCOORD,IRET) -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C -C SUBPROGRAM: NEWSIG GET NEW SIGMA STRUCTURE -C PRGMMR: IREDELL ORG: W/NMC23 DATE: 98-04-03 -C -C ABSTRACT: READ IN INTERFACE SIGMA VALUES (OR USE OLD VALUES) -C AND COMPUTE FULL SIGMA VALUES. - -C PROGRAM HISTORY LOG: -C 98-04-03 IREDELL -C -C USAGE: CALL NEWSIG(NSIL,IDVC,LEVS,NVCOORD,VCOORD,IRET) -C INPUT ARGUMENTS: -C NSIL INTEGER UNIT NUMBER OF NEW SIGMA INTERFACE VALUES -C IDVC INTEGER VERTICAL COORDINATE ID -C LEVS INTEGER NEW NUMBER OF LEVELS -C NVCOORD INTEGER NEW NUMBER OF VERTICAL COORDINATES -C OUTPUT ARGUMENTS: -C VCOORD REAL (LEVS+1,NVCOORD) NEW VERTICAL COORDINATES -C IRET INTEGER RETURN CODE -C -C ATTRIBUTES: -C LANGUAGE: FORTRAN -C -C$$$ - REAL VCOORD(LEVS+1,NVCOORD) -C - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -C READ VERTICAL COORDINATES - READ(NSIL,*,IOSTAT=IRET) IDVCI,LEVSI,NVCOORDI - IF(IRET.EQ.0) THEN -C - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - READ(NSIL,*,IOSTAT=IRET) ((VCOORD(K,N),N=1,NVCOORD),K=1,LEVS+1) - IF(IRET.NE.0) RETURN - IF(IDVCI.NE.IDVC.OR.LEVSI.NE.LEVS) IRET=28 - IF(NVCOORDI.NE.NVCOORD) IRET=28 - IF(IRET.NE.0) RETURN -C - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -C READ INTERFACE HYBRID VALUES - ELSE - REWIND NSIL - READ(NSIL,*,IOSTAT=IRET) IDVCI - REWIND NSIL - IF(IRET.EQ.0.AND.(IDVCI.EQ.2.OR.IDVCI.EQ.3)) THEN - READ(NSIL,*,IOSTAT=IRET) IDVCI,LEVSI - READ(NSIL,*,IOSTAT=IRET) (VCOORD(K,1),VCOORD(K,2),K=1,LEVS+1) - IF(IRET.NE.0) RETURN - IF(IDVCI.NE.IDVC.OR.LEVSI.NE.LEVS) IRET=28 - IF(IRET.NE.0) RETURN -C - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -C READ INTERFACE SIGMA VALUES - ELSE - VCOORD(1,1)=1. - VCOORD(LEVS+1,1)=0. - READ(NSIL,*,IOSTAT=IRET) LEVSI - READ(NSIL,*,IOSTAT=IRET) (VCOORD(K,1),K=2,LEVS) - IF(IRET.NE.0) RETURN - IF(LEVSI.NE.LEVS) IRET=28 - IF(IRET.NE.0) RETURN - ENDIF -C - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - ENDIF - IRET=0 - END diff --git a/sorc/gfs_bufr.fd/physcons.f b/sorc/gfs_bufr.fd/physcons.f deleted file mode 100755 index 03a0a8001d..0000000000 --- a/sorc/gfs_bufr.fd/physcons.f +++ /dev/null @@ -1,40 +0,0 @@ -module physcons - use machine,only:kind_phys -! 
Physical constants as set in NMC handbook from Smithsonian tables. -! Physical constants are given to 5 places. -! 1990/04/30: g and rd are made consistent with NWS usage. -! 2001/10/22: g made consistent with SI usage. -! Math constants - real(kind=kind_phys),parameter:: con_pi =3.141593e+0 ! pi - real(kind=kind_phys),parameter:: con_sqrt2 =1.414214e+0 ! square root of 2 - real(kind=kind_phys),parameter:: con_sqrt3 =1.732051e+0 ! square root of 3 -! Primary constants - real(kind=kind_phys),parameter:: con_rerth =6.3712e+6 ! radius of earth (m) - real(kind=kind_phys),parameter:: con_g =9.80665e+0! gravity (m/s2) - real(kind=kind_phys),parameter:: con_omega =7.2921e-5 ! ang vel of earth (1/s) - real(kind=kind_phys),parameter:: con_rd =2.8705e+2 ! gas constant air (J/kg/K) - real(kind=kind_phys),parameter:: con_rv =4.6150e+2 ! gas constant H2O (J/kg/K) - real(kind=kind_phys),parameter:: con_cp =1.0046e+3 ! spec heat air @p (J/kg/K) - real(kind=kind_phys),parameter:: con_cv =7.1760e+2 ! spec heat air @v (J/kg/K) - real(kind=kind_phys),parameter:: con_cvap =1.8460e+3 ! spec heat H2O gas (J/kg/K) - real(kind=kind_phys),parameter:: con_cliq =4.1855e+3 ! spec heat H2O liq (J/kg/K) - real(kind=kind_phys),parameter:: con_csol =2.1060e+3 ! spec heat H2O ice (J/kg/K) - real(kind=kind_phys),parameter:: con_hvap =2.5000e+6 ! lat heat H2O cond (J/kg) - real(kind=kind_phys),parameter:: con_hfus =3.3358e+5 ! lat heat H2O fusion (J/kg) - real(kind=kind_phys),parameter:: con_psat =6.1078e+2 ! pres at H2O 3pt (Pa) - real(kind=kind_phys),parameter:: con_sbc =5.6730e-8 ! stefan-boltzmann (W/m2/K4) - real(kind=kind_phys),parameter:: con_solr =1.3533e+3 ! solar constant (W/m2) - real(kind=kind_phys),parameter:: con_t0c =2.7315e+2 ! temp at 0C (K) - real(kind=kind_phys),parameter:: con_ttp =2.7316e+2 ! temp at H2O 3pt (K) - real(kind=kind_phys),parameter:: con_epsq =1.0E-12 ! min q for computing precip type -! Secondary constants - real(kind=kind_phys),parameter:: con_rocp =con_rd/con_cp - real(kind=kind_phys),parameter:: con_cpor =con_cp/con_rd - real(kind=kind_phys),parameter:: con_rog =con_rd/con_g - real(kind=kind_phys),parameter:: con_fvirt =con_rv/con_rd-1. - real(kind=kind_phys),parameter:: con_eps =con_rd/con_rv - real(kind=kind_phys),parameter:: con_epsm1 =con_rd/con_rv-1. - real(kind=kind_phys),parameter:: con_dldt =con_cvap-con_cliq - real(kind=kind_phys),parameter:: con_xpona =-con_dldt/con_rv - real(kind=kind_phys),parameter:: con_xponb =-con_dldt/con_rv+con_hvap/(con_rv*con_ttp) -end module diff --git a/sorc/gfs_bufr.fd/read_nemsio.f b/sorc/gfs_bufr.fd/read_nemsio.f deleted file mode 100644 index d1262e7974..0000000000 --- a/sorc/gfs_bufr.fd/read_nemsio.f +++ /dev/null @@ -1,55 +0,0 @@ - subroutine read_nemsio(gfile,im,jm,levs, - & VarName,LayName,Varout,iret) -!! This subroutine reads either 2d or 3d nemsio data -!! 
12/12/2019 Guang Ping Lou - - use nemsio_module - implicit none - include 'mpif.h' - type(nemsio_gfile) :: gfile - character(len=20) :: VarName,LayName - integer,intent(in) :: im,jm,levs - real,intent(out) :: Varout(im,jm,levs) - real,dimension(im*jm) :: dum1d - integer :: iret,i,j,k,jj - - print*,'read_nemsio,im,jm,levs' - print*, im,jm,levs - print*,'VarName=',trim(VarName) - print*,'LayName=',trim(LayName) - if(levs > 1) then - do k =1, levs - call nemsio_readrecvw34(gfile,trim(VarName), - & trim(LayName),k,data=dum1d,iret=iret) - !print*,"VarName,k= ",trim(VarName), k - if (iret /= 0) then - print*,trim(VarName)," not found" - else - do j=1,jm - jj= (j-1)*im - do i=1,im - Varout(i,j,k) = dum1d(jj+i) - end do - end do - end if - enddo - - else - call nemsio_readrecvw34(gfile,trim(VarName), - & trim(LayName),1,data=dum1d,iret=iret) - !print*,"VarName= ",trim(VarName) - if (iret /= 0) then - print*,trim(VarName)," not found" - else - do j=1,jm - jj= (j-1)*im - do i=1,im - Varout(i,j,1) = dum1d(jj+i) - end do - end do - endif - - end if - - end subroutine read_nemsio - diff --git a/sorc/gfs_bufr.fd/read_netcdf.f b/sorc/gfs_bufr.fd/read_netcdf.f deleted file mode 100644 index a024323b31..0000000000 --- a/sorc/gfs_bufr.fd/read_netcdf.f +++ /dev/null @@ -1,55 +0,0 @@ - subroutine read_netcdf(ncid,im,jm,levs, - & VarName,Varout,Zreverse,iret) -!! This subroutine reads either 2d or 3d NetCDF data -!! 12/12/2019 Guang Ping Lou - - use netcdf - implicit none - include 'mpif.h' - character(len=20),intent(in) :: VarName - character(len=3),intent(in) :: Zreverse - integer,intent(in) :: ncid,im,jm,levs - real,intent(out) :: Varout(im,jm,levs) - real :: dummy3d(im,jm,levs) - integer :: iret,i,j,k,id_var,kk - - if(levs > 1) then - iret = nf90_inq_varid(ncid,trim(VarName),id_var) - !print*,stat,varname,id_var - iret = nf90_get_var(ncid,id_var,dummy3d) - if (iret /= 0) then - print*,VarName," not found" - else -!For FV3GFS NetCDF output, vertical layers need to be reversed - if(Zreverse == "yes" ) then - do k = 1, levs - kk=levs-k+1 - do j=1, jm - do i=1, im - Varout(i,j,k) = dummy3d(i,j,kk) - enddo - enddo - enddo - else - do k = 1, levs - do j=1, jm - do i=1, im - Varout(i,j,k) = dummy3d(i,j,k) - enddo - enddo - enddo - endif - endif - - else - iret = nf90_inq_varid(ncid,trim(VarName),id_var) - !print*,stat,varname,id_var - iret = nf90_get_var(ncid,id_var,Varout(:,:,1)) - if (iret /= 0) then - print*,VarName," not found" - endif - - end if - - end subroutine read_netcdf - diff --git a/sorc/gfs_bufr.fd/read_netcdf_p.f b/sorc/gfs_bufr.fd/read_netcdf_p.f deleted file mode 100644 index 4bfa8507be..0000000000 --- a/sorc/gfs_bufr.fd/read_netcdf_p.f +++ /dev/null @@ -1,113 +0,0 @@ - subroutine read_netcdf_p(ncid,im,jm,levs, - & VarName,Varout,Zreverse,iope,ionproc, - & iocomms,iret) -!! This subroutine reads either 2d or 3d NetCDF data in parallel -!! 02/08/2020 Guang Ping Lou - - use netcdf - use mpi - implicit none -!! 
include 'mpif.h' - character(len=20),intent(in) :: VarName - character(len=3),intent(in) :: Zreverse - integer,intent(in) :: ncid,im,jm,levs - real,intent(out) :: Varout(im,jm,levs) - real :: dummy3d(im,jm,levs) - integer :: iret,i,j,k,id_var,kk - integer :: iope,ionproc,iocomms - integer :: chunksize,ionproc1 - real, allocatable :: dummy(:,:,:) - integer start(3), count(3) - integer nskip - integer, allocatable :: starts(:) - integer, allocatable :: counts(:) - integer, allocatable :: chunksizes(:) - integer, allocatable :: rdispls(:) - integer, allocatable :: ii(:) - - if(levs > 1) then - nskip = int(levs/ionproc) + 1 - k=ionproc*nskip - if(k > levs) then - kk=(k-levs)/nskip - ionproc1=ionproc - kk - else - ionproc1=ionproc - endif - iret = nf90_inq_varid(ncid,trim(VarName),id_var) - allocate(starts(ionproc1), counts(ionproc1),ii(ionproc1)) - allocate(chunksizes(ionproc1)) - allocate(rdispls(ionproc1)) - print*,'ionproc,ionproc1,nskip= ',ionproc,ionproc1, nskip - print*,'trim(VarName)in read= ',trim(VarName) - starts(1) = 1 - ii(1) = 1 - do i = 2, ionproc1 - starts(i) = 1 + (i-1)*nskip - ii(i)= ii(i-1) + 1 - end do - do i=1, ionproc1 - 1 - counts(i) = starts(i+1) - starts(i) - end do - counts(ionproc1) = levs - starts(ionproc1)+1 - print*,'starts= ',starts - print*, 'counts= ', counts - k=ii(iope+1) - start = (/1,1,starts(k)/) - count = (/im,jm,counts(k)/) - chunksizes(:) = im * jm * counts(:) - rdispls(:) = im * jm * (starts(:)-1) - print*, 'iope,k,start,count= ',iope,k,start(3),count(3) - print*, 'chunksizes= ', chunksizes - print*, 'rdispls= ', rdispls - allocate (dummy(im,jm,count(3))) - iret=nf90_get_var(ncid,id_var,dummy, - & start=start,count=count) - if (iret /= 0) then - print*,VarName," not found" - endif - print*,'start(3),st(3):cnt(3)-1=',start(3),(start(3)+count(3)-1) - print*,'dummy(im/2,jm/2,:)= ', dummy(im/2,jm/2,:) - call mpi_allgatherv(dummy,chunksizes(k),mpi_real,dummy3d, - & chunksizes, rdispls, mpi_real, iocomms, iret) - print*,'VarName= ', VarName - print*,'dummy3d(im/2,jm/2,:)= ', dummy3d(im/2,jm/2,:) -!! call mpi_alltoallv(dummy, chunksizes, sdispls, mpi_real, dummy3d, -!! & chunksizes, rdispls, mpi_real, iocomms, iret) - -! enddo -!For FV3GFS NetCDF output, vertical layers need to be reversed - if(Zreverse == "yes" ) then - do k = 1, levs - kk=levs-k+1 - do j=1, jm - do i=1, im - Varout(i,j,k) = dummy3d(i,j,kk) - enddo - enddo - enddo - else - do k = 1, levs - do j=1, jm - do i=1, im - Varout(i,j,k) = dummy3d(i,j,k) - enddo - enddo - enddo - endif - deallocate(starts, counts,ii) - deallocate(chunksizes) - deallocate(rdispls) - deallocate (dummy) - - else - iret = nf90_inq_varid(ncid,trim(VarName),id_var) - print*,'trim(VarName)in read= ',trim(VarName) - iret = nf90_get_var(ncid,id_var,Varout(:,:,1)) - if (iret /= 0) then - print*,VarName," not found" - endif - - end if - end subroutine read_netcdf_p - diff --git a/sorc/gfs_bufr.fd/rsearch.f b/sorc/gfs_bufr.fd/rsearch.f deleted file mode 100755 index 73141facf5..0000000000 --- a/sorc/gfs_bufr.fd/rsearch.f +++ /dev/null @@ -1,145 +0,0 @@ -C----------------------------------------------------------------------- - SUBROUTINE RSEARCH(IM,KM1,IXZ1,KXZ1,Z1,KM2,IXZ2,KXZ2,Z2,IXL2,KXL2, - & L2) -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C -C SUBPROGRAM: RSEARCH SEARCH FOR A SURROUNDING REAL INTERVAL -C PRGMMR: IREDELL ORG: W/NMC23 DATE: 98-05-01 -C -C ABSTRACT: THIS SUBPROGRAM SEARCHES MONOTONIC SEQUENCES OF REAL NUMBERS -C FOR INTERVALS THAT SURROUND A GIVEN SEARCH SET OF REAL NUMBERS. 
-C THE SEQUENCES MAY BE MONOTONIC IN EITHER DIRECTION; THE REAL NUMBERS -C MAY BE SINGLE OR DOUBLE PRECISION; THE INPUT SEQUENCES AND SETS -C AND THE OUTPUT LOCATIONS MAY BE ARBITRARILY DIMENSIONED. -C -C PROGRAM HISTORY LOG: -C 1999-01-05 MARK IREDELL -C -C USAGE: CALL RSEARCH(IM,KM1,IXZ1,KXZ1,Z1,KM2,IXZ2,KXZ2,Z2,IXL2,KXL2, -C & L2) -C INPUT ARGUMENT LIST: -C IM INTEGER NUMBER OF SEQUENCES TO SEARCH -C KM1 INTEGER NUMBER OF POINTS IN EACH SEQUENCE -C IXZ1 INTEGER SEQUENCE SKIP NUMBER FOR Z1 -C KXZ1 INTEGER POINT SKIP NUMBER FOR Z1 -C Z1 REAL (1+(IM-1)*IXZ1+(KM1-1)*KXZ1) -C SEQUENCE VALUES TO SEARCH -C (Z1 MUST BE MONOTONIC IN EITHER DIRECTION) -C KM2 INTEGER NUMBER OF POINTS TO SEARCH FOR -C IN EACH RESPECTIVE SEQUENCE -C IXZ2 INTEGER SEQUENCE SKIP NUMBER FOR Z2 -C KXZ2 INTEGER POINT SKIP NUMBER FOR Z2 -C Z2 REAL (1+(IM-1)*IXZ2+(KM2-1)*KXZ2) -C SET OF VALUES TO SEARCH FOR -C (Z2 NEED NOT BE MONOTONIC) -C IXL2 INTEGER SEQUENCE SKIP NUMBER FOR L2 -C KXL2 INTEGER POINT SKIP NUMBER FOR L2 -C -C OUTPUT ARGUMENT LIST: -C L2 INTEGER (1+(IM-1)*IXL2+(KM2-1)*KXL2) -C INTERVAL LOCATIONS HAVING VALUES FROM 0 TO KM1 -C (Z2 WILL BE BETWEEN Z1(L2) AND Z1(L2+1)) -C -C SUBPROGRAMS CALLED: -C SBSRCH ESSL BINARY SEARCH -C DBSRCH ESSL BINARY SEARCH -C -C REMARKS: -C IF THE ARRAY Z1 IS DIMENSIONED (IM,KM1), THEN THE SKIP NUMBERS ARE -C IXZ1=1 AND KXZ1=IM; IF IT IS DIMENSIONED (KM1,IM), THEN THE SKIP -C NUMBERS ARE IXZ1=KM1 AND KXZ1=1; IF IT IS DIMENSIONED (IM,JM,KM1), -C THEN THE SKIP NUMBERS ARE IXZ1=1 AND KXZ1=IM*JM; ETCETERA. -C SIMILAR EXAMPLES APPLY TO THE SKIP NUMBERS FOR Z2 AND L2. -C -C RETURNED VALUES OF 0 OR KM1 INDICATE THAT THE GIVEN SEARCH VALUE -C IS OUTSIDE THE RANGE OF THE SEQUENCE. -C -C IF A SEARCH VALUE IS IDENTICAL TO ONE OF THE SEQUENCE VALUES -C THEN THE LOCATION RETURNED POINTS TO THE IDENTICAL VALUE. -C IF THE SEQUENCE IS NOT STRICTLY MONOTONIC AND A SEARCH VALUE IS -C IDENTICAL TO MORE THAN ONE OF THE SEQUENCE VALUES, THEN THE -C LOCATION RETURNED MAY POINT TO ANY OF THE IDENTICAL VALUES. -C -C TO BE EXACT, FOR EACH I FROM 1 TO IM AND FOR EACH K FROM 1 TO KM2, -C Z=Z2(1+(I-1)*IXZ2+(K-1)*KXZ2) IS THE SEARCH VALUE AND -C L=L2(1+(I-1)*IXL2+(K-1)*KXL2) IS THE LOCATION RETURNED. -C IF L=0, THEN Z IS LESS THAN THE START POINT Z1(1+(I-1)*IXZ1) -C FOR ASCENDING SEQUENCES (OR GREATER THAN FOR DESCENDING SEQUENCES). -C IF L=KM1, THEN Z IS GREATER THAN OR EQUAL TO THE END POINT -C Z1(1+(I-1)*IXZ1+(KM1-1)*KXZ1) FOR ASCENDING SEQUENCES -C (OR LESS THAN OR EQUAL TO FOR DESCENDING SEQUENCES). -C OTHERWISE Z IS BETWEEN THE VALUES Z1(1+(I-1)*IXZ1+(L-1)*KXZ1) AND -C Z1(1+(I-1)*IXZ1+(L-0)*KXZ1) AND MAY EQUAL THE FORMER. -C -C ATTRIBUTES: -C LANGUAGE: FORTRAN -C -C$$$ - IMPLICIT NONE - INTEGER,INTENT(IN):: IM,KM1,IXZ1,KXZ1,KM2,IXZ2,KXZ2,IXL2,KXL2 - REAL,INTENT(IN):: Z1(1+(IM-1)*IXZ1+(KM1-1)*KXZ1) - REAL,INTENT(IN):: Z2(1+(IM-1)*IXZ2+(KM2-1)*KXZ2) - INTEGER,INTENT(OUT):: L2(1+(IM-1)*IXL2+(KM2-1)*KXL2) - INTEGER(4) INCX,N,INCY,M,INDX(KM2),RC(KM2),IOPT - INTEGER I,K1,K2,CT -C - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -C FIND THE SURROUNDING INPUT INTERVAL FOR EACH OUTPUT POINT. - print*, IM,KM1,KM2,INCX,INCY - DO I=1,IM - IF(Z1(1+(I-1)*IXZ1).LE.Z1(1+(I-1)*IXZ1+(KM1-1)*KXZ1)) THEN -C INPUT COORDINATE IS MONOTONICALLY ASCENDING. - INCX=KXZ2 - N=KM2 - INCY=KXZ1 - M=KM1 - IOPT=1 -! IF(DIGITS(1.).LT.DIGITS(1._8)) THEN -! CALL SBSRCH(Z2(1+(I-1)*IXZ2),INCX,N, -! & Z1(1+(I-1)*IXZ1),INCY,M,INDX,RC,IOPT) -! ELSE -! CALL DBSRCH(Z2(1+(I-1)*IXZ2),INCX,N, -! 
& Z1(1+(I-1)*IXZ1),INCY,M,INDX,RC,IOPT) -! ENDIF -! DO K2=1,KM2 -! L2(1+(I-1)*IXL2+(K2-1)*KXL2)=INDX(K2)-RC(K2) -! ENDDO - DO K2=1,KM2 - L2(K2)=KM1 - DO K1=(1+(I-1)*IXZ1),(1+(I-1)*IXZ1+(KM1-1)*KXZ1)-1 - IF(Z1(K1)>=Z2(K2).AND.Z1(K1+1)>Z2(K2)) THEN - L2(K2)=K1 - EXIT - ENDIF - ENDDO - ENDDO - ELSE -C INPUT COORDINATE IS MONOTONICALLY DESCENDING. - INCX=KXZ2 - N=KM2 - INCY=-KXZ1 - M=KM1 - IOPT=0 -! IF(DIGITS(1.).LT.DIGITS(1._8)) THEN -! CALL SBSRCH(Z2(1+(I-1)*IXZ2),INCX,N, -! & Z1(1+(I-1)*IXZ1),INCY,M,INDX,RC,IOPT) -! ELSE -! CALL DBSRCH(Z2(1+(I-1)*IXZ2),INCX,N, -! & Z1(1+(I-1)*IXZ1),INCY,M,INDX,RC,IOPT) -! ENDIF -! DO K2=1,KM2 -! L2(1+(I-1)*IXL2+(K2-1)*KXL2)=KM1+1-INDX(K2) -! ENDDO - DO K2=1,KM2 - L2(K2)=KM1 - CT=0 - DO K1=(1+(I-1)*IXZ1+(KM1-1)*KXZ1),(1+(I-1)*IXZ1)+1,-1 - CT=CT+1 - IF(Z2(K2)<=Z1(K1).AND.Z2(K2) /dev/null && pwd) +top_dir=$(cd "$(dirname "${script_dir}")" &> /dev/null && pwd) +cd "${script_dir}" -if [ $RUN_ENVIR != emc -a $RUN_ENVIR != nco ]; then - echo ' Syntax: link_workflow.sh ( nco | emc ) ( cray | dell | hera | orion | jet | stampede )' - exit 1 -fi -if [ $machine != cray -a $machine != dell -a $machine != hera -a $machine != orion -a $machine != jet -a $machine != stampede ]; then - echo ' Syntax: link_workflow.sh ( nco | emc ) ( cray | dell | hera | orion | jet | stampede )' +export COMPILER="intel" +# shellcheck disable=SC1091 +source gfs_utils.fd/ush/detect_machine.sh # (sets MACHINE_ID) +# shellcheck disable= +machine=$(echo "${MACHINE_ID}" | cut -d. -f1) + +#------------------------------ +#--model fix fields +#------------------------------ +case "${machine}" in + "wcoss2") FIX_DIR="/lfs/h2/emc/global/noscrub/emc.global/FIX/fix" ;; + "hera") FIX_DIR="/scratch1/NCEPDEV/global/glopara/fix" ;; + "orion") FIX_DIR="/work/noaa/global/glopara/fix" ;; + "jet") FIX_DIR="/lfs4/HFIP/hfv3gfs/glopara/git/fv3gfs/fix" ;; + "s4") FIX_DIR="/data/prod/glopara/fix" ;; + *) + echo "FATAL: Unknown target machine ${machine}, couldn't set FIX_DIR" exit 1 -fi + ;; +esac + +# Source fix version file +source "${top_dir}/versions/fix.ver" LINK="ln -fs" SLINK="ln -fs" -[[ $RUN_ENVIR = nco ]] && LINK="cp -rp" - -pwd=$(pwd -P) +if [[ "${RUN_ENVIR}" == "nco" ]]; then + LINK="cp -rp" +fi # Link post -$LINK ufs_model.fd/FV3/upp upp.fd - -#------------------------------ -#--model fix fields -#------------------------------ -if [ $machine = "cray" ]; then - FIX_DIR="/gpfs/hps3/emc/global/noscrub/emc.glopara/git/fv3gfs/fix" -elif [ $machine = "dell" ]; then - FIX_DIR="/gpfs/dell2/emc/modeling/noscrub/emc.glopara/git/fv3gfs/fix_NEW" -elif [ $machine = "hera" ]; then - FIX_DIR="/scratch1/NCEPDEV/global/glopara/fix_NEW" -elif [ $machine = "orion" ]; then - FIX_DIR="/work/noaa/global/glopara/fix_NEW" -elif [ $machine = "jet" ]; then - FIX_DIR="/lfs4/HFIP/hfv3gfs/glopara/git/fv3gfs/fix_NEW" -elif [ $machine = "stampede" ]; then - FIX_DIR="/work/07738/jkuang/stampede2/tempFixICdir/fix_UFSp6" -fi +[[ -d upp.fd ]] && rm -rf upp.fd +${LINK} ufs_model.fd/FV3/upp upp.fd -if [ ! -z $FIX_DIR ]; then - if [ ! -d ${pwd}/../fix ]; then mkdir ${pwd}/../fix; fi +if [[ -n "${FIX_DIR}" ]]; then + if [[ ! 
-d "${top_dir}/fix" ]]; then mkdir "${top_dir}/fix" || exit 1; fi fi -cd ${pwd}/../fix ||exit 8 -for dir in fix_aer \ - fix_am \ - fix_chem \ - fix_fv3_gmted2010 \ - fix_gldas \ - fix_lut \ - fix_fv3_fracoro \ - fix_orog \ - fix_sfc_climo \ - fix_verif \ - fix_cice \ - fix_mom6 \ - fix_cpl \ - fix_wave \ - fix_reg2grb2 \ - fix_ugwd +cd "${top_dir}/fix" || exit 1 +for dir in aer \ + am \ + chem \ + cice \ + cpl \ + datm \ + gsi \ + lut \ + mom6 \ + orog \ + reg2grb2 \ + sfc_climo \ + ugwd \ + verif \ + wave do - if [ -d $dir ]; then - [[ $RUN_ENVIR = nco ]] && chmod -R 755 $dir - rm -rf $dir + if [[ -d "${dir}" ]]; then + [[ "${RUN_ENVIR}" == "nco" ]] && chmod -R 755 "${dir}" + rm -rf "${dir}" fi - $LINK $FIX_DIR/$dir . + fix_ver="${dir}_ver" + ${LINK} "${FIX_DIR}/${dir}/${!fix_ver}" "${dir}" done -if [ -d ${pwd}/ufs_utils.fd ]; then - cd ${pwd}/ufs_utils.fd/fix - ./link_fixdirs.sh $RUN_ENVIR $machine + +if [[ -d "${script_dir}/ufs_utils.fd" ]]; then + cd "${script_dir}/ufs_utils.fd/fix" || exit 1 + ./link_fixdirs.sh "${RUN_ENVIR}" "${machine}" 2> /dev/null fi #--------------------------------------- #--add files from external repositories #--------------------------------------- -cd ${pwd}/../jobs ||exit 8 - $LINK ../sorc/gldas.fd/jobs/JGDAS_ATMOS_GLDAS . -cd ${pwd}/../parm ||exit 8 - # [[ -d post ]] && rm -rf post - # $LINK ../sorc/upp.fd/parm post - [[ -d gldas ]] && rm -rf gldas - $LINK ../sorc/gldas.fd/parm gldas -cd ${pwd}/../parm/post ||exit 8 +cd "${top_dir}/parm/post" || exit 1 for file in postxconfig-NT-GEFS-ANL.txt postxconfig-NT-GEFS-F00.txt postxconfig-NT-GEFS.txt postxconfig-NT-GFS-ANL.txt \ postxconfig-NT-GFS-F00-TWO.txt postxconfig-NT-GFS-F00.txt postxconfig-NT-GFS-FLUX-F00.txt postxconfig-NT-GFS-FLUX.txt \ postxconfig-NT-GFS-GOES.txt postxconfig-NT-GFS-TWO.txt postxconfig-NT-GFS-WAFS-ANL.txt postxconfig-NT-GFS-WAFS.txt \ @@ -99,268 +126,308 @@ cd ${pwd}/../parm/post ||exit 8 post_tag_gfs128 post_tag_gfs65 gtg.config.gfs gtg_imprintings.txt nam_micro_lookup.dat \ AEROSOL_LUTS.dat optics_luts_DUST.dat optics_luts_SALT.dat optics_luts_SOOT.dat optics_luts_SUSO.dat optics_luts_WASO.dat \ ; do - $LINK ../../sorc/upp.fd/parm/$file . + ${LINK} "${script_dir}/upp.fd/parm/${file}" . done -cd ${pwd}/../scripts ||exit 8 - $LINK ../sorc/ufs_utils.fd/scripts/exemcsfc_global_sfc_prep.sh . - $LINK ../sorc/gldas.fd/scripts/exgdas_atmos_gldas.sh . -cd ${pwd}/../ush ||exit 8 + +cd "${top_dir}/scripts" || exit 8 + ${LINK} "${script_dir}/ufs_utils.fd/scripts/exemcsfc_global_sfc_prep.sh" . +cd "${top_dir}/ush" || exit 8 for file in emcsfc_ice_blend.sh fv3gfs_driver_grid.sh fv3gfs_make_orog.sh global_cycle_driver.sh \ emcsfc_snow.sh fv3gfs_filter_topo.sh global_cycle.sh fv3gfs_make_grid.sh ; do - $LINK ../sorc/ufs_utils.fd/ush/$file . + ${LINK} "${script_dir}/ufs_utils.fd/ush/${file}" . done - for file in gldas_archive.sh gldas_forcing.sh gldas_get_data.sh gldas_process_data.sh gldas_liscrd.sh gldas_post.sh ; do - $LINK ../sorc/gldas.fd/ush/$file . + for file in finddate.sh make_ntc_bull.pl make_NTC_file.pl make_tif.sh month_name.sh ; do + ${LINK} "${script_dir}/gfs_utils.fd/ush/${file}" . done - #----------------------------------- #--add gfs_wafs link if checked out -if [ -d ${pwd}/gfs_wafs.fd ]; then +if [[ -d "${script_dir}/gfs_wafs.fd" ]]; then #----------------------------------- - cd ${pwd}/../jobs ||exit 8 - $LINK ../sorc/gfs_wafs.fd/jobs/* . - cd ${pwd}/../parm ||exit 8 + cd "${top_dir}/jobs" || exit 1 + ${LINK} "${script_dir}/gfs_wafs.fd/jobs"/* . 
+ cd "${top_dir}/parm" || exit 1 [[ -d wafs ]] && rm -rf wafs - $LINK ../sorc/gfs_wafs.fd/parm/wafs wafs - cd ${pwd}/../scripts ||exit 8 - $LINK ../sorc/gfs_wafs.fd/scripts/* . - cd ${pwd}/../ush ||exit 8 - $LINK ../sorc/gfs_wafs.fd/ush/* . - cd ${pwd}/../fix ||exit 8 + ${LINK} "${script_dir}/gfs_wafs.fd/parm/wafs" wafs + cd "${top_dir}/scripts" || exit 1 + ${LINK} "${script_dir}/gfs_wafs.fd/scripts"/* . + cd "${top_dir}/ush" || exit 1 + ${LINK} "${script_dir}/gfs_wafs.fd/ush"/* . + cd "${top_dir}/fix" || exit 1 [[ -d wafs ]] && rm -rf wafs - $LINK ../sorc/gfs_wafs.fd/fix/* . + ${LINK} "${script_dir}/gfs_wafs.fd/fix"/* . fi #------------------------------ -#--add GSI/EnKF file +#--add GDASApp fix directory +#------------------------------ +if [[ -d "${script_dir}/gdas.cd" ]]; then + cd "${top_dir}/fix" || exit 1 + [[ ! -d gdas ]] && mkdir -p gdas + cd gdas || exit 1 + for gdas_sub in crtm fv3jedi gsibec; do + if [[ -d "${gdas_sub}" ]]; then + rm -rf "${gdas_sub}" + fi + fix_ver="gdas_${gdas_sub}_ver" + ${LINK} "${FIX_DIR}/gdas/${gdas_sub}/${!fix_ver}" "${gdas_sub}" + done +fi + +#------------------------------ +#--add GDASApp files #------------------------------ -cd ${pwd}/../jobs ||exit 8 - $LINK ../sorc/gsi.fd/jobs/JGLOBAL_ATMOS_ANALYSIS . - $LINK ../sorc/gsi.fd/jobs/JGLOBAL_ATMOS_ANALYSIS_CALC . - $LINK ../sorc/gsi.fd/jobs/JGDAS_ATMOS_ANALYSIS_DIAG . - $LINK ../sorc/gsi.fd/jobs/JGDAS_ENKF_SELECT_OBS . - $LINK ../sorc/gsi.fd/jobs/JGDAS_ENKF_DIAG . - $LINK ../sorc/gsi.fd/jobs/JGDAS_ENKF_UPDATE . - $LINK ../sorc/gsi.fd/jobs/JGDAS_ENKF_ECEN . - $LINK ../sorc/gsi.fd/jobs/JGDAS_ENKF_SFC . - $LINK ../sorc/gsi.fd/jobs/JGDAS_ENKF_FCST . - $LINK ../sorc/gsi.fd/jobs/JGDAS_ENKF_POST . - $LINK ../sorc/gsi.fd/jobs/JGDAS_ATMOS_CHGRES_FORENKF . -cd ${pwd}/../scripts ||exit 8 - $LINK ../sorc/gsi.fd/scripts/exglobal_atmos_analysis.sh . - $LINK ../sorc/gsi.fd/scripts/exglobal_atmos_analysis_calc.sh . - $LINK ../sorc/gsi.fd/scripts/exglobal_diag.sh . - $LINK ../sorc/gsi.fd/scripts/exgdas_enkf_select_obs.sh . - $LINK ../sorc/gsi.fd/scripts/exgdas_enkf_update.sh . - $LINK ../sorc/gsi.fd/scripts/exgdas_enkf_ecen.sh . - $LINK ../sorc/gsi.fd/scripts/exgdas_enkf_sfc.sh . - $LINK ../sorc/gsi.fd/scripts/exgdas_enkf_fcst.sh . - $LINK ../sorc/gsi.fd/scripts/exgdas_enkf_post.sh . - $LINK ../sorc/gsi.fd/scripts/exgdas_atmos_chgres_forenkf.sh . -cd ${pwd}/../fix ||exit 8 - [[ -d fix_gsi ]] && rm -rf fix_gsi - $LINK ../sorc/gsi.fd/fix fix_gsi -cd ${pwd}/../ush ||exit 8 - $LINK ../sorc/gsi.fd/ush/gsi_utils.py . - $LINK ../sorc/gsi.fd/ush/calcanl_gfs.py . - $LINK ../sorc/gsi.fd/ush/calcinc_gfs.py . - $LINK ../sorc/gsi.fd/ush/getncdimlen . +if [[ -d "${script_dir}/gdas.cd" ]]; then + cd "${top_dir}/ush" || exit 1 + ${LINK} "${script_dir}/gdas.cd/ush/ufsda" . + ${LINK} "${script_dir}/gdas.cd/ush/jediinc2fv3.py" . + ${LINK} "${script_dir}/gdas.cd/build/bin/imsfv3_scf2ioda.py" . + ${LINK} "${script_dir}/gdas.cd/ush/land/letkf_create_ens.py" . +fi #------------------------------ #--add DA Monitor file (NOTE: ensure to use correct version) #------------------------------ -cd ${pwd}/../fix ||exit 8 - [[ -d gdas ]] && rm -rf gdas - mkdir -p gdas - cd gdas - $LINK ../../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas.v1.0.0/fix/gdas_minmon_cost.txt . - $LINK ../../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas.v1.0.0/fix/gdas_minmon_gnorm.txt . - $LINK ../../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_base.tar . - $LINK ../../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_satype.txt . 
- $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_base.tar . - $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_satype.txt . - $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_scaninfo.txt . -cd ${pwd}/../jobs ||exit 8 - $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas.v1.0.0/jobs/JGDAS_ATMOS_VMINMON . - $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gfs.v1.0.0/jobs/JGFS_ATMOS_VMINMON . - $LINK ../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/jobs/JGDAS_ATMOS_VERFOZN . - $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/jobs/JGDAS_ATMOS_VERFRAD . -cd ${pwd}/../parm ||exit 8 +if [[ -d "${script_dir}/gsi_monitor.fd" ]]; then + + cd "${top_dir}/fix" || exit 1 + [[ ! -d gdas ]] && ( mkdir -p gdas || exit 1 ) + cd gdas || exit 1 + ${LINK} "${script_dir}/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/fix/gdas_minmon_cost.txt" . + ${LINK} "${script_dir}/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/fix/gdas_minmon_gnorm.txt" . + ${LINK} "${script_dir}/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_base.tar" . + ${LINK} "${script_dir}/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/fix/gdas_oznmon_satype.txt" . + ${LINK} "${script_dir}/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_base.tar" . + ${LINK} "${script_dir}/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_satype.txt" . + ${LINK} "${script_dir}/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/fix/gdas_radmon_scaninfo.txt" . + cd "${top_dir}/parm" || exit 1 [[ -d mon ]] && rm -rf mon mkdir -p mon - cd mon - $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm da_mon.parm -# $LINK ../../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas.v1.0.0/parm/gdas_minmon.parm . -# $LINK ../../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gfs.v1.0.0/parm/gfs_minmon.parm . - $LINK ../../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/parm/gdas_oznmon.parm . -# $LINK ../../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm . -cd ${pwd}/../scripts ||exit 8 - $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gdas.v1.0.0/scripts/exgdas_atmos_vminmon.sh . - $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/gfs.v1.0.0/scripts/exgfs_atmos_vminmon.sh . - $LINK ../sorc/gsi.fd/util/Ozone_Monitor/nwprod/gdas_oznmon/scripts/exgdas_atmos_verfozn.sh . - $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/gdas_radmon/scripts/exgdas_atmos_verfrad.sh . -cd ${pwd}/../ush ||exit 8 - $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/minmon_shared.v1.0.1/ush/minmon_xtrct_costs.pl . - $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/minmon_shared.v1.0.1/ush/minmon_xtrct_gnorms.pl . - $LINK ../sorc/gsi.fd/util/Minimization_Monitor/nwprod/minmon_shared.v1.0.1/ush/minmon_xtrct_reduct.pl . - $LINK ../sorc/gsi.fd/util/Ozone_Monitor/nwprod/oznmon_shared/ush/ozn_xtrct.sh . - $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_ck_stdout.sh . - $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_err_rpt.sh . - $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_angle.sh . - $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_bcoef.sh . - $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_bcor.sh . 
- $LINK ../sorc/gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/ush/radmon_verf_time.sh . - + cd mon || exit 1 + ${LINK} "${script_dir}/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm" da_mon.parm + # ${LINK} "${script_dir}/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gdas/parm/gdas_minmon.parm" . + # ${LINK} "${script_dir}/gsi_monitor.fd/src/Minimization_Monitor/nwprod/gfs/parm/gfs_minmon.parm" . + ${LINK} "${script_dir}/gsi_monitor.fd/src/Ozone_Monitor/nwprod/gdas_oznmon/parm/gdas_oznmon.parm" . + # ${LINK} "${script_dir}/gsi_monitor.fd/src/Radiance_Monitor/nwprod/gdas_radmon/parm/gdas_radmon.parm" . +fi #------------------------------ -#--link executables +#--link executables #------------------------------ -if [ ! -d $pwd/../exec ]; then mkdir $pwd/../exec ; fi -cd $pwd/../exec +if [[ ! -d "${top_dir}/exec" ]]; then mkdir "${top_dir}/exec" || exit 1 ; fi +cd "${top_dir}/exec" || exit 1 -[[ -s gaussian_sfcanl.exe ]] && rm -f gaussian_sfcanl.exe -$LINK ../sorc/install/bin/gaussian_sfcanl.x gaussian_sfcanl.exe -for workflowexec in fbwndgfs gfs_bufr regrid_nemsio supvit syndat_getjtbul \ - syndat_maksynrc syndat_qctropcy tocsbufr ; do - [[ -s $workflowexec ]] && rm -f $workflowexec - $LINK ../sorc/install/bin/${workflowexec}.x $workflowexec -done -for workflowexec in enkf_chgres_recenter.x enkf_chgres_recenter_nc.x fv3nc2nemsio.x \ - tave.x vint.x reg2grb2.x ; do - [[ -s $workflowexec ]] && rm -f $workflowexec - $LINK ../sorc/install/bin/$workflowexec . +for utilexe in fbwndgfs.x gaussian_sfcanl.x gfs_bufr.x regrid_nemsio.x supvit.x syndat_getjtbul.x \ + syndat_maksynrc.x syndat_qctropcy.x tocsbufr.x enkf_chgres_recenter.x overgridid.x \ + mkgfsawps.x enkf_chgres_recenter_nc.x fv3nc2nemsio.x tave.x vint.x reg2grb2.x ; do + [[ -s "${utilexe}" ]] && rm -f "${utilexe}" + ${LINK} "${script_dir}/gfs_utils.fd/install/bin/${utilexe}" . done -[[ -s ufs_model ]] && rm -f ufs_model -$LINK ../sorc/ufs_model.fd/build/ufs_model . +[[ -s "ufs_model.x" ]] && rm -f ufs_model.x +${LINK} "${script_dir}/ufs_model.fd/tests/ufs_model.x" . -[[ -s gfs_ncep_post ]] && rm -f gfs_ncep_post -$LINK ../sorc/upp.fd/exec/upp.x gfs_ncep_post +[[ -s "upp.x" ]] && rm -f upp.x +${LINK} "${script_dir}/upp.fd/exec/upp.x" . -if [ -d ${pwd}/gfs_wafs.fd ]; then +if [[ -d "${script_dir}/gfs_wafs.fd" ]]; then for wafsexe in \ wafs_awc_wafavn.x wafs_blending.x wafs_blending_0p25.x \ wafs_cnvgrib2.x wafs_gcip.x wafs_grib2_0p25.x \ wafs_makewafs.x wafs_setmissing.x; do - [[ -s $wafsexe ]] && rm -f $wafsexe - $LINK ../sorc/gfs_wafs.fd/exec/$wafsexe . + [[ -s ${wafsexe} ]] && rm -f "${wafsexe}" + ${LINK} "${script_dir}/gfs_wafs.fd/exec/${wafsexe}" . done fi for ufs_utilsexe in \ emcsfc_ice_blend emcsfc_snow2mdl global_cycle ; do - [[ -s $ufs_utilsexe ]] && rm -f $ufs_utilsexe - $LINK ../sorc/ufs_utils.fd/exec/$ufs_utilsexe . + [[ -s "${ufs_utilsexe}" ]] && rm -f "${ufs_utilsexe}" + ${LINK} "${script_dir}/ufs_utils.fd/exec/${ufs_utilsexe}" . done -for gsiexe in calc_analysis.x calc_increment_ens_ncio.x calc_increment_ens.x \ - getsfcensmeanp.x getsigensmeanp_smooth.x getsigensstatp.x global_enkf.x global_gsi.x \ - interp_inc.x ncdiag_cat.x oznmon_horiz.x oznmon_time.x radmon_angle.x \ - radmon_bcoef.x radmon_bcor.x radmon_time.x recentersigp.x;do - [[ -s $gsiexe ]] && rm -f $gsiexe - $LINK ../sorc/gsi.fd/exec/$gsiexe . 
-done +# GSI +if [[ -d "${script_dir}/gsi_enkf.fd" ]]; then + for gsiexe in enkf.x gsi.x; do + [[ -s "${gsiexe}" ]] && rm -f "${gsiexe}" + ${LINK} "${script_dir}/gsi_enkf.fd/install/bin/${gsiexe}" . + done +fi -for gldasexe in gdas2gldas gldas2gdas gldas_forcing gldas_model gldas_post gldas_rst; do - [[ -s $gldasexe ]] && rm -f $gldasexe - $LINK ../sorc/gldas.fd/exec/$gldasexe . -done +# GSI Utils +if [[ -d "${script_dir}/gsi_utils.fd" ]]; then + for exe in calc_analysis.x calc_increment_ens_ncio.x calc_increment_ens.x \ + getsfcensmeanp.x getsigensmeanp_smooth.x getsigensstatp.x \ + interp_inc.x recentersigp.x;do + [[ -s "${exe}" ]] && rm -f "${exe}" + ${LINK} "${script_dir}/gsi_utils.fd/install/bin/${exe}" . + done +fi + +# GSI Monitor +if [[ -d "${script_dir}/gsi_monitor.fd" ]]; then + for exe in oznmon_horiz.x oznmon_time.x radmon_angle.x \ + radmon_bcoef.x radmon_bcor.x radmon_time.x; do + [[ -s "${exe}" ]] && rm -f "${exe}" + ${LINK} "${script_dir}/gsi_monitor.fd/install/bin/${exe}" . + done +fi + +# GDASApp +if [[ -d "${script_dir}/gdas.cd" ]]; then + declare -a JEDI_EXE=("fv3jedi_addincrement.x" \ + "fv3jedi_diffstates.x" \ + "fv3jedi_ensvariance.x" \ + "fv3jedi_hofx.x" \ + "fv3jedi_var.x" \ + "fv3jedi_convertincrement.x" \ + "fv3jedi_dirac.x" \ + "fv3jedi_error_covariance_training.x" \ + "fv3jedi_letkf.x" \ + "fv3jedi_convertstate.x" \ + "fv3jedi_eda.x" \ + "fv3jedi_forecast.x" \ + "fv3jedi_plot_field.x" \ + "fv3jedi_data_checker.py" \ + "fv3jedi_enshofx.x" \ + "fv3jedi_hofx_nomodel.x" \ + "fv3jedi_testdata_downloader.py" \ + "soca_convertincrement.x" \ + "soca_error_covariance_training.x" \ + "soca_setcorscales.x" \ + "soca_gridgen.x" \ + "soca_var.x" \ + "calcfIMS.exe" \ + "apply_incr.exe" ) + for gdasexe in "${JEDI_EXE[@]}"; do + [[ -s "${gdasexe}" ]] && rm -f "${gdasexe}" + ${LINK} "${script_dir}/gdas.cd/build/bin/${gdasexe}" . 
+ done +fi #------------------------------ #--link source code directories #------------------------------ +cd "${script_dir}" || exit 8 -cd ${pwd}/../sorc || exit 8 - [[ -d calc_analysis.fd ]] && rm -rf calc_analysis.fd - $SLINK gsi.fd/util/netcdf_io/calc_analysis.fd calc_analysis.fd - - [[ -d calc_increment_ens.fd ]] && rm -rf calc_increment_ens.fd - $SLINK gsi.fd/util/EnKF/gfs/src/calc_increment_ens.fd calc_increment_ens.fd + if [[ -d gsi_enkf.fd ]]; then + [[ -d gsi.fd ]] && rm -rf gsi.fd + ${SLINK} gsi_enkf.fd/src/gsi gsi.fd - [[ -d calc_increment_ens_ncio.fd ]] && rm -rf calc_increment_ens_ncio.fd - $SLINK gsi.fd/util/EnKF/gfs/src/calc_increment_ens_ncio.fd calc_increment_ens_ncio.fd + [[ -d enkf.fd ]] && rm -rf enkf.fd + ${SLINK} gsi_enkf.fd/src/enkf enkf.fd + fi - [[ -d getsfcensmeanp.fd ]] && rm -rf getsfcensmeanp.fd - $SLINK gsi.fd/util/EnKF/gfs/src/getsfcensmeanp.fd getsfcensmeanp.fd + if [[ -d gsi_utils.fd ]]; then + [[ -d calc_analysis.fd ]] && rm -rf calc_analysis.fd + ${SLINK} gsi_utils.fd/src/netcdf_io/calc_analysis.fd calc_analysis.fd - [[ -d getsigensmeanp_smooth.fd ]] && rm -rf getsigensmeanp_smooth.fd - $SLINK gsi.fd/util/EnKF/gfs/src/getsigensmeanp_smooth.fd getsigensmeanp_smooth.fd + [[ -d calc_increment_ens.fd ]] && rm -rf calc_increment_ens.fd + ${SLINK} gsi_utils.fd/src/EnKF/gfs/src/calc_increment_ens.fd calc_increment_ens.fd - [[ -d getsigensstatp.fd ]] && rm -rf getsigensstatp.fd - $SLINK gsi.fd/util/EnKF/gfs/src/getsigensstatp.fd getsigensstatp.fd + [[ -d calc_increment_ens_ncio.fd ]] && rm -rf calc_increment_ens_ncio.fd + ${SLINK} gsi_utils.fd/src/EnKF/gfs/src/calc_increment_ens_ncio.fd calc_increment_ens_ncio.fd - [[ -d global_enkf.fd ]] && rm -rf global_enkf.fd - $SLINK gsi.fd/src/enkf global_enkf.fd + [[ -d getsfcensmeanp.fd ]] && rm -rf getsfcensmeanp.fd + ${SLINK} gsi_utils.fd/src/EnKF/gfs/src/getsfcensmeanp.fd getsfcensmeanp.fd - [[ -d global_gsi.fd ]] && rm -rf global_gsi.fd - $SLINK gsi.fd/src/gsi global_gsi.fd + [[ -d getsigensmeanp_smooth.fd ]] && rm -rf getsigensmeanp_smooth.fd + ${SLINK} gsi_utils.fd/src/EnKF/gfs/src/getsigensmeanp_smooth.fd getsigensmeanp_smooth.fd - [[ -d interp_inc.fd ]] && rm -rf interp_inc.fd - $SLINK gsi.fd/util/netcdf_io/interp_inc.fd interp_inc.fd + [[ -d getsigensstatp.fd ]] && rm -rf getsigensstatp.fd + ${SLINK} gsi_utils.fd/src/EnKF/gfs/src/getsigensstatp.fd getsigensstatp.fd - [[ -d ncdiag.fd ]] && rm -rf ncdiag.fd - $SLINK gsi.fd/src/ncdiag ncdiag_cat.fd + [[ -d recentersigp.fd ]] && rm -rf recentersigp.fd + ${SLINK} gsi_utils.fd/src/EnKF/gfs/src/recentersigp.fd recentersigp.fd - [[ -d oznmon_horiz.fd ]] && rm -rf oznmon_horiz.fd - $SLINK gsi.fd/util/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_horiz.fd oznmon_horiz.fd + [[ -d interp_inc.fd ]] && rm -rf interp_inc.fd + ${SLINK} gsi_utils.fd/src/netcdf_io/interp_inc.fd interp_inc.fd + fi - [[ -d oznmon_time.fd ]] && rm -rf oznmon_time.fd - $SLINK gsi.fd/util/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_time.fd oznmon_time.fd + if [[ -d gsi_monitor.fd ]] ; then + [[ -d oznmon_horiz.fd ]] && rm -rf oznmon_horiz.fd + ${SLINK} gsi_monitor.fd/src/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_horiz.fd oznmon_horiz.fd - [[ -d radmon_angle.fd ]] && rm -rf radmon_angle.fd - $SLINK gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radang.fd radmon_angle.fd + [[ -d oznmon_time.fd ]] && rm -rf oznmon_time.fd + ${SLINK} gsi_monitor.fd/src/Ozone_Monitor/nwprod/oznmon_shared/sorc/oznmon_time.fd oznmon_time.fd - [[ -d radmon_bcoef.fd ]] && rm -rf radmon_bcoef.fd - $SLINK 
gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcoef.fd radmon_bcoef.fd + [[ -d radmon_angle.fd ]] && rm -rf radmon_angle.fd + ${SLINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radang.fd radmon_angle.fd - [[ -d radmon_bcor.fd ]] && rm -rf radmon_bcor.fd - $SLINK gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcor.fd radmon_bcor.fd + [[ -d radmon_bcoef.fd ]] && rm -rf radmon_bcoef.fd + ${SLINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcoef.fd radmon_bcoef.fd - [[ -d radmon_time.fd ]] && rm -rf radmon_time.fd - $SLINK gsi.fd/util/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radtime.fd radmon_time.fd + [[ -d radmon_bcor.fd ]] && rm -rf radmon_bcor.fd + ${SLINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radbcor.fd radmon_bcor.fd - [[ -d recentersigp.fd ]] && rm -rf recentersigp.fd - $SLINK gsi.fd/util/EnKF/gfs/src/recentersigp.fd recentersigp.fd + [[ -d radmon_time.fd ]] && rm -rf radmon_time.fd + ${SLINK} gsi_monitor.fd/src/Radiance_Monitor/nwprod/radmon_shared/sorc/verf_radtime.fd radmon_time.fd + fi - $SLINK upp.fd/sorc/ncep_post.fd gfs_ncep_post.fd + [[ -d gfs_ncep_post.fd ]] && rm -rf gfs_ncep_post.fd + ${SLINK} upp.fd/sorc/ncep_post.fd gfs_ncep_post.fd for prog in fregrid make_hgrid make_solo_mosaic ; do - $SLINK ufs_utils.fd/sorc/fre-nctools.fd/tools/$prog ${prog}.fd + [[ -d "${prog}.fd" ]] && rm -rf "${prog}.fd" + ${SLINK} "ufs_utils.fd/sorc/fre-nctools.fd/tools/${prog}" "${prog}.fd" done - for prog in global_cycle.fd \ + for prog in global_cycle.fd \ emcsfc_ice_blend.fd \ emcsfc_snow2mdl.fd ;do - $SLINK ufs_utils.fd/sorc/$prog $prog + [[ -d "${prog}" ]] && rm -rf "${prog}" + ${SLINK} "ufs_utils.fd/sorc/${prog}" "${prog}" done + for prog in enkf_chgres_recenter.fd \ + enkf_chgres_recenter_nc.fd \ + fbwndgfs.fd \ + fv3nc2nemsio.fd \ + gaussian_sfcanl.fd \ + gfs_bufr.fd \ + mkgfsawps.fd \ + overgridid.fd \ + rdbfmsua.fd \ + reg2grb2.fd \ + regrid_nemsio.fd \ + supvit.fd \ + syndat_getjtbul.fd \ + syndat_maksynrc.fd \ + syndat_qctropcy.fd \ + tave.fd \ + tocsbufr.fd \ + vint.fd \ + webtitle.fd + do + if [[ -d "${prog}" ]]; then rm -rf "${prog}"; fi + ${LINK} "gfs_utils.fd/src/${prog}" . 
+ done - if [ -d ${pwd}/gfs_wafs.fd ]; then - $SLINK gfs_wafs.fd/sorc/wafs_awc_wafavn.fd wafs_awc_wafavn.fd - $SLINK gfs_wafs.fd/sorc/wafs_blending.fd wafs_blending.fd - $SLINK gfs_wafs.fd/sorc/wafs_blending_0p25.fd wafs_blending_0p25.fd - $SLINK gfs_wafs.fd/sorc/wafs_cnvgrib2.fd wafs_cnvgrib2.fd - $SLINK gfs_wafs.fd/sorc/wafs_gcip.fd wafs_gcip.fd - $SLINK gfs_wafs.fd/sorc/wafs_grib2_0p25.fd wafs_grib2_0p25.fd - $SLINK gfs_wafs.fd/sorc/wafs_makewafs.fd wafs_makewafs.fd - $SLINK gfs_wafs.fd/sorc/wafs_setmissing.fd wafs_setmissing.fd + if [[ -d "${script_dir}/gfs_wafs.fd" ]]; then + ${SLINK} gfs_wafs.fd/sorc/wafs_awc_wafavn.fd wafs_awc_wafavn.fd + ${SLINK} gfs_wafs.fd/sorc/wafs_blending.fd wafs_blending.fd + ${SLINK} gfs_wafs.fd/sorc/wafs_blending_0p25.fd wafs_blending_0p25.fd + ${SLINK} gfs_wafs.fd/sorc/wafs_cnvgrib2.fd wafs_cnvgrib2.fd + ${SLINK} gfs_wafs.fd/sorc/wafs_gcip.fd wafs_gcip.fd + ${SLINK} gfs_wafs.fd/sorc/wafs_grib2_0p25.fd wafs_grib2_0p25.fd + ${SLINK} gfs_wafs.fd/sorc/wafs_makewafs.fd wafs_makewafs.fd + ${SLINK} gfs_wafs.fd/sorc/wafs_setmissing.fd wafs_setmissing.fd fi - for prog in gdas2gldas.fd gldas2gdas.fd gldas_forcing.fd gldas_model.fd gldas_post.fd gldas_rst.fd ;do - $SLINK gldas.fd/sorc/$prog $prog - done - #------------------------------ # copy $HOMEgfs/parm/config/config.base.nco.static as config.base for operations # config.base in the $HOMEgfs/parm/config has no use in development -cd $pwd/../parm/config -[[ -s config.base ]] && rm -f config.base -[[ $RUN_ENVIR = nco ]] && cp -p config.base.nco.static config.base +cd "${top_dir}/parm/config" || exit 1 +[[ -s "config.base" ]] && rm -f config.base +if [[ "${RUN_ENVIR}" == "nco" ]] ; then + cp -p config.base.nco.static config.base + cp -p config.fv3.nco.static config.fv3 + cp -p config.resources.nco.static config.resources +fi #------------------------------ +echo "${BASH_SOURCE[0]} completed successfully" exit 0 - diff --git a/sorc/machine-setup.sh b/sorc/machine-setup.sh deleted file mode 100644 index 5c8e7f900a..0000000000 --- a/sorc/machine-setup.sh +++ /dev/null @@ -1,189 +0,0 @@ -# Create a test function for sh vs. bash detection. The name is -# randomly generated to reduce the chances of name collision. -__ms_function_name="setup__test_function__$$" -eval "$__ms_function_name() { /bin/true ; }" - -# Determine which shell we are using -__ms_ksh_test=$( eval '__text="text" ; if [[ $__text =~ ^(t).* ]] ; then printf "%s" ${.sh.match[1]} ; fi' 2> /dev/null | cat ) -__ms_bash_test=$( eval 'if ( set | grep '$__ms_function_name' | grep -v name > /dev/null 2>&1 ) ; then echo t ; fi ' 2> /dev/null | cat ) - -if [[ ! -z "$__ms_ksh_test" ]] ; then - __ms_shell=ksh -elif [[ ! -z "$__ms_bash_test" ]] ; then - __ms_shell=bash -else - # Not bash or ksh, so assume sh. - __ms_shell=sh -fi - -target="" -USERNAME=$(echo $LOGNAME | awk '{ print tolower($0)'}) -##--------------------------------------------------------------------------- -export hname=$(hostname | cut -c 1,1) -if [[ -d /work ]] ; then - # We are on MSU Orion - if ( ! eval module help > /dev/null 2>&1 ) ; then - echo load the module command 1>&2 - source /apps/lmod/lmod/init/$__ms_shell - fi - target=orion - - module purge - - export myFC=mpiifort - export FCOMP=mpiifort - -##--------------------------------------------------------------------------- -elif [[ -d /scratch1 ]] ; then - # We are on NOAA Hera - if ( ! 
eval module help > /dev/null 2>&1 ) ; then - echo load the module command 1>&2 - source /apps/lmod/lmod/init/$__ms_shell - fi - target=hera - - module purge - - export myFC=mpiifort - export FCOMP=mpiifort - -##--------------------------------------------------------------------------- -elif [[ -d /gpfs/hps && -e /etc/SuSE-release ]] ; then - # We are on NOAA Luna or Surge - if ( ! eval module help > /dev/null 2>&1 ) ; then - echo load the module command 1>&2 - source /opt/modules/default/init/$__ms_shell - fi - - target=wcoss_cray - # Silence the "module purge" to avoid the expected error messages - # related to modules that load modules. - module purge > /dev/null 2>&1 - module use /usrx/local/prod/modulefiles - module use /gpfs/hps/nco/ops/nwprod/lib/modulefiles - module use /gpfs/hps/nco/ops/nwprod/modulefiles - module use /opt/cray/alt-modulefiles - module use /opt/cray/craype/default/alt-modulefiles - module use /opt/cray/ari/modulefiles - module use /opt/modulefiles - module purge > /dev/null 2>&1 - # Workaround until module issues are fixed: - #unset _LMFILES_ - #unset LOADEDMODULES - echo y 2> /dev/null | module clear > /dev/null 2>&1 - module use /usrx/local/prod/modulefiles - module use /gpfs/hps/nco/ops/nwprod/lib/modulefiles - module use /gpfs/hps/nco/ops/nwprod/modulefiles - module use /opt/cray/alt-modulefiles - module use /opt/cray/craype/default/alt-modulefiles - module use /opt/cray/ari/modulefiles - module use /opt/modulefiles - module load modules - -##--------------------------------------------------------------------------- -elif [[ -L /usrx && "$( readlink /usrx 2> /dev/null )" =~ dell ]] ; then - # We are on NOAA Venus or Mars - if ( ! eval module help > /dev/null 2>&1 ) ; then - echo load the module command 1>&2 - source /usrx/local/prod/lmod/lmod/init/$__ms_shell - fi - target=wcoss_dell_p3 - module purge - -##--------------------------------------------------------------------------- -elif [[ -d /glade ]] ; then - # We are on NCAR Yellowstone - if ( ! eval module help > /dev/null 2>&1 ) ; then - echo load the module command 1>&2 - . /usr/share/Modules/init/$__ms_shell - fi - target=yellowstone - module purge - -##--------------------------------------------------------------------------- -elif [[ -d /lustre && -d /ncrc ]] ; then - # We are on GAEA. - # We are on GAEA. - echo gaea - if ( ! eval module help > /dev/null 2>&1 ) ; then - # We cannot simply load the module command. The GAEA - # /etc/profile modifies a number of module-related variables - # before loading the module command. Without those variables, - # the module command fails. Hence we actually have to source - # /etc/profile here. 
- source /etc/profile - __ms_source_etc_profile=yes - else - __ms_source_etc_profile=no - fi - module purge - module purge -# clean up after purge - unset _LMFILES_ - unset _LMFILES_000 - unset _LMFILES_001 - unset LOADEDMODULES - module load modules - if [[ -d /opt/cray/ari/modulefiles ]] ; then - module use -a /opt/cray/ari/modulefiles - fi - if [[ -d /opt/cray/pe/ari/modulefiles ]] ; then - module use -a /opt/cray/pe/ari/modulefiles - fi - if [[ -d /opt/cray/pe/craype/default/modulefiles ]] ; then - module use -a /opt/cray/pe/craype/default/modulefiles - fi - if [[ -s /etc/opt/cray/pe/admin-pe/site-config ]] ; then - source /etc/opt/cray/pe/admin-pe/site-config - fi - export NCEPLIBS=/lustre/f1/pdata/ncep_shared/NCEPLIBS/lib - if [[ -d "$NCEPLIBS" ]] ; then - module use $NCEPLIBS/modulefiles - fi - if [[ "$__ms_source_etc_profile" == yes ]] ; then - source /etc/profile - unset __ms_source_etc_profile - fi - -target=gaea - -# GWV ADD -module load craype -module load intel -export NCEPLIBS=/lustre/f2/dev/ncep/George.Vandenberghe/NEWCOPY/l508/lib/ -module use $NCEPLIBS/modulefiles -export myFC=ftn -export WRFPATH=$NCEPLIBS/wrf.shared.new/v1.1.1/src -export FCOMP=ftn -# END GWV ADD - -##--------------------------------------------------------------------------- -elif [[ -d /lfs4 ]] ; then - # We are on NOAA Jet - if ( ! eval module help > /dev/null 2>&1 ) ; then - echo load the module command 1>&2 - source /apps/lmod/lmod/init/$__ms_shell - fi - target=jet - module purge - module load intel/18.0.5.274 - module load impi/2018.4.274 - export NCEPLIBS=/mnt/lfs4/HFIP/hfv3gfs/nwprod/NCEPLIBS - #export NCEPLIBS=/mnt/lfs3/projects/hfv3gfs/gwv/ljtjet/lib - #export NCEPLIBS=/mnt/lfs3/projects/hfv3gfs/gwv/ljtjet/lib - #export NCEPLIBS=/mnt/lfs3/projects/hfv3gfs/gwv/NCEPLIBS.15X - module use $NCEPLIBS/modulefiles - export WRFPATH=$NCEPLIBS/wrf.shared.new/v1.1.1/src - export myFC=mpiifort - -##--------------------------------------------------------------------------- -else - echo WARNING: UNKNOWN PLATFORM 1>&2 -fi -##--------------------------------------------------------------------------- - -unset __ms_shell -unset __ms_ksh_test -unset __ms_bash_test -unset $__ms_function_name -unset __ms_function_name diff --git a/sorc/ncl.setup b/sorc/ncl.setup index c848fb9887..b4981689db 100644 --- a/sorc/ncl.setup +++ b/sorc/ncl.setup @@ -1,28 +1,12 @@ -if [ $target == wcoss_dell_p3 ] -then -module load NCL/6.4.0 -fi - -if [ $target == wcoss_cray ] -then -module load gcc/4.9.2 -module load NCL-gnu-haswell/6.3.0 -fi - -if [ $target == wcoss ] -then -module load ncarg/v6.1.0 -fi - -if [ $target == jet ] -then -module load ncl/6.5.0 -export NCARG_LIB=$NCARG_ROOT/lib -fi - -if [ $target == hera ] -then -module load ncl/6.5.0 -export NCARG_LIB=$NCARG_ROOT/lib -fi - +#!/bin/bash + +set +x +case ${target} in + 'jet'|'hera') + module load ncl/6.5.0 + export NCARG_LIB=${NCARG_ROOT}/lib + ;; + *) + echo "[${BASH_SOURCE[0]}]: unknown ${target}" + ;; +esac diff --git a/sorc/partial_build.sh b/sorc/partial_build.sh index 4e62333d9c..f212ae4cb4 100755 --- a/sorc/partial_build.sh +++ b/sorc/partial_build.sh @@ -4,144 +4,149 @@ # declare -a Build_prg=("Build_ufs_model" \ "Build_ww3_prepost" \ - "Build_gsi" \ + "Build_gsi_enkf" \ + "Build_gsi_utils" \ + "Build_gsi_monitor" \ "Build_ww3_prepost" \ - "Build_reg2grb2" \ - "Build_gldas" \ + "Build_gdas" \ "Build_upp" \ "Build_ufs_utils" \ "Build_gfs_wafs" \ - "Build_workflow_utils" \ - "Build_gfs_util") + "Build_gfs_utils") # # function parse_cfg: read config file and retrieve the 
values # parse_cfg() { - declare -i n - declare -i num_args - declare -i total_args - declare -a all_prg - total_args=$# - num_args=$1 - (( num_args == 0 )) && return 0 - config=$2 - [[ ${config,,} == "--verbose" ]] && config=$3 - all_prg=() - for (( n = num_args + 2; n <= total_args; n++ )); do - all_prg+=( ${!n} ) - done + declare -i n + declare -i num_args + declare -i total_args + declare -a all_prg + total_args=$# + num_args=$1 + (( num_args == 0 )) && return 0 + config=$2 + [[ ${config,,} == "--verbose" ]] && config=$3 + all_prg=() + for (( n = num_args + 2; n <= total_args; n++ )); do + all_prg+=( "${!n}" ) + done - if [[ ${config^^} == ALL ]]; then - # - # set all values to true - # - for var in "${Build_prg[@]}"; do - eval "$var=true" - done - elif [[ $config == config=* ]]; then - # - # process config file - # - cfg_file=${config#config=} - $verbose && echo "INFO: settings in config file: $cfg_file" - while read cline; do - # remove leading white space - clean_line="${cline#"${cline%%[![:space:]]*}"}" - ( [[ -z "$clean_line" ]] || [[ "${clean_line:0:1}" == "#" ]] ) || { - $verbose && echo $clean_line - first9=${clean_line:0:9} - [[ ${first9,,} == "building " ]] && { - short_prg=$(sed -e 's/.*(\(.*\)).*/\1/' <<< "$clean_line") - # remove trailing white space - clean_line="${cline%"${cline##*[![:space:]]}"}" - build_action=true - last5=${clean_line: -5} - [[ ${last5,,} == ". yes" ]] && build_action=true - last4=${clean_line: -4} - [[ ${last4,,} == ". no" ]] && build_action=false - found=false - for prg in ${all_prg[@]}; do - [[ $prg == "Build_"$short_prg ]] && { - found=true - eval "$prg=$build_action" - break - } - done - $found || { - echo "*** Unrecognized line in config file \"$cfg_file\":" 2>&1 - echo "$cline" 2>&1 - exit 3 - } - } - } - done < $cfg_file - elif [[ $config == select=* ]]; then - # - # set all values to (default) false - # - for var in "${Build_prg[@]}"; do - eval "$var=false" - done - # - # read command line partial build setting - # - del="" - sel_prg=${config#select=} - for separator in " " "," ";" ":" "/" "|"; do - [[ "${sel_prg/$separator}" == "$sel_prg" ]] || { - del=$separator - sel_prg=${sel_prg//$del/ } - } - done - [[ $del == "" ]] && { - short_prg=$sel_prg - found=false - for prg in ${all_prg[@]}; do - [[ $prg == "Build_"$short_prg ]] && { - found=true - eval "$prg=true" - break - } - done - $found || { - echo "*** Unrecognized program name \"$short_prg\" in command line" 2>&1 - exit 4 - } - } || { - for short_prg in $(echo ${sel_prg}); do - found=false - for prg in ${all_prg[@]}; do - [[ $prg == "Build_"$short_prg ]] && { - found=true - eval "$prg=true" - break - } - done - $found || { - echo "*** Unrecognized program name \"$short_prg\" in command line" 2>&1 - exit 5 - } - done - } - else - echo "*** Unrecognized command line option \"$config\"" 2>&1 - exit 6 - fi + if [[ ${config^^} == ALL ]]; then + # + # set all values to true + # + for var in "${Build_prg[@]}"; do + eval "${var}=true" + done + elif [[ ${config} == config=* ]]; then + # + # process config file + # + cfg_file=${config#config=} + ${verbose} && echo "INFO: settings in config file: ${cfg_file}" + while read -r cline; do + # remove leading white space + clean_line="${cline#"${cline%%[![:space:]]*}"}" + { [[ -z "${clean_line}" ]] || [[ "${clean_line:0:1}" == "#" ]]; } || { + ${verbose} && echo "${clean_line}" + first9=${clean_line:0:9} + [[ ${first9,,} == "building " ]] && { + # No shellcheck, this can't be replaced by a native bash substitute + # because it uses a regex + # shellcheck 
disable=SC2001 + short_prg=$(sed -e 's/.*(\(.*\)).*/\1/' <<< "${clean_line}") + # shellcheck disable= + # remove trailing white space + clean_line="${cline%"${cline##*[![:space:]]}"}" + build_action=true + last5=${clean_line: -5} + [[ ${last5,,} == ". yes" ]] && build_action=true + last4=${clean_line: -4} + [[ ${last4,,} == ". no" ]] && build_action=false + found=false + for prg in "${all_prg[@]}"; do + [[ ${prg} == "Build_${short_prg}" ]] && { + found=true + eval "${prg}=${build_action}" + break + } + done + ${found} || { + echo "*** Unrecognized line in config file \"${cfg_file}\":" 2>&1 + echo "${cline}" 2>&1 + exit 3 + } + } + } + done < "${cfg_file}" + elif [[ ${config} == select=* ]]; then + # + # set all values to (default) false + # + for var in "${Build_prg[@]}"; do + eval "${var}=false" + done + # + # read command line partial build setting + # + del="" + sel_prg=${config#select=} + for separator in " " "," ";" ":" "/" "|"; do + [[ "${sel_prg/${separator}}" == "${sel_prg}" ]] || { + del=${separator} + sel_prg=${sel_prg//${del}/ } + } + done + if [[ ${del} == "" ]]; then + { + short_prg=${sel_prg} + found=false + for prg in "${all_prg[@]}"; do + [[ ${prg} == "Build_${short_prg}" ]] && { + found=true + eval "${prg}=true" + break + } + done + ${found} || { + echo "*** Unrecognized program name \"${short_prg}\" in command line" 2>&1 + exit 4 + } + } || { + for short_prg in ${sel_prg}; do + found=false + for prg in "${all_prg[@]}"; do + [[ ${prg} == "Build_${short_prg}" ]] && { + found=true + eval "${prg}=true" + break + } + done + ${found} || { + echo "*** Unrecognized program name \"${short_prg}\" in command line" 2>&1 + exit 5 + } + done + } + fi + else + echo "*** Unrecognized command line option \"${config}\"" 2>&1 + exit 6 + fi } usage() { - cat <<- EOF 2>&1 - Usage: $BASH_SOURCE [-c config_file][-h][-v] - -h: - Print this help message and exit - -v: - Turn on verbose mode - -c config_file: - Override default config file to determine whether to build each program [default: gfs_build.cfg] - - EOF + cat << EOF 2>&1 +Usage: ${BASH_SOURCE[0]} [-c config_file][-h][-v] + -h: + Print this help message and exit + -v: + Turn on verbose mode + -c config_file: + Override default config file to determine whether to build each program [default: gfs_build.cfg] +EOF } @@ -153,43 +158,43 @@ verbose=false config_file="gfs_build.cfg" # Reset option counter for when this script is sourced OPTIND=1 -while getopts ":c:hs:v" option; do - case "${option}" in - c) config_file="${OPTARG}";; - h) usage;; - v) - verbose=true - parse_argv+=( "--verbose" ) - ;; - \?) 
- echo "[$BASH_SOURCE]: Unrecognized option: ${option}" - usage - ;; - :) - echo "[$BASH_SOURCE]: ${option} requires an argument" - usage - ;; - esac +while getopts ":c:h:v" option; do + case "${option}" in + c) config_file="${OPTARG}";; + h) usage;; + v) + verbose=true + parse_argv+=( "--verbose" ) + ;; + :) + echo "[${BASH_SOURCE[0]}]: ${option} requires an argument" + usage + ;; + *) + echo "[${BASH_SOURCE[0]}]: Unrecognized option: ${option}" + usage + ;; + esac done shift $((OPTIND-1)) -parse_argv+=( "config=$config_file" ) +parse_argv+=( "config=${config_file}" ) # # call arguments retriever/config parser # -parse_cfg ${#parse_argv[@]} "${parse_argv[@]}" ${Build_prg[@]} +parse_cfg ${#parse_argv[@]} "${parse_argv[@]}" "${Build_prg[@]}" # # print values of build array # -$verbose && { - echo "INFO: partial build settings:" - for var in "${Build_prg[@]}"; do - echo -n " $var: " - ${!var} && echo True || echo False - done +${verbose} && { + echo "INFO: partial build settings:" + for var in "${Build_prg[@]}"; do + echo -n " ${var}: " + "${!var}" && echo True || echo False + done } echo "=== end of partial build setting ===" > /dev/null diff --git a/sorc/regrid_nemsio.fd/Makefile b/sorc/regrid_nemsio.fd/Makefile deleted file mode 100644 index 4a089699b4..0000000000 --- a/sorc/regrid_nemsio.fd/Makefile +++ /dev/null @@ -1,159 +0,0 @@ -#============================================================================== -# -# REGRID_NEMSIO Makefile -# -#============================================================================== - -#----------------------------------------------------------------------------- -# -- Parent make (calls child make) -- -#----------------------------------------------------------------------------- - -# ------------- -# General Rules -# ------------- - -SHELL=/bin/sh - -RM = /bin/rm -f -MKDIR = /bin/mkdir -p - -#------------ -# Include machine dependent compile & load options -#------------ - -MAKE_CONF = -include $(MAKE_CONF) - -# ------------- -# This makefile -# ------------- - -MAKE_FILE = Makefile - -# ----------- -# Load module -# ----------- - -EXE_FILE = regrid_nemsio - -# -------------------- -# Installing directory -# -------------------- - -INSTALL_DIR = ../../exec/ - -# -------- -# Log file -# -------- - -LOG_FILE = log.make.$(EXE_FILE) - -# --------------- -# Call child make -# --------------- - -"" : - @$(MAKE) -f $(MAKE_FILE) all - -# ------------ -# Make install -# ------------ - -install: - @echo - @echo '==== INSTALL =================================================' - @if [ -e $(INSTALL_DIR) ]; then \ - if [ ! 
-d $(INSTALL_DIR) ]; then \ - echo '### Fail to create installing directory ###' ;\ - echo '### Stop the installation ###' ;\ - exit ;\ - fi ;\ - else \ - echo " mkdir -p $(INSTALL_DIR)" ;\ - mkdir -p $(INSTALL_DIR) ;\ - fi - cp $(EXE_FILE) $(INSTALL_DIR) - @cd $(INSTALL_DIR) ; ls -l $(pwd)/$(EXE_FILE) - -#----------- -# Make clean -# ---------- - -clean: - @echo - @echo '==== CLEAN ===================================================' - - $(RM) $(EXE_FILE) *.o *.mod - - $(RM) log.make.$(EXE_FILE) - -#----------------------------------------------------------------------------- -# -- Child make -- -#----------------------------------------------------------------------------- - -# --------- -# Libraries -# --------- - -INCS = $(NETCDF_INCLUDE) -I$(NEMSIO_INC) -LIBS = $(NEMSIO_LIB) $(BACIO_LIB4) $(W3NCO_LIBd) $(SP_LIB4) $(NETCDF_LDFLAGS) - -# ------------ -# Source files -# ------------ - -SRCSF90 = \ - kinds.f90 \ - constants.f90 \ - physcons.f90 \ - mpi_interface.f90 \ - namelist_def.f90 \ - variable_interface.f90 \ - netcdfio_interface.f90 \ - interpolation_interface.f90 \ - gfs_nems_interface.f90 \ - fv3_interface.f90 - -SRCS = $(SRCSF77) $(SRCSF90) - -# ------------ -# Object files -# ------------ - -OBJS = ${SRCSF90:.f90=.o} ${SRCSF77:.f=.o} - -# ------------ -# Dependencies -# ------------ -MAKE_DEPEND = Makefile.dependency -include $(MAKE_DEPEND) - -# ----------------------- -# Default compiling rules -# ----------------------- - -.SUFFIXES : -.SUFFIXES : .F90 .f90 .f .c .o - -.f90.o : - @echo - @echo '---> Compiling $<' - $(F90) $(FCFFLAGS) $(INCS) $(OPTIMIZATION) $(DEBUG) -c $< - -.f.o : - @echo - @echo '---> Compiling $<' - $(F77) $(FCFFLAGS) $(OPTIMIZATION) $(DEBUG) -c $< - -# ------------------------ -# Call compiler and linker -# ------------------------ - -all: REGRID_NEMSIO - -REGRID_NEMSIO: $(OBJS) - $(LD) $(LDFLAGS) $(OBJS) $(INCS) main.f90 $(LIBS) -o $(EXE_FILE) > $(LOG_FILE) - -help: - @ echo "Available targets:" - @ echo " make creates executable" - @ echo " make install creates exec & places it in bin" - @ echo " make clean cleans objects, exec, and alien files" diff --git a/sorc/regrid_nemsio.fd/Makefile.dependency b/sorc/regrid_nemsio.fd/Makefile.dependency deleted file mode 100644 index 52f6e80077..0000000000 --- a/sorc/regrid_nemsio.fd/Makefile.dependency +++ /dev/null @@ -1,9 +0,0 @@ -kinds.o: kinds.f90 -constants.o: constants.f90 kinds.o -physcons.o: physcons.f90 kinds.o -variable_interface.o: variable_interface.f90 namelist_def.o physcons.o constants.o kinds.o -namelist_def.o: namelist_def.f90 mpi_interface.o kinds.o -netcdfio_interface.o: netcdfio_interface.f90 kinds.o -interpolation_interface.o: interpolation_interface.f90 constants.o kinds.o namelist_def.o netcdfio_interface.o -gfs_nems_interface.o: gfs_nems_interface.f90 variable_interface.o constants.o kinds.o mpi_interface.o namelist_def.o -fv3_interface.o: fv3_interface.f90 variable_interface.o interpolation_interface.o constants.o kinds.o mpi_interface.o namelist_def.o netcdfio_interface.o gfs_nems_interface.o diff --git a/sorc/regrid_nemsio.fd/constants.f90 b/sorc/regrid_nemsio.fd/constants.f90 deleted file mode 100644 index 8627358e2d..0000000000 --- a/sorc/regrid_nemsio.fd/constants.f90 +++ /dev/null @@ -1,314 +0,0 @@ -! this module was extracted from the GSI version operational -! at NCEP in Dec. 2007. -module constants -!$$$ module documentation block -! . . . . -! module: constants -! prgmmr: treadon org: np23 date: 2003-09-25 -! -! 
abstract: This module contains the definition of various constants -! used in the gsi code -! -! program history log: -! 2003-09-25 treadon - original code -! 2004-03-02 treadon - allow global and regional constants to differ -! 2004-06-16 treadon - update documentation -! 2004-10-28 treadon - replace parameter tiny=1.e-12 with tiny_r_kind -! and tiny_single -! 2004-11-16 treadon - add huge_single, huge_r_kind parameters -! 2005-01-27 cucurull - add ione -! 2005-08-24 derber - move cg_term to constants from qcmod -! 2006-03-07 treadon - add rd_over_cp_mass -! 2006-05-18 treadon - add huge_i_kind -! 2006-06-06 su - add var-qc wgtlim, change value to 0.25 (ECMWF) -! 2006-07-28 derber - add r1000 -! -! Subroutines Included: -! sub init_constants - compute derived constants, set regional/global constants -! -! Variable Definitions: -! see below -! -! attributes: -! language: f90 -! machine: ibm RS/6000 SP -! -!$$$ - use kinds, only: r_single,r_kind,i_kind - implicit none - -! Declare constants - integer(i_kind) izero,ione - real(r_kind) rearth,grav,omega,rd,rv,cp,cv,cvap,cliq - real(r_kind) csol,hvap,hfus,psat,t0c,ttp,jcal,cp_mass,cg_term - real(r_kind) fv,deg2rad,rad2deg,pi,tiny_r_kind,huge_r_kind,huge_i_kind - real(r_kind) ozcon,rozcon,tpwcon,rd_over_g,rd_over_cp,g_over_rd - real(r_kind) amsua_clw_d1,amsua_clw_d2,constoz,zero,one,two,four - real(r_kind) one_tenth,quarter,three,five,rd_over_cp_mass - real(r_kind) rearth_equator,stndrd_atmos_ps,r1000,stndrd_atmos_lapse - real(r_kind) semi_major_axis,semi_minor_axis,n_a,n_b - real(r_kind) eccentricity,grav_polar,grav_ratio - real(r_kind) grav_equator,earth_omega,grav_constant - real(r_kind) flattening,eccentricity_linear,somigliana - real(r_kind) dldt,dldti,hsub,psatk,tmix,xa,xai,xb,xbi - real(r_kind) eps,epsm1,omeps,wgtlim - real(r_kind) elocp,cpr,el2orc,cclimit,climit,epsq - real(r_kind) pcpeff0,pcpeff1,pcpeff2,pcpeff3,rcp,c0,delta - real(r_kind) h1000,factor1,factor2,rhcbot,rhctop,dx_max,dx_min,dx_inv - real(r_kind) h300,half,cmr,cws,ke2,row,rrow - real(r_single) zero_single,tiny_single,huge_single - real(r_single) rmw_mean_distance, roic_mean_distance - logical :: constants_initialized = .true. - - -! Define constants common to global and regional applications -! name value description units -! ---- ----- ----------- ----- - parameter(rearth_equator= 6.37813662e6_r_kind) ! equatorial earth radius (m) - parameter(omega = 7.2921e-5_r_kind) ! angular velocity of earth (1/s) - parameter(cp = 1.0046e+3_r_kind) ! specific heat of air @pressure (J/kg/K) - parameter(cvap = 1.8460e+3_r_kind) ! specific heat of h2o vapor (J/kg/K) - parameter(csol = 2.1060e+3_r_kind) ! specific heat of solid h2o (ice)(J/kg/K) - parameter(hvap = 2.5000e+6_r_kind) ! latent heat of h2o condensation (J/kg) - parameter(hfus = 3.3358e+5_r_kind) ! latent heat of h2o fusion (J/kg) - parameter(psat = 6.1078e+2_r_kind) ! pressure at h2o triple point (Pa) - parameter(t0c = 2.7315e+2_r_kind) ! temperature at zero celsius (K) - parameter(ttp = 2.7316e+2_r_kind) ! temperature at h2o triple point (K) - parameter(jcal = 4.1855e+0_r_kind) ! joules per calorie () - parameter(stndrd_atmos_ps = 1013.25e2_r_kind) ! 1976 US standard atmosphere ps (Pa) - -! 
Numeric constants - parameter(izero = 0) - parameter(ione = 1) - parameter(zero_single = 0.0_r_single) - parameter(zero = 0.0_r_kind) - parameter(one_tenth = 0.10_r_kind) - parameter(quarter= 0.25_r_kind) - parameter(one = 1.0_r_kind) - parameter(two = 2.0_r_kind) - parameter(three = 3.0_r_kind) - parameter(four = 4.0_r_kind) - parameter(five = 5.0_r_kind) - parameter(r1000 = 1000.0_r_kind) - -! Constants for gps refractivity - parameter(n_a=77.6_r_kind) !K/mb - parameter(n_b=3.73e+5_r_kind) !K^2/mb - -! Parameters below from WGS-84 model software inside GPS receivers. - parameter(semi_major_axis = 6378.1370e3_r_kind) ! (m) - parameter(semi_minor_axis = 6356.7523142e3_r_kind) ! (m) - parameter(grav_polar = 9.8321849378_r_kind) ! (m/s2) - parameter(grav_equator = 9.7803253359_r_kind) ! (m/s2) - parameter(earth_omega = 7.292115e-5_r_kind) ! (rad/s) - parameter(grav_constant = 3.986004418e14_r_kind) ! (m3/s2) - -! Derived geophysical constants - parameter(flattening = (semi_major_axis-semi_minor_axis)/semi_major_axis)!() - parameter(somigliana = & - (semi_minor_axis/semi_major_axis) * (grav_polar/grav_equator) - one)!() - parameter(grav_ratio = (earth_omega*earth_omega * & - semi_major_axis*semi_major_axis * semi_minor_axis) / grav_constant) !() - -! Derived thermodynamic constants - parameter ( dldti = cvap-csol ) - parameter ( hsub = hvap+hfus ) - parameter ( psatk = psat*0.001_r_kind ) - parameter ( tmix = ttp-20._r_kind ) - parameter ( elocp = hvap/cp ) - parameter ( rcp = one/cp ) - -! Constants used in GFS moist physics - parameter ( h300 = 300._r_kind ) - parameter ( half = 0.5_r_kind ) - parameter ( cclimit = 0.001_r_kind ) - parameter ( climit = 1.e-20_r_kind) - parameter ( epsq = 2.e-12_r_kind ) - parameter ( h1000 = 1000.0_r_kind) - parameter ( rhcbot=0.85_r_kind ) - parameter ( rhctop=0.85_r_kind ) - parameter ( dx_max=-8.8818363_r_kind ) - parameter ( dx_min=-5.2574954_r_kind ) - parameter ( dx_inv=one/(dx_max-dx_min) ) - parameter ( c0=0.002_r_kind ) - parameter ( delta=0.6077338_r_kind ) - parameter ( pcpeff0=1.591_r_kind ) - parameter ( pcpeff1=-0.639_r_kind ) - parameter ( pcpeff2=0.0953_r_kind ) - parameter ( pcpeff3=-0.00496_r_kind ) - parameter ( cmr = one/0.0003_r_kind ) - parameter ( cws = 0.025_r_kind ) - parameter ( ke2 = 0.00002_r_kind ) - parameter ( row = 1000._r_kind ) - parameter ( rrow = one/row ) - -! Constant used to process ozone - parameter ( constoz = 604229.0_r_kind) - -! Constants used in cloud liquid water correction for AMSU-A -! brightness temperatures - parameter ( amsua_clw_d1 = 0.754_r_kind ) - parameter ( amsua_clw_d2 = -2.265_r_kind ) - -! Constants used for variational qc - parameter ( wgtlim = 0.25_r_kind) ! Cutoff weight for concluding that obs has been - ! rejected by nonlinear qc. This limit is arbitrary - ! and DOES NOT affect nonlinear qc. It only affects - ! the printout which "counts" the number of obs that - ! "fail" nonlinear qc. Observations counted as failing - ! nonlinear qc are still assimilated. Their weight - ! relative to other observations is reduced. Changing - ! wgtlim does not alter the analysis, only - ! the nonlinear qc data "count" - -! Constants describing the Extended Best-Track Reanalysis [Demuth et -! al., 2008] tropical cyclone (TC) distance for regions relative to TC -! track position; units are in kilometers - - parameter (rmw_mean_distance = 64.5479412) - parameter (roic_mean_distance = 338.319656) - -contains - subroutine init_constants_derived -!$$$ subprogram documentation block -! . . . . -! 
subprogram: init_constants_derived set derived constants -! prgmmr: treadon org: np23 date: 2004-12-02 -! -! abstract: This routine sets derived constants -! -! program history log: -! 2004-12-02 treadon -! 2005-03-03 treadon - add implicit none -! -! input argument list: -! -! output argument list: -! -! attributes: -! language: f90 -! machine: ibm rs/6000 sp -! -!$$$ - implicit none - -! Trigonometric constants - pi = acos(-one) - deg2rad = pi/180.0_r_kind - rad2deg = one/deg2rad - cg_term = (sqrt(two*pi))/two ! constant for variational qc - tiny_r_kind = tiny(zero) - huge_r_kind = huge(zero) - tiny_single = tiny(zero_single) - huge_single = huge(zero_single) - huge_i_kind = huge(izero) - -! Geophysical parameters used in conversion of geopotential to -! geometric height - eccentricity_linear = sqrt(semi_major_axis**2 - semi_minor_axis**2) - eccentricity = eccentricity_linear / semi_major_axis - constants_initialized = .true. - - return - end subroutine init_constants_derived - - subroutine init_constants(regional) -!$$$ subprogram documentation block -! . . . . -! subprogram: init_constants set regional or global constants -! prgmmr: treadon org: np23 date: 2004-03-02 -! -! abstract: This routine sets constants specific to regional or global -! applications of the gsi -! -! program history log: -! 2004-03-02 treadon -! 2004-06-16 treadon, documentation -! 2004-10-28 treadon - use intrinsic TINY function to set value -! for smallest machine representable positive -! number -! 2004-12-03 treadon - move derived constants to init_constants_derived -! 2005-03-03 treadon - add implicit none -! -! input argument list: -! regional - if .true., set regional gsi constants; -! otherwise (.false.), use global constants -! -! output argument list: -! -! attributes: -! language: f90 -! machine: ibm rs/6000 sp -! -!$$$ - implicit none - logical regional - real(r_kind) reradius,g,r_d,r_v,cliq_wrf - - stndrd_atmos_lapse = 0.0065 - -! Define regional constants here - if (regional) then - -! Name given to WRF constants - reradius = one/6370.e03_r_kind - g = 9.81_r_kind - r_d = 287.04_r_kind - r_v = 461.6_r_kind - cliq_wrf = 4190.0_r_kind - cp_mass = 1004.67_r_kind - -! Transfer WRF constants into unified GSI constants - rearth = one/reradius - grav = g - rd = r_d - rv = r_v - cv = cp-r_d - cliq = cliq_wrf - rd_over_cp_mass = rd / cp_mass - -! Define global constants here - else - rearth = 6.3712e+6_r_kind - grav = 9.80665e+0_r_kind - rd = 2.8705e+2_r_kind - rv = 4.6150e+2_r_kind - cv = 7.1760e+2_r_kind - cliq = 4.1855e+3_r_kind - cp_mass= zero - rd_over_cp_mass = zero - endif - - -! Now define derived constants which depend on constants -! which differ between global and regional applications. - -! Constants related to ozone assimilation - ozcon = grav*21.4e-9_r_kind - rozcon= one/ozcon - -! Constant used in vertical integral for precipitable water - tpwcon = 100.0_r_kind/grav - -! Derived atmospheric constants - fv = rv/rd-one ! 
used in virtual temperature equation - dldt = cvap-cliq - xa = -(dldt/rv) - xai = -(dldti/rv) - xb = xa+hvap/(rv*ttp) - xbi = xai+hsub/(rv*ttp) - eps = rd/rv - epsm1 = rd/rv-one - omeps = one-eps - factor1 = (cvap-cliq)/rv - factor2 = hvap/rv-factor1*t0c - cpr = cp*rd - el2orc = hvap*hvap/(rv*cp) - rd_over_g = rd/grav - rd_over_cp = rd/cp - g_over_rd = grav/rd - - return - end subroutine init_constants - -end module constants diff --git a/sorc/regrid_nemsio.fd/fv3_interface.f90 b/sorc/regrid_nemsio.fd/fv3_interface.f90 deleted file mode 100644 index bbe558e428..0000000000 --- a/sorc/regrid_nemsio.fd/fv3_interface.f90 +++ /dev/null @@ -1,779 +0,0 @@ -module fv3_interface - - !======================================================================= - - ! Define associated modules and subroutines - - !----------------------------------------------------------------------- - - use constants - - !----------------------------------------------------------------------- - - use gfs_nems_interface - use interpolation_interface - use mpi_interface - use namelist_def - use netcdfio_interface - use variable_interface - use nemsio_module - - !----------------------------------------------------------------------- - - implicit none - - !----------------------------------------------------------------------- - - ! Define all data and structure types for routine; these variables - ! are variables required by the subroutines within this module - - type analysis_grid - character(len=500) :: filename - character(len=500) :: filename2d - integer :: nx - integer :: ny - integer :: nz - integer :: ntime - end type analysis_grid ! type analysis_grid - - ! Define global variables - - integer n2dvar,n3dvar,ntvars,nrecs,nvvars - real(nemsio_realkind), dimension(:,:,:,:), allocatable :: fv3_var_3d - real(nemsio_realkind), dimension(:,:,:), allocatable :: fv3_var_2d - - !----------------------------------------------------------------------- - - ! Define interfaces and attributes for module routines - - private - public :: fv3_regrid_nemsio - - !----------------------------------------------------------------------- - -contains - - !----------------------------------------------------------------------- - - subroutine fv3_regrid_nemsio() - - ! Define variables computed within routine - - implicit none - type(analysis_grid) :: anlygrd(ngrids) - type(varinfo), allocatable, dimension(:) :: var_info,var_info2d,var_info3d - type(gfs_grid) :: gfs_grid - type(gridvar) :: invar,invar2 - type(gridvar) :: outvar,outvar2 - type(nemsio_meta) :: meta_nemsio2d, meta_nemsio3d - - type(esmfgrid) :: grid_bilin - type(esmfgrid) :: grid_nn - - character(len=20) :: var_name - character(len=20) :: nems_name - character(len=20) :: nems_levtyp - character(len=20) :: itrptyp - logical :: itrp_bilinear - logical :: itrp_nrstnghbr - real(nemsio_realkind), dimension(:,:), allocatable :: workgrid - real(nemsio_realkind), dimension(:), allocatable :: pk - real(nemsio_realkind), dimension(:), allocatable :: bk - real, dimension(:), allocatable :: sendbuffer,recvbuffer - integer :: fhour - integer :: ncoords - integer nems_lev,ndims,istatus,ncol,levs_fix - logical clip - - ! Define counting variables - - integer :: i, j, k, l,nlev,k2,k3,nrec - - !===================================================================== - - ! Define local variables - - call init_constants_derived() - call gfs_grid_initialize(gfs_grid) - - ! Loop through local variables - - if(mpi_procid .eq. 
mpi_masternode) then - print *,'variable table' - print *,'--------------' - open(912,file=trim(variable_table),form='formatted') - ntvars=0; n2dvar=0; n3dvar=0 - nrecs = 0 - loop_read: do while (istatus == 0) - read(912,199,iostat=istatus) var_name,nems_name,nems_levtyp,nems_lev,itrptyp,clip,ndims - if( istatus /= 0 ) exit loop_read - nrecs = nrecs + 1 - if(var_name(1:1) .ne. "#") then - ntvars = ntvars + 1 - ntvars = ntvars + 1 - if (ndims == 2) then - n2dvar = n2dvar+1 - else if (ndims == 3) then - n3dvar = n3dvar+1 - else - print *,'ndims must be 2 or 3 in variable_table.txt' - call mpi_abort(mpi_comm_world,-91,mpi_ierror) - stop - endif - !print *,'ntvars,n2dvar,n3dvar',ntvars,n2dvar,n3dvar - !write(6,199) var_name, nems_name,nems_levtyp,nems_lev,itrptyp,clip,ndims - endif - enddo loop_read - close(912) - print *,'nrecs,ntvars,n2dvar,n3dvar',nrecs,ntvars,n2dvar,n3dvar - endif - call mpi_bcast(nrecs,1,mpi_integer,mpi_masternode,mpi_comm_world,mpi_ierror) - call mpi_bcast(n2dvar,1,mpi_integer,mpi_masternode,mpi_comm_world,mpi_ierror) - call mpi_bcast(n3dvar,1,mpi_integer,mpi_masternode,mpi_comm_world,mpi_ierror) - call mpi_bcast(ntvars,1,mpi_integer,mpi_masternode,mpi_comm_world,mpi_ierror) - if (ntvars == 0) then - print *,'empty variable_table.txt!' - call mpi_interface_terminate() - stop - endif - allocate(var_info(ntvars)) - open(912,file=trim(variable_table),form='formatted') - k = 0 - nvvars = 0 ! number of vector variables - do nrec = 1, nrecs - read(912,199,iostat=istatus) var_name,nems_name,nems_levtyp,nems_lev,itrptyp,clip,ndims - if (var_name(1:1) .ne. "#") then - k = k + 1 - var_info(k)%var_name = var_name - var_info(k)%nems_name = nems_name - var_info(k)%nems_levtyp = nems_levtyp - var_info(k)%nems_lev = nems_lev - var_info(k)%itrptyp = itrptyp - if (itrptyp.EQ.'vector') then - nvvars=nvvars+1 - endif - var_info(k)%clip = clip - var_info(k)%ndims = ndims - if(mpi_procid .eq. mpi_masternode) then - write(6,199) var_info(k)%var_name, var_info(k)%nems_name,var_info(k)%nems_levtyp, & - var_info(k)%nems_lev,var_info(k)%itrptyp,var_info(k)%clip,var_info(k)%ndims - endif - endif - end do ! do k = 1, ntvars - ! assume vectors are in pairs - nvvars=nvvars/2 - call mpi_bcast(nvvars,1,mpi_integer,mpi_masternode,mpi_comm_world,mpi_ierror) - close(912) -199 format(a20,1x,a20,1x,a20,1x,i1,1x,a20,1x,l1,1x,i1) - allocate(var_info3d(n3dvar+2)) - allocate(var_info2d(n2dvar)) - k2 = 0 - k3 = 0 - do k=1,ntvars - if (var_info(k)%ndims == 2) then - k2 = k2 + 1 - var_info2d(k2) = var_info(k) - endif - if (var_info(k)%ndims == 3 .or. & - trim(var_info(k)%nems_name) == 'pres' .or. & - trim(var_info(k)%nems_name) == 'orog') then - k3 = k3 + 1 - var_info3d(k3) = var_info(k) - ! orography called 'hgt' in 3d file, not 'orog' - if (trim(var_info(k)%nems_name) == 'orog') then - var_info3d(k3)%nems_name = 'hgt ' - endif - endif - enddo - - - do i = 1, ngrids - anlygrd(i)%filename = analysis_filename(i) - anlygrd(i)%filename2d = analysis_filename2d(i) - call fv3_regrid_initialize(anlygrd(i)) - end do ! do i = 1, ngrids - - ! 
Define local variables - - ncxdim = anlygrd(1)%nx - ncydim = anlygrd(1)%ny - if (n3dvar > 0) then - nczdim = anlygrd(1)%nz - else - nczdim = 0 - endif - nctdim = anlygrd(1)%ntime - ncoords = ncxdim*ncydim - invar%ncoords = ncoords*ngrids - invar2%ncoords = ncoords*ngrids - outvar%ncoords = gfs_grid%ncoords - outvar2%ncoords = gfs_grid%ncoords - call interpolation_initialize_gridvar(invar) - call interpolation_initialize_gridvar(invar2) - call interpolation_initialize_gridvar(outvar) - call interpolation_initialize_gridvar(outvar2) - meta_nemsio3d%modelname = 'GFS' - meta_nemsio3d%version = 200509 - meta_nemsio3d%nrec = 2 + nczdim*n3dvar - meta_nemsio3d%nmeta = 5 - meta_nemsio3d%nmetavari = 3 - meta_nemsio3d%nmetaaryi = 1 - meta_nemsio3d%dimx = gfs_grid%nlons - meta_nemsio3d%dimy = gfs_grid%nlats - meta_nemsio3d%dimz = nczdim - meta_nemsio3d%jcap = ntrunc - meta_nemsio3d%nsoil = 4 - meta_nemsio3d%nframe = 0 - meta_nemsio3d%ntrac = 3 - meta_nemsio3d%idrt = 4 - meta_nemsio3d%ncldt = 3 - meta_nemsio3d%idvc = 2 - meta_nemsio3d%idvm = 2 - meta_nemsio3d%idsl = 1 - meta_nemsio3d%idate(1:6) = 0 - meta_nemsio3d%idate(7) = 1 - read(forecast_timestamp(9:10),'(i2)') meta_nemsio3d%idate(4) - read(forecast_timestamp(7:8), '(i2)') meta_nemsio3d%idate(3) - read(forecast_timestamp(5:6), '(i2)') meta_nemsio3d%idate(2) - read(forecast_timestamp(1:4), '(i4)') meta_nemsio3d%idate(1) - meta_nemsio2d = meta_nemsio3d - meta_nemsio2d%nrec = n2dvar - call mpi_barrier(mpi_comm_world,mpi_ierror) - call gfs_nems_meta_initialization(meta_nemsio2d,var_info2d,gfs_grid) - call gfs_nems_meta_initialization(meta_nemsio3d,var_info3d,gfs_grid) - - ! Allocate memory for local variables - - if(.not. allocated(fv3_var_2d) .and. n2dvar > 0) & - & allocate(fv3_var_2d(ngrids,ncxdim,ncydim)) - if (mpi_nprocs /= nczdim+1) then - call mpi_barrier(mpi_comm_world, mpi_ierror) - if (mpi_procid .eq. mpi_masternode) then - print *,'number of mpi tasks must be equal to number of levels + 1' - print *,'mpi procs = ',mpi_nprocs,' levels = ',nczdim - endif - call mpi_interface_terminate() - stop - endif - !print *,'allocate fv3_var_3d',ngrids,ncxdim,ncydim,nczdim,mpi_procid - if(.not. allocated(fv3_var_3d) .and. n3dvar > 0) & - & allocate(fv3_var_3d(ngrids,ncxdim,ncydim,nczdim)) - !print *,'done allocating fv3_var_3d',ngrids,ncxdim,ncydim,nczdim,mpi_procid - - ! Check local variable and proceed accordingly - - call mpi_barrier(mpi_comm_world,mpi_ierror) - if(mpi_procid .eq. mpi_masternode) then - - ! Allocate memory for local variables - - if (n3dvar > 0) then - if(.not. allocated(pk)) allocate(pk(nczdim+1)) - if(.not. allocated(bk)) allocate(bk(nczdim+1)) - - ! Define local variables - - if (trim(gfs_hyblevs_filename) == 'NOT USED' ) then - call netcdfio_values_1d(anlygrd(1)%filename,'pk',pk) - call netcdfio_values_1d(anlygrd(1)%filename,'bk',bk) - else - open(913,file=trim(gfs_hyblevs_filename),form='formatted') - read(913,*) ncol, levs_fix - if (levs_fix /= (nczdim+1) ) then - call mpi_barrier(mpi_comm_world, mpi_ierror) - print *,'levs in ', trim(gfs_hyblevs_filename), ' not equal to',(nczdim+1) - call mpi_interface_terminate() - stop - endif - do k=nczdim+1,1,-1 - read(913,*) pk(k),bk(k) - enddo - close(913) - endif - if (minval(pk) < -1.e10 .or. minval(bk) < -1.e10) then - print *,'pk,bk not found in netcdf file..' - meta_nemsio3d%vcoord = -9999._nemsio_realkind - else - ! Loop through local variable - - do k = 1, nczdim + 1 - - ! 
Define local variables - - meta_nemsio3d%vcoord((nczdim + 1) - k + 1,1,1) = pk(k) - meta_nemsio3d%vcoord((nczdim + 1) - k + 1,2,1) = bk(k) - - end do ! do k = 1, nczdim + 1 - endif - endif - - end if ! if(mpi_procid .eq. mpi_masternode) - - ! initialize/read in interpolation weight - - grid_bilin%filename = esmf_bilinear_filename - call interpolation_initialize_esmf(grid_bilin) - - grid_nn%filename = esmf_neareststod_filename - call interpolation_initialize_esmf(grid_nn) - - do l = 1, nctdim - - ncrec = l ! time level to read from netcdf file - - ! Define local variables - - call fv3_grid_fhour(anlygrd(1),meta_nemsio2d%nfhour) - call fv3_grid_fhour(anlygrd(1),meta_nemsio3d%nfhour) - meta_nemsio3d%nfminute = int(0.0) - meta_nemsio3d%nfsecondn = int(0.0) - meta_nemsio3d%nfsecondd = int(1.0) - meta_nemsio3d%fhour = meta_nemsio3d%nfhour - meta_nemsio2d%nfminute = int(0.0) - meta_nemsio2d%nfsecondn = int(0.0) - meta_nemsio2d%nfsecondd = int(1.0) - meta_nemsio2d%fhour = meta_nemsio2d%nfhour - - ! initialize nemsio file. - if(mpi_procid .eq. mpi_masternode) then - call gfs_nems_initialize(meta_nemsio2d, meta_nemsio3d) - end if - - ! wait here. - call mpi_barrier(mpi_comm_world,mpi_ierror) - - ! Loop through local variables - k2=1 - do k = 1, ntvars - nvvars - - ! Define local variables - - itrp_bilinear = .false. - itrp_nrstnghbr = .false. - - ! Do 2D variables. - - if(var_info(k2)%ndims .eq. 2) then - - ! Check local variable and proceed accordingly - - if(mpi_procid .eq. mpi_masternode) then - - ! Check local variable and proceed accordingly - - call fv3_grid_read(anlygrd(1:ngrids), var_info(k2)%var_name,.true.,.false.) - - call interpolation_define_gridvar(invar,ncxdim,ncydim, ngrids,fv3_var_2d) - if (trim(var_info(k2)%nems_name) == 'pres') then - ! interpolate in exner(pressure) - invar%var = (invar%var/stndrd_atmos_ps)**(rd_over_g*stndrd_atmos_lapse) - end if - - if(var_info(k2)%itrptyp .eq. 'bilinear') then - call interpolation_esmf(invar,outvar,grid_bilin, .false.) - end if - - if(var_info(k2)%itrptyp .eq. 'nrstnghbr') then - call interpolation_esmf(invar,outvar,grid_nn, .true.) - end if - - if (trim(var_info(k2)%nems_name) == 'pres') then - outvar%var = stndrd_atmos_ps*(outvar%var**(g_over_rd/stndrd_atmos_lapse)) - end if - - if(var_info(k2)%itrptyp .eq. 'vector') then - ! read in u winds - call fv3_grid_read(anlygrd(1:ngrids), var_info(k2)%var_name,.true.,.false.) - call interpolation_define_gridvar(invar,ncxdim,ncydim,ngrids,fv3_var_2d) - ! read in v winds - call fv3_grid_read(anlygrd(1:ngrids), var_info(k2+1)%var_name,.true.,.false.) - call interpolation_define_gridvar(invar2,ncxdim,ncydim,ngrids,fv3_var_2d) - call interpolation_esmf_vect(invar,invar2,grid_bilin,outvar,outvar2) - end if - - ! Clip variable to zero if desired. - if(var_info(k2)%clip) call variable_clip(outvar%var) - - ! Write to NEMSIO file. - call gfs_nems_write('2d',real(outvar%var), & - var_info(k2)%nems_name,var_info(k2)%nems_levtyp,var_info(k2)%nems_lev) - if (trim(var_info(k2)%nems_name) == 'pres' .or. & - trim(var_info(k2)%nems_name) == 'orog' .or. & - trim(var_info(k2)%nems_name) == 'hgt') then - ! write surface height and surface pressure to 3d file. - ! (surface height called 'orog' in nemsio bin4, 'hgt' in - ! grib) - if (trim(var_info(k2)%nems_name) == 'orog') then - call gfs_nems_write('3d',real(outvar%var), & - 'hgt ',var_info(k2)%nems_levtyp,1) - else - call gfs_nems_write('3d',real(outvar%var), & - var_info(k2)%nems_name,var_info(k2)%nems_levtyp,1) - endif - endif - if(var_info(k2)%itrptyp .eq. 
'vector') then ! write v winds - call gfs_nems_write('2d',real(outvar2%var), & - var_info(k2+1)%nems_name,var_info(k2+1)%nems_levtyp,var_info(k2+1)%nems_lev) - endif - end if ! if(mpi_procid .eq. mpi_masternode) - - ! Define local variables - call mpi_barrier(mpi_comm_world,mpi_ierror) - - end if ! if(var_info(k2)%ndims .eq. 2) - - ! Do 3D variables. - - if(var_info(k2)%ndims .eq. 3) then - - ! read 3d grid on master node, send to other tasks - if(mpi_procid .eq. mpi_masternode) then - call fv3_grid_read(anlygrd(1:ngrids), var_info(k2)%var_name,.false.,.true.) - do nlev=1,nczdim - call mpi_send(fv3_var_3d(1,1,1,nlev),ngrids*ncxdim*ncydim,mpi_real,& - nlev,1,mpi_comm_world,mpi_errorstatus,mpi_ierror) - enddo - if(trim(adjustl(var_info(k2)%itrptyp)) .eq. 'vector') then ! winds - call mpi_barrier(mpi_comm_world,mpi_ierror) - call fv3_grid_read(anlygrd(1:ngrids), var_info(k2+1)%var_name,.false.,.true.) - do nlev=1,nczdim - call mpi_send(fv3_var_3d(1,1,1,nlev),ngrids*ncxdim*ncydim,mpi_real,& - nlev,1,mpi_comm_world,mpi_errorstatus,mpi_ierror) - enddo - endif - else if (mpi_procid .le. nczdim) then - ! do interpolation, one level on each task. - call mpi_recv(fv3_var_3d(1,1,1,mpi_procid),ngrids*ncxdim*ncydim,mpi_real,& - 0,1,mpi_comm_world,mpi_errorstatus,mpi_ierror) - - call interpolation_define_gridvar(invar,ncxdim,ncydim, ngrids,fv3_var_3d(:,:,:,mpi_procid)) - - if(var_info(k2)%itrptyp .eq. 'bilinear') then - call interpolation_esmf(invar,outvar,grid_bilin, .false.) - end if ! if(var_info(k2)%itrptyp .eq. 'bilinear') - - if(var_info(k2)%itrptyp .eq. 'nrstnghbr') then - call interpolation_esmf(invar,outvar,grid_nn, .true.) - end if ! if(var_info(k2)%itrptyp .eq. 'nrstnghbr') - - if(trim(adjustl(var_info(k2)%itrptyp)) .eq. 'vector') then ! winds - call mpi_barrier(mpi_comm_world,mpi_ierror) - call mpi_recv(fv3_var_3d(1,1,1,mpi_procid),ngrids*ncxdim*ncydim,mpi_real,& - 0,1,mpi_comm_world,mpi_errorstatus,mpi_ierror) - call interpolation_define_gridvar(invar2,ncxdim,ncydim,ngrids,fv3_var_3d(:,:,:,mpi_procid)) - call interpolation_esmf_vect(invar,invar2,grid_bilin,outvar,outvar2) - endif - - if(var_info(k2)%clip) call variable_clip(outvar%var(:)) - - end if ! if(mpi_procid .ne. mpi_masternode .and. & - ! mpi_procid .le. nczdim) - - ! gather results back on root node to write out. - - if (mpi_procid == mpi_masternode) then - ! receive one level of interpolated data on root task. - if (.not. allocated(workgrid)) allocate(workgrid(gfs_grid%ncoords,nczdim)) - if (.not. allocated(recvbuffer)) allocate(recvbuffer(gfs_grid%ncoords)) - do nlev=1,nczdim - call mpi_recv(recvbuffer,gfs_grid%ncoords,mpi_real,& - nlev,1,mpi_comm_world,mpi_errorstatus,mpi_ierror) - workgrid(:,nlev) = recvbuffer - enddo - deallocate(recvbuffer) - else - ! send one level of interpolated data to root task. - if (.not. allocated(sendbuffer)) allocate(sendbuffer(gfs_grid%ncoords)) - sendbuffer(:) = outvar%var(:) - call mpi_send(sendbuffer,gfs_grid%ncoords,mpi_real,& - 0,1,mpi_comm_world,mpi_errorstatus,mpi_ierror) - endif - - ! Write to NEMSIO file. - - if(mpi_procid .eq. mpi_masternode) then - - ! Loop through local variable - - do j = 1, nczdim - - ! Define local variables - - call gfs_nems_write('3d',workgrid(:,nczdim - j + 1), & - & var_info(k2)%nems_name,var_info(k2)%nems_levtyp, & - & j) - - end do ! do j = 1, nczdim - - end if ! if(mpi_procid .eq. mpi_masternode) - - if(trim(adjustl(var_info(k2)%itrptyp)) .eq. 'vector') then ! winds - if (mpi_procid == mpi_masternode) then - ! 
receive one level of interpolated data on root task. - if (.not. allocated(workgrid)) allocate(workgrid(gfs_grid%ncoords,nczdim)) - if (.not. allocated(recvbuffer)) allocate(recvbuffer(gfs_grid%ncoords)) - do nlev=1,nczdim - call mpi_recv(recvbuffer,gfs_grid%ncoords,mpi_real,& - nlev,1,mpi_comm_world,mpi_errorstatus,mpi_ierror) - workgrid(:,nlev) = recvbuffer - enddo - deallocate(recvbuffer) - else - ! send one level of interpolated data to root task. - if (.not. allocated(sendbuffer)) allocate(sendbuffer(gfs_grid%ncoords)) - sendbuffer(:) = outvar2%var(:) - call mpi_send(sendbuffer,gfs_grid%ncoords,mpi_real,& - 0,1,mpi_comm_world,mpi_errorstatus,mpi_ierror) - endif - - ! Write to NEMSIO file. - - if(mpi_procid .eq. mpi_masternode) then - - do j = 1, nczdim - - call gfs_nems_write('3d',workgrid(:,nczdim - j + 1), & - & var_info(k2+1)%nems_name,var_info(k2+1)%nems_levtyp, & - & j) - end do ! do j = 1, nczdim - - end if ! if(mpi_procid .eq. mpi_masternode) - endif - - ! wait here - - call mpi_barrier(mpi_comm_world,mpi_ierror) - - end if ! if(var_info(k2)%ndims .eq. 3) - if(var_info(k2)%itrptyp .eq. 'vector') then ! skip v record here - k2=k2+1 - endif - k2=k2+1 - end do ! do k = 1, ntvars - - ! Wait here. - - call mpi_barrier(mpi_comm_world,mpi_ierror) - - ! Finalize and cleanup - - if(mpi_procid .eq. mpi_masternode) then - call gfs_nems_finalize() - end if - call mpi_barrier(mpi_comm_world,mpi_ierror) - if(allocated(workgrid)) deallocate(workgrid) - - end do ! do l = 1, nctdim - - -!===================================================================== - - end subroutine fv3_regrid_nemsio - - !======================================================================= - - ! fv3_regrid_initialize.f90: - - !----------------------------------------------------------------------- - - subroutine fv3_regrid_initialize(grid) - - ! Define variables passed to routine - - implicit none - type(analysis_grid) :: grid - - !===================================================================== - - ! Define local variables - - call netcdfio_dimension(grid%filename,'grid_xt',grid%nx) - call netcdfio_dimension(grid%filename,'grid_yt',grid%ny) - if (n3dvar > 0) then - call netcdfio_dimension(grid%filename,'pfull',grid%nz) - else - grid%nz = 0 - endif - call netcdfio_dimension(grid%filename,'time',grid%ntime) - - !===================================================================== - - end subroutine fv3_regrid_initialize - - !======================================================================= - - ! fv3_grid_read.f90: - - !----------------------------------------------------------------------- - - subroutine fv3_grid_read(anlygrd,varname,is_2d,is_3d) - - ! Define variables passed to subroutine - - type(analysis_grid) :: anlygrd(ngrids) - character(len=20) :: varname - logical :: is_2d - logical :: is_3d - - ! Define counting variables - - integer :: i, j, k - - !===================================================================== - - ! Loop through local variable - - do k = 1, ngrids - - ! Check local variable and proceed accordingly - - if(debug) write(6,500) ncrec, k - if(is_2d) then - - ! Define local variables - - ! orog and psfc are in 3d file. - if (trim(varname) == 'orog' .or. trim(varname) == 'psfc') then - call netcdfio_values_2d(anlygrd(k)%filename,varname, & - & fv3_var_2d(k,:,:)) - else - call netcdfio_values_2d(anlygrd(k)%filename2d,varname, & - & fv3_var_2d(k,:,:)) - endif - - end if ! if(is_2d) - - ! Check local variable and proceed accordingly - - if(is_3d) then - - ! 
Define local variables - - call netcdfio_values_3d(anlygrd(k)%filename,varname, & - & fv3_var_3d(k,:,:,:)) - - end if ! if(is_3d) - - end do ! do k = 1, ngrids - - !===================================================================== - - ! Define format statements - -500 format('FV3_GRID_READ: Time record = ', i6, '; Cubed sphere face = ', & - & i1,'.') - - !===================================================================== - - end subroutine fv3_grid_read - - !======================================================================= - - ! fv3_grid_fhour.f90: - - !----------------------------------------------------------------------- - - subroutine fv3_grid_fhour(grid,fhour) - - ! Define variables passed to routine - - implicit none - type(analysis_grid) :: grid - integer :: fhour - - ! Define variables computed within routine - - real(nemsio_realkind) :: workgrid(grid%ntime) - real(nemsio_realkind) :: start_jday - real(nemsio_realkind) :: fcst_jday - integer :: year - integer :: month - integer :: day - integer :: hour - integer :: minute - integer :: second, iw3jdn - character(len=80) timeunits - - !===================================================================== - - ! Define local variables - - read(forecast_timestamp(1:4), '(i4)') year - read(forecast_timestamp(5:6), '(i2)') month - read(forecast_timestamp(7:8), '(i2)') day - read(forecast_timestamp(9:10),'(i2)') hour - minute = 0; second = 0 - - ! Compute local variables - - ! 'flux day' (days since dec 31 1900) - !call date2wnday(start_jday,year,month,day) - ! same as above, but valid after 2099 - start_jday=real(iw3jdn(year,month,day)-iw3jdn(1900,12,31)) - start_jday = start_jday + real(hour)/24.0 + real(minute)/1440.0 + & - & real(second)/86400.0 - - ! Define local variables - - call netcdfio_values_1d(grid%filename,'time',workgrid) - call netcdfio_variable_attr(grid%filename,'time','units',timeunits) - - ! Compute local variables - - ! ncrec is a global variable in the netcdfio-interface module - if (timeunits(1:4) == 'days') then - fcst_jday = start_jday + workgrid(ncrec) - else if (timeunits(1:5) == 'hours') then - fcst_jday = start_jday + workgrid(ncrec)/24. - else if (timeunits(1:7) == 'seconds') then - fcst_jday = start_jday + workgrid(ncrec)/86400.0 - else - print *,'unrecognized time units',trim(timeunits) - call mpi_interface_terminate() - stop - endif - fhour = nint((86400*(fcst_jday - start_jday))/3600.0) - - !===================================================================== - - end subroutine fv3_grid_fhour - -! SUBROUTINE DATE2WNDAY(WDAY, IYR,MON,IDY) -! IMPLICIT NONE -! INTEGER IYR,MON,IDY -! REAL WDAY -!! -!!********** -!!* -!! 1) CONVERT DATE INTO 'FLUX DAY'. -!! -!! 2) THE 'FLUX DAY' IS THE NUMBER OF DAYS SINCE 001/1901 (WHICH IS -!! FLUX DAY 1.0). -!! FOR EXAMPLE: -!! A) IYR=1901,MON=1,IDY=1, REPRESENTS 0000Z HRS ON 01/01/1901 -!! SO WDAY WOULD BE 1.0. -!! A) IYR=1901,MON=1,IDY=2, REPRESENTS 0000Z HRS ON 02/01/1901 -!! SO WDAY WOULD BE 2.0. -!! YEAR MUST BE NO LESS THAN 1901.0, AND NO GREATER THAN 2099.0. -!! NOTE THAT YEAR 2000 IS A LEAP YEAR (BUT 1900 AND 2100 ARE NOT). -!! -!! 3) ALAN J. WALLCRAFT, NAVAL RESEARCH LABORATORY, JULY 2002. -!!* -!!********** -!! -! INTEGER NLEAP -! REAL WDAY1 -! REAL MONTH(13) -! DATA MONTH / 0, 31, 59, 90, 120, 151, 181, & -! 212, 243, 273, 304, 334, 365 / -!! FIND THE RIGHT YEAR. -! NLEAP = (IYR-1901)/4 -! WDAY = 365.0*(IYR-1901) + NLEAP + MONTH(MON) + IDY -! IF (MOD(IYR,4).EQ.0 .AND. MON.GT.2) THEN -! WDAY = WDAY + 1.0 -! ENDIF -! 
END SUBROUTINE DATE2WNDAY - - !======================================================================= - -end module fv3_interface diff --git a/sorc/regrid_nemsio.fd/gfs_nems_interface.f90 b/sorc/regrid_nemsio.fd/gfs_nems_interface.f90 deleted file mode 100644 index aa1305dc01..0000000000 --- a/sorc/regrid_nemsio.fd/gfs_nems_interface.f90 +++ /dev/null @@ -1,595 +0,0 @@ -module gfs_nems_interface - - !======================================================================= - - ! Define associated modules and subroutines - - !----------------------------------------------------------------------- - - use constants - use kinds - - !----------------------------------------------------------------------- - - use interpolation_interface - use mpi_interface - use namelist_def - use nemsio_module - use netcdfio_interface - use variable_interface - - !----------------------------------------------------------------------- - - implicit none - - !----------------------------------------------------------------------- - - ! Define all data and structure types for routine; these variables - ! are variables required by the subroutines within this module - - type gfs_grid - real(r_kind), dimension(:,:), allocatable :: rlon - real(r_kind), dimension(:,:), allocatable :: rlat - integer :: ncoords - integer :: nlons - integer :: nlats - integer :: nz - end type gfs_grid ! type gfs_grid - - type nemsio_meta - character(nemsio_charkind), dimension(:), allocatable :: recname - character(nemsio_charkind), dimension(:), allocatable :: reclevtyp - character(nemsio_charkind), dimension(:), allocatable :: variname - character(nemsio_charkind), dimension(:), allocatable :: varr8name - character(nemsio_charkind), dimension(:), allocatable :: aryiname - character(nemsio_charkind), dimension(:), allocatable :: aryr8name - character(nemsio_charkind8) :: gdatatype - character(nemsio_charkind8) :: modelname - real(nemsio_realkind), dimension(:,:,:), allocatable :: vcoord - real(nemsio_realkind), dimension(:), allocatable :: lon - real(nemsio_realkind), dimension(:), allocatable :: lat - integer(nemsio_intkind), dimension(:,:), allocatable :: aryival - integer(nemsio_intkind), dimension(:), allocatable :: reclev - integer(nemsio_intkind), dimension(:), allocatable :: varival - integer(nemsio_intkind), dimension(:), allocatable :: aryilen - integer(nemsio_intkind), dimension(:), allocatable :: aryr8len - integer(nemsio_intkind) :: idate(7) - integer(nemsio_intkind) :: version - integer(nemsio_intkind) :: nreo_vc - integer(nemsio_intkind) :: nrec - integer(nemsio_intkind) :: nmeta - integer(nemsio_intkind) :: nmetavari - integer(nemsio_intkind) :: nmetaaryi - integer(nemsio_intkind) :: nfhour - integer(nemsio_intkind) :: nfminute - integer(nemsio_intkind) :: nfsecondn - integer(nemsio_intkind) :: nfsecondd - integer(nemsio_intkind) :: jcap - integer(nemsio_intkind) :: dimx - integer(nemsio_intkind) :: dimy - integer(nemsio_intkind) :: dimz - integer(nemsio_intkind) :: nframe - integer(nemsio_intkind) :: nsoil - integer(nemsio_intkind) :: ntrac - integer(nemsio_intkind) :: ncldt - integer(nemsio_intkind) :: idvc - integer(nemsio_intkind) :: idsl - integer(nemsio_intkind) :: idvm - integer(nemsio_intkind) :: idrt - integer(nemsio_intkind) :: fhour - end type nemsio_meta ! type nemsio_meta - - !----------------------------------------------------------------------- - - ! 
Define global variables - - type(nemsio_gfile) :: gfile2d,gfile3d - integer :: nemsio_iret - - !----------------------------------------------------------------------- - - ! Define interfaces and attributes for module routines - - private - public :: gfs_grid_initialize - public :: gfs_grid_cleanup - public :: gfs_grid - public :: gfs_nems_meta_initialization - public :: gfs_nems_meta_cleanup - public :: gfs_nems_initialize - public :: gfs_nems_finalize - public :: gfs_nems_write - public :: nemsio_meta - -contains - - !======================================================================= - - ! gfs_nems_write.f90: - - !----------------------------------------------------------------------- - - subroutine gfs_nems_write(c2dor3d,nems_data,nems_varname,nems_levtyp,nems_lev) - - ! Define variables passed to routine - - character(nemsio_charkind) :: nems_varname - character(nemsio_charkind) :: nems_levtyp - real(nemsio_realkind) :: nems_data(:) - integer(nemsio_intkind) :: nems_lev - character(len=2) :: c2dor3d - - !===================================================================== - - ! Define local variables - - if (c2dor3d == '2d') then - call nemsio_writerecv(gfile2d,trim(adjustl(nems_varname)),levtyp= & - & trim(adjustl(nems_levtyp)),lev=nems_lev,data=nems_data, & - & iret=nemsio_iret) - else if (c2dor3d == '3d') then - call nemsio_writerecv(gfile3d,trim(adjustl(nems_varname)),levtyp= & - & trim(adjustl(nems_levtyp)),lev=nems_lev,data=nems_data, & - & iret=nemsio_iret) - else - nemsio_iret=-99 - endif - - ! Check local variable and proceed accordingly - - if(debug) write(6,500) c2dor3d,trim(adjustl(nems_varname)), nemsio_iret, & - & nems_lev, minval(nems_data), maxval(nems_data) - - !===================================================================== - - ! Define format statements - -500 format('GFS_NEMS_WRITE',a2,': NEMS I/O name = ', a, '; writerecv return ', & - & 'code = ', i5,'; level = ', i3, '; (min,max) = (', f13.5,f13.5, & - & ').') - if (nemsio_iret /= 0) then - print *,'nemsio_writerecv failed, stopping...' - call mpi_interface_terminate() - stop - endif - - !===================================================================== - - end subroutine gfs_nems_write - - !======================================================================= - - ! gfs_nems_meta_initialization.f90: - - !----------------------------------------------------------------------- - - subroutine gfs_nems_meta_initialization(meta_nemsio,var_info,grid) - - ! Define variables passed to routine - - type(nemsio_meta) :: meta_nemsio - type(varinfo) :: var_info(:) - type(gfs_grid) :: grid - - ! Define variables computed within routine - - integer :: offset - integer :: n2dvar - integer :: n3dvar - - ! Define counting variables - - integer :: i, j, k - - !===================================================================== - - ! Allocate memory for local variables - - if(.not. allocated(meta_nemsio%recname)) & - & allocate(meta_nemsio%recname(meta_nemsio%nrec)) - if(.not. allocated(meta_nemsio%reclevtyp)) & - & allocate(meta_nemsio%reclevtyp(meta_nemsio%nrec)) - if(.not. allocated(meta_nemsio%reclev)) & - & allocate(meta_nemsio%reclev(meta_nemsio%nrec)) - if(.not. allocated(meta_nemsio%variname)) & - & allocate(meta_nemsio%variname(meta_nemsio%nmetavari)) - if(.not. allocated(meta_nemsio%varival)) & - & allocate(meta_nemsio%varival(meta_nemsio%nmetavari)) - if(.not. allocated(meta_nemsio%aryiname)) & - & allocate(meta_nemsio%aryiname(meta_nemsio%nmetaaryi)) - if(.not. 
allocated(meta_nemsio%aryilen)) & - & allocate(meta_nemsio%aryilen(meta_nemsio%nmetaaryi)) - if(.not. allocated(meta_nemsio%vcoord)) & - & allocate(meta_nemsio%vcoord(meta_nemsio%dimz+1,3,2)) - if(.not. allocated(meta_nemsio%aryival)) & - & allocate(meta_nemsio%aryival(grid%nlats/2, & - & meta_nemsio%nmetaaryi)) - if(.not. allocated(meta_nemsio%lon)) & - & allocate(meta_nemsio%lon(grid%ncoords)) - if(.not. allocated(meta_nemsio%lat)) & - & allocate(meta_nemsio%lat(grid%ncoords)) - meta_nemsio%vcoord(:,:,:)=0.0 - ! Define local variables - - meta_nemsio%lon = & - & reshape(grid%rlon,(/grid%ncoords/)) - meta_nemsio%lat = & - & reshape(grid%rlat,(/grid%ncoords/)) - meta_nemsio%aryilen(1) = grid%nlats/2 - meta_nemsio%aryiname(1) = 'lpl' - meta_nemsio%aryival(1:grid%nlats/2,1) = grid%nlons - k = 0 - - ! Loop through local variable - offset = 0 - n3dvar = 0 - n2dvar = 0 - - - do i = 1, size(var_info) - - ! Check local variable and proceed accordingly - - if(var_info(i)%ndims .eq. 2) then - - ! Define local variables - - k = k + 1 - meta_nemsio%reclev(k) = var_info(i)%nems_lev - meta_nemsio%recname(k) = trim(adjustl(var_info(i)%nems_name)) - meta_nemsio%reclevtyp(k) = trim(adjustl(var_info(i)%nems_levtyp)) - n2dvar = k - - else if(var_info(i)%ndims .eq. 3) then - - ! Loop through local variable - - meta_nemsio%variname(1) = 'LEVS' - meta_nemsio%varival(1) = meta_nemsio%dimz - meta_nemsio%variname(2) = 'NVCOORD' - meta_nemsio%varival(2) = 2 - meta_nemsio%variname(3) = 'IVS' - meta_nemsio%varival(3) = 200509 - do k = 1, meta_nemsio%dimz - - ! Define local variables - - meta_nemsio%reclev(k+n2dvar+offset) = k - meta_nemsio%recname(k+n2dvar+offset) = & - & trim(adjustl(var_info(i)%nems_name)) - meta_nemsio%reclevtyp(k+n2dvar+offset) = & - & trim(adjustl(var_info(i)%nems_levtyp)) - - end do ! do k = 1, nczdim - - ! Define local variables - - n3dvar = n3dvar + 1 - offset = nczdim*n3dvar - - end if ! if(var_info(i)%ndims .eq. 3) - - end do ! do i = 1, size(var_info) - - !===================================================================== - - end subroutine gfs_nems_meta_initialization - - !======================================================================= - - ! gfs_nems_meta_cleanup.f90: - - !----------------------------------------------------------------------- - - subroutine gfs_nems_meta_cleanup(meta_nemsio2d,meta_nemsio3d) - - ! Define variables passed to routine - - type(nemsio_meta) :: meta_nemsio2d,meta_nemsio3d - - !===================================================================== - - ! 
Deallocate memory for local variables - - if(allocated(meta_nemsio2d%recname)) & - & deallocate(meta_nemsio2d%recname) - if(allocated(meta_nemsio2d%reclevtyp)) & - & deallocate(meta_nemsio2d%reclevtyp) - if(allocated(meta_nemsio2d%reclev)) & - & deallocate(meta_nemsio2d%reclev) - if(allocated(meta_nemsio2d%variname)) & - & deallocate(meta_nemsio2d%variname) - if(allocated(meta_nemsio2d%aryiname)) & - & deallocate(meta_nemsio2d%aryiname) - if(allocated(meta_nemsio2d%aryival)) & - & deallocate(meta_nemsio2d%aryival) - if(allocated(meta_nemsio2d%aryilen)) & - & deallocate(meta_nemsio2d%aryilen) - if(allocated(meta_nemsio2d%vcoord)) & - & deallocate(meta_nemsio2d%vcoord) - if(allocated(meta_nemsio2d%lon)) & - & deallocate(meta_nemsio2d%lon) - if(allocated(meta_nemsio2d%lat)) & - & deallocate(meta_nemsio2d%lat) - if(allocated(meta_nemsio3d%recname)) & - & deallocate(meta_nemsio3d%recname) - if(allocated(meta_nemsio3d%reclevtyp)) & - & deallocate(meta_nemsio3d%reclevtyp) - if(allocated(meta_nemsio3d%reclev)) & - & deallocate(meta_nemsio3d%reclev) - if(allocated(meta_nemsio3d%variname)) & - & deallocate(meta_nemsio3d%variname) - if(allocated(meta_nemsio3d%aryiname)) & - & deallocate(meta_nemsio3d%aryiname) - if(allocated(meta_nemsio3d%aryival)) & - & deallocate(meta_nemsio3d%aryival) - if(allocated(meta_nemsio3d%aryilen)) & - & deallocate(meta_nemsio3d%aryilen) - if(allocated(meta_nemsio3d%vcoord)) & - & deallocate(meta_nemsio3d%vcoord) - if(allocated(meta_nemsio3d%lon)) & - & deallocate(meta_nemsio3d%lon) - if(allocated(meta_nemsio3d%lat)) & - & deallocate(meta_nemsio3d%lat) - - !===================================================================== - - end subroutine gfs_nems_meta_cleanup - - !======================================================================= - - ! gfs_nems_initialize.f90: - - !----------------------------------------------------------------------- - - subroutine gfs_nems_initialize(meta_nemsio2d, meta_nemsio3d) - - ! Define variables passed to routine - - type(nemsio_meta) :: meta_nemsio2d,meta_nemsio3d - character(len=500) :: filename - character(len=7) :: suffix - - !===================================================================== - - ! 
Define local variables - - call nemsio_init(iret=nemsio_iret) - write(suffix,500) meta_nemsio2d%nfhour - filename = trim(adjustl(datapathout2d))//suffix - meta_nemsio2d%gdatatype = trim(adjustl(nemsio_opt2d)) - meta_nemsio3d%gdatatype = trim(adjustl(nemsio_opt3d)) - call nemsio_open(gfile2d,trim(adjustl(filename)),'write', & - & iret=nemsio_iret, & - & modelname=trim(adjustl(meta_nemsio2d%modelname)), & - & version=meta_nemsio2d%version, & - & gdatatype=meta_nemsio2d%gdatatype, & - & jcap=meta_nemsio2d%jcap, & - & dimx=meta_nemsio2d%dimx, & - & dimy=meta_nemsio2d%dimy, & - & dimz=meta_nemsio2d%dimz, & - & idate=meta_nemsio2d%idate, & - & nrec=meta_nemsio2d%nrec, & - & nframe=meta_nemsio2d%nframe, & - & idrt=meta_nemsio2d%idrt, & - & ncldt=meta_nemsio2d%ncldt, & - & idvc=meta_nemsio2d%idvc, & - & idvm=meta_nemsio2d%idvm, & - & idsl=meta_nemsio2d%idsl, & - & nfhour=meta_nemsio2d%fhour, & - & nfminute=meta_nemsio2d%nfminute, & - & nfsecondn=meta_nemsio2d%nfsecondn, & - & nfsecondd=meta_nemsio2d%nfsecondd, & - & extrameta=.true., & - & nmetaaryi=meta_nemsio2d%nmetaaryi, & - & recname=meta_nemsio2d%recname, & - & reclevtyp=meta_nemsio2d%reclevtyp, & - & reclev=meta_nemsio2d%reclev, & - & aryiname=meta_nemsio2d%aryiname, & - & aryilen=meta_nemsio2d%aryilen, & - & aryival=meta_nemsio2d%aryival, & - & vcoord=meta_nemsio2d%vcoord) - write(suffix,500) meta_nemsio3d%nfhour - filename = trim(adjustl(datapathout3d))//suffix - call nemsio_open(gfile3d,trim(adjustl(filename)),'write', & - & iret=nemsio_iret, & - & modelname=trim(adjustl(meta_nemsio3d%modelname)), & - & version=meta_nemsio3d%version, & - & gdatatype=meta_nemsio3d%gdatatype, & - & jcap=meta_nemsio3d%jcap, & - & dimx=meta_nemsio3d%dimx, & - & dimy=meta_nemsio3d%dimy, & - & dimz=meta_nemsio3d%dimz, & - & idate=meta_nemsio3d%idate, & - & nrec=meta_nemsio3d%nrec, & - & nframe=meta_nemsio3d%nframe, & - & idrt=meta_nemsio3d%idrt, & - & ncldt=meta_nemsio3d%ncldt, & - & idvc=meta_nemsio3d%idvc, & - & idvm=meta_nemsio3d%idvm, & - & idsl=meta_nemsio3d%idsl, & - & nfhour=meta_nemsio3d%fhour, & - & nfminute=meta_nemsio3d%nfminute, & - & nfsecondn=meta_nemsio3d%nfsecondn, & - & nfsecondd=meta_nemsio3d%nfsecondd, & - & extrameta=.true., & - & nmetaaryi=meta_nemsio3d%nmetaaryi, & - & recname=meta_nemsio3d%recname, & - & reclevtyp=meta_nemsio3d%reclevtyp, & - & reclev=meta_nemsio3d%reclev, & - & aryiname=meta_nemsio3d%aryiname, & - & aryilen=meta_nemsio3d%aryilen, & - & aryival=meta_nemsio3d%aryival, & - & variname=meta_nemsio3d%variname, & - & varival=meta_nemsio3d%varival, & - & nmetavari=meta_nemsio3d%nmetavari, & - & vcoord=meta_nemsio3d%vcoord) - - !===================================================================== - - ! Define format statements - -500 format('.fhr',i3.3) - - !===================================================================== - - end subroutine gfs_nems_initialize - - !======================================================================= - - ! gfs_nems_finalize.f90: - - !----------------------------------------------------------------------- - - subroutine gfs_nems_finalize() - - !===================================================================== - - ! Define local variables - - call nemsio_close(gfile2d,iret=nemsio_iret) - call nemsio_close(gfile3d,iret=nemsio_iret) - - !===================================================================== - - end subroutine gfs_nems_finalize - - !======================================================================= - - ! 
gfs_grid_initialize.f90: - - !----------------------------------------------------------------------- - - subroutine gfs_grid_initialize(grid) - - ! Define variables passed to routine - - type(gfs_grid) :: grid - - ! Define variables computed within routine - - real(r_kind), dimension(:), allocatable :: slat - real(r_kind), dimension(:), allocatable :: wlat - real(r_kind), dimension(:), allocatable :: workgrid - - ! Define counting variables - - integer :: i, j, k - - !===================================================================== - - ! Check local variable and proceed accordingly - - if(mpi_procid .eq. mpi_masternode) then - - ! Define local variables - - call init_constants_derived() - - ! Check local variable and proceed accordingly - - ! Define local variables - - grid%nlons = nlons - grid%nlats = nlats - - end if ! if(mpi_procid .eq. mpi_masternode) - - ! Define local variables - - call mpi_barrier(mpi_comm_world,mpi_ierror) - - ! Broadcast all necessary variables to compute nodes - - call mpi_bcast(grid%nlons,1,mpi_integer,mpi_masternode,mpi_comm_world, & - & mpi_ierror) - call mpi_bcast(grid%nlats,1,mpi_integer,mpi_masternode,mpi_comm_world, & - & mpi_ierror) - - ! Allocate memory for local variables - - if(.not. allocated(grid%rlon)) & - & allocate(grid%rlon(grid%nlons,grid%nlats)) - if(.not. allocated(grid%rlat)) & - & allocate(grid%rlat(grid%nlons,grid%nlats)) - - ! Check local variable and proceed accordingly - - if(mpi_procid .eq. mpi_masternode) then - - ! Allocate memory for local variables - - if(.not. allocated(slat)) allocate(slat(grid%nlats)) - if(.not. allocated(wlat)) allocate(wlat(grid%nlats)) - if(.not. allocated(workgrid)) allocate(workgrid(grid%nlats)) - - ! Compute local variables - - grid%ncoords = grid%nlons*grid%nlats - call splat(grid%nlats,slat,wlat) - workgrid = acos(slat) - pi/2.0 - - ! Loop through local variable - - do j = 1, grid%nlats - - ! Loop through local variable - - do i = 1, grid%nlons - - ! Compute local variables - - grid%rlon(i,j) = (i-1)*(360./grid%nlons)*deg2rad - grid%rlat(i,j) = workgrid(grid%nlats - j + 1) - - end do ! do i = 1, grid%nlons - - end do ! do j = 1, grid%nlats - - ! Deallocate memory for local variables - - if(allocated(slat)) deallocate(slat) - if(allocated(wlat)) deallocate(wlat) - if(allocated(workgrid)) deallocate(workgrid) - - endif ! if(mpi_procid .eq. mpi_masternode) - - ! Broadcast all necessary variables to compute nodes - - call mpi_bcast(grid%ncoords,1,mpi_integer,mpi_masternode, & - & mpi_comm_world,mpi_ierror) - call mpi_bcast(grid%rlon,grid%ncoords,mpi_real,mpi_masternode, & - & mpi_comm_world,mpi_ierror) - call mpi_bcast(grid%rlat,grid%ncoords,mpi_real,mpi_masternode, & - & mpi_comm_world,mpi_ierror) - - !===================================================================== - - end subroutine gfs_grid_initialize - - !======================================================================= - - ! gfs_grid_cleanup.f90: - - !----------------------------------------------------------------------- - - subroutine gfs_grid_cleanup(grid) - - ! Define variables passed to routine - - type(gfs_grid) :: grid - - !===================================================================== - - ! 
Deallocate memory for local variables - - if(allocated(grid%rlon)) deallocate(grid%rlon) - if(allocated(grid%rlat)) deallocate(grid%rlat) - - !===================================================================== - - end subroutine gfs_grid_cleanup - - !======================================================================= - -end module gfs_nems_interface diff --git a/sorc/regrid_nemsio.fd/interpolation_interface.f90 b/sorc/regrid_nemsio.fd/interpolation_interface.f90 deleted file mode 100644 index 775d1a7cc3..0000000000 --- a/sorc/regrid_nemsio.fd/interpolation_interface.f90 +++ /dev/null @@ -1,335 +0,0 @@ -module interpolation_interface - - !======================================================================= - - ! Define associated modules and subroutines - - !----------------------------------------------------------------------- - - use constants - use kinds - - !----------------------------------------------------------------------- - - use namelist_def - use netcdf - use netcdfio_interface - use mpi_interface - - !----------------------------------------------------------------------- - - implicit none - - !----------------------------------------------------------------------- - - ! Define interfaces and attributes for module routines - - private - public :: interpolation_initialize_gridvar - public :: interpolation_initialize_esmf - public :: interpolation_define_gridvar - public :: interpolation_define_gridvar_out - public :: interpolation_esmf - public :: interpolation_esmf_vect - public :: gridvar - public :: esmfgrid - - !----------------------------------------------------------------------- - - ! Define all data and structure types for routine; these variables - ! are variables required by the subroutines within this module - - type esmfgrid - character(len=500) :: filename - real(r_double), dimension(:), allocatable :: s - integer, dimension(:), allocatable :: col - integer, dimension(:), allocatable :: row - real(r_double), dimension(:), allocatable :: inlats - real(r_double), dimension(:), allocatable :: inlons - real(r_double), dimension(:), allocatable :: outlats - real(r_double), dimension(:), allocatable :: outlons - integer :: n_s,n_a,n_b - end type esmfgrid ! type esmfgrid - - type gridvar - logical, dimension(:), allocatable :: check - real(r_double), dimension(:), allocatable :: var - integer :: ncoords - integer :: nx - integer :: ny - end type gridvar ! type gridvar - - ! Define global variables - - integer :: ncfileid - integer :: ncvarid - integer :: ncdimid - integer :: ncstatus - - !----------------------------------------------------------------------- - -contains - - !======================================================================= - - subroutine interpolation_define_gridvar(grid,xdim,ydim,ngrid,input) -! collapses the cubed grid into a 1-d array -! Define variables passed to routine - - use nemsio_module, only: nemsio_realkind - integer,intent(in) :: ngrid - integer,intent(in) :: xdim,ydim - type(gridvar),intent(inout) :: grid - real(nemsio_realkind),intent(in) :: input(ngrid,xdim,ydim) - -! locals - integer :: i,j,k,ncount - - ncount = 1 - do k = 1, ngrid - do j = 1, ydim - do i = 1, xdim - grid%var(ncount) = input(k,i,j) - ncount = ncount + 1 - end do - end do - end do - - - end subroutine interpolation_define_gridvar - -!======================================================================= - - - subroutine interpolation_define_gridvar_out(grid,xdim,ydim,output) -! make a 2-d array for output - ! 
Define variables passed to routine - - integer,intent(in) :: xdim,ydim - type(gridvar),intent(in) :: grid - real(r_double),intent(out) :: output(xdim,ydim) - -! locals - integer :: i,j,ncount - - ncount = 1 - do j = 1, ydim - do i = 1, xdim - output(j,i) = grid%var(ncount) - ncount = ncount + 1 - enddo - enddo - - end subroutine interpolation_define_gridvar_out - - !======================================================================= - - subroutine interpolation_initialize_gridvar(grid) - - ! Define variables passed to routine - - type(gridvar) :: grid - - allocate(grid%var(grid%ncoords)) - - end subroutine interpolation_initialize_gridvar - - -!======================================================================= - - subroutine interpolation_initialize_esmf(grid) - - ! Define variables passed to routine - - type(esmfgrid) :: grid - - !===================================================================== - - ! Define local variables - - ncstatus = nf90_open(path=trim(adjustl(grid%filename)),mode= & - & nf90_nowrite,ncid=ncfileid) - ncstatus = nf90_inq_dimid(ncfileid,'n_s',ncdimid) - ncstatus = nf90_inquire_dimension(ncfileid,ncdimid,len=grid%n_s) - ncstatus = nf90_inq_dimid(ncfileid,'n_a',ncdimid) - ncstatus = nf90_inquire_dimension(ncfileid,ncdimid,len=grid%n_a) - ncstatus = nf90_inq_dimid(ncfileid,'n_b',ncdimid) - ncstatus = nf90_inquire_dimension(ncfileid,ncdimid,len=grid%n_b) - - - ! Allocate memory for local variables - - allocate(grid%s(grid%n_s)) - allocate(grid%row(grid%n_s)) - allocate(grid%col(grid%n_s)) - - allocate(grid%inlats(grid%n_a)) - allocate(grid%inlons(grid%n_a)) - allocate(grid%outlats(grid%n_b)) - allocate(grid%outlons(grid%n_b)) - - ncstatus = nf90_inq_varid(ncfileid,'col',ncvarid) - ncstatus = nf90_get_var(ncfileid,ncvarid,grid%col) - ncstatus = nf90_inq_varid(ncfileid,'row',ncvarid) - ncstatus = nf90_get_var(ncfileid,ncvarid,grid%row) - ncstatus = nf90_inq_varid(ncfileid,'S',ncvarid) - ncstatus = nf90_get_var(ncfileid,ncvarid,grid%s) - ncstatus = nf90_inq_varid(ncfileid,'yc_a',ncvarid) - ncstatus = nf90_get_var(ncfileid,ncvarid,grid%inlats) - ncstatus = nf90_inq_varid(ncfileid,'xc_a',ncvarid) - ncstatus = nf90_get_var(ncfileid,ncvarid,grid%inlons) - where(grid%inlons .LT. 0.0) - grid%inlons=360+grid%inlons - endwhere - ncstatus = nf90_inq_varid(ncfileid,'yc_b',ncvarid) - ncstatus = nf90_get_var(ncfileid,ncvarid,grid%outlats) - ncstatus = nf90_inq_varid(ncfileid,'xc_b',ncvarid) - ncstatus = nf90_get_var(ncfileid,ncvarid,grid%outlons) - ncstatus = nf90_close(ncfileid) - - !===================================================================== - - end subroutine interpolation_initialize_esmf - - -!======================================================================= - - - subroutine interpolation_esmf(invar,outvar,grid,is_nrstnghbr) - - ! Define variables passed to routine - - type(gridvar) :: invar - type(gridvar) :: outvar - logical :: is_nrstnghbr - - type(esmfgrid) :: grid - - integer :: i, j, k, l - - outvar%var = dble(0.0) - - if(is_nrstnghbr) then - do i = 1, grid%n_s - outvar%var(grid%row(i)) = invar%var(grid%col(i)) - enddo - else - do i = 1, grid%n_s - outvar%var(grid%row(i)) = outvar%var(grid%row(i)) + grid%s(i)*invar%var(grid%col(i)) - end do - end if - - end subroutine interpolation_esmf -!===================================================================== - - subroutine interpolation_esmf_vect(invaru,invarv,grid,outvaru,outvarv) - - ! 
Define variables passed to routine - - type(gridvar) :: invaru,invarv - type(gridvar) :: outvaru,outvarv - type(esmfgrid) :: grid - - integer :: i, j, k, l - real(r_double) :: cxy,sxy,urot,vrot - - - outvaru%var = dble(0.0) - outvarv%var = dble(0.0) - - do i = 1, grid%n_s - CALL MOVECT(grid%inlats(grid%col(i)),grid%inlons(grid%col(i)),& - grid%outlats(grid%row(i)),grid%outlons(grid%row(i)),& - cxy,sxy) - urot=cxy*invaru%var(grid%col(i))-sxy*invarv%var(grid%col(i)) - vrot=sxy*invaru%var(grid%col(i))+cxy*invarv%var(grid%col(i)) - outvaru%var(grid%row(i)) = outvaru%var(grid%row(i)) + grid%s(i)*urot - outvarv%var(grid%row(i)) = outvarv%var(grid%row(i)) + grid%s(i)*vrot - - end do - - end subroutine interpolation_esmf_vect - -!===================================================================== - - SUBROUTINE MOVECT(FLAT,FLON,TLAT,TLON,CROT,SROT) -!$$$ SUBPROGRAM DOCUMENTATION BLOCK -! -! SUBPROGRAM: MOVECT MOVE A VECTOR ALONG A GREAT CIRCLE -! PRGMMR: IREDELL ORG: W/NMC23 DATE: 96-04-10 -! -! ABSTRACT: THIS SUBPROGRAM PROVIDES THE ROTATION PARAMETERS -! TO MOVE A VECTOR ALONG A GREAT CIRCLE FROM ONE -! POSITION TO ANOTHER WHILE CONSERVING ITS ORIENTATION -! WITH RESPECT TO THE GREAT CIRCLE. THESE ROTATION -! PARAMETERS ARE USEFUL FOR VECTOR INTERPOLATION. -! -! PROGRAM HISTORY LOG: -! 96-04-10 IREDELL -! 1999-04-08 IREDELL GENERALIZE PRECISION -! -! USAGE: CALL MOVECT(FLAT,FLON,TLAT,TLON,CROT,SROT) -! -! INPUT ARGUMENT LIST: -! FLAT - REAL LATITUDE IN DEGREES FROM WHICH TO MOVE THE VECTOR -! FLON - REAL LONGITUDE IN DEGREES FROM WHICH TO MOVE THE VECTOR -! TLAT - REAL LATITUDE IN DEGREES TO WHICH TO MOVE THE VECTOR -! TLON - REAL LONGITUDE IN DEGREES TO WHICH TO MOVE THE VECTOR -! -! OUTPUT ARGUMENT LIST: -! CROT - REAL CLOCKWISE VECTOR ROTATION COSINE -! SROT - REAL CLOCKWISE VECTOR ROTATION SINE -! (UTO=CROT*UFROM-SROT*VFROM; -! VTO=SROT*UFROM+CROT*VFROM) -! -! ATTRIBUTES: -! LANGUAGE: FORTRAN 90 -! -!$$$ - IMPLICIT NONE -! - INTEGER, PARAMETER :: KD=SELECTED_REAL_KIND(15,45) -! - REAL(KIND=r_double), INTENT(IN ) :: FLAT, FLON - REAL(KIND=r_double), INTENT(IN ) :: TLAT, TLON - REAL(KIND=r_double), INTENT( OUT) :: CROT, SROT -! - REAL(KIND=r_double), PARAMETER :: CRDLIM=0.9999999 - REAL(KIND=r_double), PARAMETER :: PI=3.14159265358979 - REAL(KIND=r_double), PARAMETER :: DPR=180./PI -! - REAL(KIND=r_double) :: CTLAT,STLAT,CFLAT,SFLAT - REAL(KIND=r_double) :: CDLON,SDLON,CRD - REAL(KIND=r_double) :: SRD2RN,STR,CTR,SFR,CFR -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -! COMPUTE COSINE OF THE RADIAL DISTANCE BETWEEN THE POINTS. - CTLAT=COS(TLAT/DPR) - STLAT=SIN(TLAT/DPR) -CFLAT=COS(FLAT/DPR) - SFLAT=SIN(FLAT/DPR) - CDLON=COS((FLON-TLON)/DPR) - SDLON=SIN((FLON-TLON)/DPR) - CRD=STLAT*SFLAT+CTLAT*CFLAT*CDLON -! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -! COMPUTE ROTATIONS AT BOTH POINTS WITH RESPECT TO THE GREAT CIRCLE -! AND COMBINE THEM TO GIVE THE TOTAL VECTOR ROTATION PARAMETERS. - IF(ABS(CRD).LE.CRDLIM) THEN - SRD2RN=-1/(1-CRD**2) - STR=CFLAT*SDLON - CTR=CFLAT*STLAT*CDLON-SFLAT*CTLAT - SFR=CTLAT*SDLON - CFR=CTLAT*SFLAT*CDLON-STLAT*CFLAT - CROT=SRD2RN*(CTR*CFR-STR*SFR) - SROT=SRD2RN*(CTR*SFR+STR*CFR) -! USE A DIFFERENT APPROXIMATION FOR NEARLY COINCIDENT POINTS. -! MOVING VECTORS TO ANTIPODAL POINTS IS AMBIGUOUS ANYWAY. - ELSE - CROT=CDLON - SROT=SDLON*STLAT - ENDIF -! 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - END SUBROUTINE MOVECT - - !======================================================================= - -end module interpolation_interface diff --git a/sorc/regrid_nemsio.fd/kinds.f90 b/sorc/regrid_nemsio.fd/kinds.f90 deleted file mode 100644 index 11c93b98e0..0000000000 --- a/sorc/regrid_nemsio.fd/kinds.f90 +++ /dev/null @@ -1,107 +0,0 @@ -! this module was extracted from the GSI version operational -! at NCEP in Dec. 2007. -module kinds -!$$$ module documentation block -! . . . . -! module: kinds -! prgmmr: treadon org: np23 date: 2004-08-15 -! -! abstract: Module to hold specification kinds for variable declaration. -! This module is based on (copied from) Paul vanDelst's -! type_kinds module found in the community radiative transfer -! model -! -! module history log: -! 2004-08-15 treadon -! -! Subroutines Included: -! -! Functions Included: -! -! remarks: -! The numerical data types defined in this module are: -! i_byte - specification kind for byte (1-byte) integer variable -! i_short - specification kind for short (2-byte) integer variable -! i_long - specification kind for long (4-byte) integer variable -! i_llong - specification kind for double long (8-byte) integer variable -! r_single - specification kind for single precision (4-byte) real variable -! r_double - specification kind for double precision (8-byte) real variable -! r_quad - specification kind for quad precision (16-byte) real variable -! -! i_kind - generic specification kind for default integer -! r_kind - generic specification kind for default floating point -! -! -! attributes: -! language: f90 -! machine: ibm RS/6000 SP -! -!$$$ end documentation block - implicit none - private - -! Integer type definitions below - -! Integer types - integer, parameter, public :: i_byte = selected_int_kind(1) ! byte integer - integer, parameter, public :: i_short = selected_int_kind(4) ! short integer - integer, parameter, public :: i_long = selected_int_kind(8) ! long integer - integer, parameter, private :: llong_t = selected_int_kind(16) ! llong integer - integer, parameter, public :: i_llong = max( llong_t, i_long ) - -! Expected 8-bit byte sizes of the integer kinds - integer, parameter, public :: num_bytes_for_i_byte = 1 - integer, parameter, public :: num_bytes_for_i_short = 2 - integer, parameter, public :: num_bytes_for_i_long = 4 - integer, parameter, public :: num_bytes_for_i_llong = 8 - -! Define arrays for default definition - integer, parameter, private :: num_i_kinds = 4 - integer, parameter, dimension( num_i_kinds ), private :: integer_types = (/ & - i_byte, i_short, i_long, i_llong /) - integer, parameter, dimension( num_i_kinds ), private :: integer_byte_sizes = (/ & - num_bytes_for_i_byte, num_bytes_for_i_short, & - num_bytes_for_i_long, num_bytes_for_i_llong /) - -! Default values -! **** CHANGE THE FOLLOWING TO CHANGE THE DEFAULT INTEGER TYPE KIND *** - integer, parameter, public :: default_integer = 3 ! 1=byte, - ! 2=short, - ! 3=long, - ! 4=llong - integer, parameter, public :: i_kind = integer_types( default_integer ) - integer, parameter, public :: num_bytes_for_i_kind = & - integer_byte_sizes( default_integer ) - - -! Real definitions below - -! Real types - integer, parameter, public :: r_single = selected_real_kind(6) ! single precision - integer, parameter, public :: r_double = selected_real_kind(15) ! double precision - integer, parameter, private :: quad_t = selected_real_kind(20) ! 
quad precision - integer, parameter, public :: r_quad = max( quad_t, r_double ) - -! Expected 8-bit byte sizes of the real kinds - integer, parameter, public :: num_bytes_for_r_single = 4 - integer, parameter, public :: num_bytes_for_r_double = 8 - integer, parameter, public :: num_bytes_for_r_quad = 16 - -! Define arrays for default definition - integer, parameter, private :: num_r_kinds = 3 - integer, parameter, dimension( num_r_kinds ), private :: real_kinds = (/ & - r_single, r_double, r_quad /) - integer, parameter, dimension( num_r_kinds ), private :: real_byte_sizes = (/ & - num_bytes_for_r_single, num_bytes_for_r_double, & - num_bytes_for_r_quad /) - -! Default values -! **** CHANGE THE FOLLOWING TO CHANGE THE DEFAULT REAL TYPE KIND *** - integer, parameter, public :: default_real = 1 ! 1=single, - ! 2=double, - ! 3=quad - integer, parameter, public :: r_kind = real_kinds( default_real ) - integer, parameter, public :: num_bytes_for_r_kind = & - real_byte_sizes( default_real ) - -end module kinds diff --git a/sorc/regrid_nemsio.fd/main.f90 b/sorc/regrid_nemsio.fd/main.f90 deleted file mode 100644 index f3dfe4ef09..0000000000 --- a/sorc/regrid_nemsio.fd/main.f90 +++ /dev/null @@ -1,92 +0,0 @@ -program regrid_nemsio_main - - !===================================================================== - - !$$$ PROGRAM DOCUMENTATION BLOCK - ! - ! ABSTRACT: - ! - ! This routine provides an interface between the National Oceanic - ! and Atmospheric Administration (NOAA) National Centers for - ! Environmental Prediction (NCEP) implemented NOAA Environmental - ! Modeling System (NEMS) input/output file format and the native - ! FV3 cubed sphere grid. - ! - ! NOTES: - ! - ! * Uses interpolation weights generated by - ! Earth-System Modeling Framework (ESMF) remapping utilities. - ! - ! PRGMMR: Winterbottom - ! ORG: ESRL/PSD1 - ! DATE: 2016-02-02 - ! - ! PROGRAM HISTORY LOG: - ! - ! 2016-02-02 Initial version. Henry R. Winterbottom - ! 2016-11-01 Modifed by Jeff Whitaker. - ! - !$$$ - - !===================================================================== - - ! Define associated modules and subroutines - - !--------------------------------------------------------------------- - - use kinds - - !--------------------------------------------------------------------- - - use mpi_interface - use fv3_interface - use namelist_def - use constants - - !--------------------------------------------------------------------- - - implicit none - - !===================================================================== - - ! Define variables computed within routine - - real(r_kind) :: exectime_start - real(r_kind) :: exectime_finish - - !===================================================================== - - ! Define local variables - - call mpi_interface_initialize() - call init_constants(.false.) - - if(mpi_procid .eq. mpi_masternode) then - - call cpu_time(exectime_start) - - end if - call mpi_barrier(mpi_comm_world,mpi_ierror) - - call namelistparams() - call fv3_regrid_nemsio() - - - if(mpi_procid .eq. mpi_masternode) then - - call cpu_time(exectime_finish) - write(6,500) exectime_finish - exectime_start - - end if ! if(mpi_procid .eq. mpi_masternode) - - call mpi_barrier(mpi_comm_world,mpi_ierror) - call mpi_interface_terminate() - - !===================================================================== - ! 
Define format statements - -500 format('MAIN: Execution time: ', f13.5, ' seconds.') - - !===================================================================== - -end program regrid_nemsio_main diff --git a/sorc/regrid_nemsio.fd/mpi_interface.f90 b/sorc/regrid_nemsio.fd/mpi_interface.f90 deleted file mode 100644 index 2e6c5c7a94..0000000000 --- a/sorc/regrid_nemsio.fd/mpi_interface.f90 +++ /dev/null @@ -1,89 +0,0 @@ -module mpi_interface - - !======================================================================= - - use kinds - - !----------------------------------------------------------------------- - - implicit none - - !----------------------------------------------------------------------- - - ! Define necessary include files - - include "mpif.h" - - !----------------------------------------------------------------------- - - ! Define global variables - - character :: mpi_nodename(mpi_max_processor_name) - character :: mpi_noderequest - logical :: abort_mpi - integer(kind=4), dimension(:), allocatable :: mpi_ranks - integer(kind=4) :: mpi_errorstatus(mpi_status_size) - integer(kind=4) :: mpi_masternode - integer(kind=4) :: mpi_slavenode - integer(kind=4) :: mpi_ierror - integer(kind=4) :: mpi_ierrorcode - integer(kind=4) :: mpi_procid - integer(kind=4) :: mpi_nprocs - integer(kind=4) :: mpi_node_source - integer(kind=4) :: mpi_node_destination - integer(kind=4) :: mpi_loopcount - integer(kind=4) :: mpi_request - integer(kind=4) :: mpi_group_user - integer(kind=4) :: mpi_group_nprocs - integer(kind=4) :: mpi_group_procid - integer(kind=4) :: mpi_group_begin - integer(kind=4) :: mpi_group_end - - !----------------------------------------------------------------------- - -contains - - !======================================================================= - - ! mpi_interface_initialize.f90: - - !----------------------------------------------------------------------- - - subroutine mpi_interface_initialize() - - !===================================================================== - - ! Define local variables - - call mpi_init(mpi_ierror) - call mpi_comm_rank(mpi_comm_world,mpi_procid,mpi_ierror) - call mpi_comm_size(mpi_comm_world,mpi_nprocs,mpi_ierror) - mpi_masternode = 0 - abort_mpi = .false. - - !===================================================================== - - end subroutine mpi_interface_initialize - - !======================================================================= - - ! mpi_interface_terminate.f90: - - !----------------------------------------------------------------------- - - subroutine mpi_interface_terminate() - - !===================================================================== - - ! Define local variables - - !call mpi_abort(mpi_comm_world,ierror_code,mpi_ierror) - call mpi_finalize(mpi_ierror) - - !===================================================================== - - end subroutine mpi_interface_terminate - - !======================================================================= - -end module mpi_interface diff --git a/sorc/regrid_nemsio.fd/namelist_def.f90 b/sorc/regrid_nemsio.fd/namelist_def.f90 deleted file mode 100644 index ff15a335f5..0000000000 --- a/sorc/regrid_nemsio.fd/namelist_def.f90 +++ /dev/null @@ -1,181 +0,0 @@ -module namelist_def - - !======================================================================= - - ! 
Define associated modules and subroutines - - !----------------------------------------------------------------------- - - use kinds - - !----------------------------------------------------------------------- - - use mpi_interface - - !----------------------------------------------------------------------- - - implicit none - - !----------------------------------------------------------------------- - - ! Define global variables - - integer, parameter :: max_ngrids = 12 - character(len=500) :: analysis_filename(max_ngrids) = 'NOT USED' - character(len=500) :: analysis_filename2d(max_ngrids) = 'NOT USED' - character(len=500) :: gfs_hyblevs_filename = 'NOT USED' - character(len=500) :: esmf_neareststod_filename = 'NOT USED' - character(len=500) :: esmf_bilinear_filename = 'NOT USED' - character(len=500) :: variable_table = 'NOT USED' - character(len=500) :: datapathout2d = './' - character(len=500) :: datapathout3d = './' - character(len=19) :: forecast_timestamp = '0000-00-00_00:00:00' - character(len=4) :: nemsio_opt = 'bin4' - character(len=4) :: nemsio_opt2d = 'none' - character(len=4) :: nemsio_opt3d = 'none' - logical :: is_ugrid2sgrid = .false. - logical :: debug = .false. - integer :: nlons = 0 - integer :: nlats = 0 - integer :: ntrunc = 0 - integer :: ngrids = 0 - namelist /share/ debug, nlons,nlats,ntrunc,datapathout2d,datapathout3d, & - analysis_filename, forecast_timestamp, nemsio_opt, nemsio_opt2d, nemsio_opt3d, & - analysis_filename2d, variable_table - - namelist /interpio/ esmf_bilinear_filename, esmf_neareststod_filename, gfs_hyblevs_filename - - !--------------------------------------------------------------------- - -contains - - !===================================================================== - - ! namelistparams.f90: - - !--------------------------------------------------------------------- - - subroutine namelistparams() - - ! Define variables computed within routine - - logical :: is_it_there - integer :: unit_nml - - ! Define counting variables - - integer :: i, j, k - - !=================================================================== - - ! Define local variables - - unit_nml = 9 - is_it_there = .false. - inquire(file='regrid-nemsio.input',exist = is_it_there) - - ! Check local variable and proceed accordingly - - if(is_it_there) then - - ! Define local variables - - open(file = 'regrid-nemsio.input', & - unit = unit_nml , & - status = 'old' , & - form = 'formatted' , & - action = 'read' , & - access = 'sequential' ) - read(unit_nml,NML = share) - read(unit_nml,NML = interpio) - close(unit_nml) - if (nemsio_opt2d == 'none') nemsio_opt2d=nemsio_opt - if (nemsio_opt3d == 'none') nemsio_opt3d=nemsio_opt - - ! Loop through local variable - - do i = 1, max_ngrids - - ! Check local variable and proceed accordingly - - if(analysis_filename(i) .ne. 'NOT USED') then - - ! Define local variables - - ngrids = ngrids + 1 - - end if ! if(analysis_filename(i) .ne. 'NOT USED') - - end do ! do i = 1, max_ngrids - - else ! if(is_it_there) - - ! Define local variables - - if(mpi_procid .eq. mpi_masternode) write(6,500) - call mpi_barrier(mpi_comm_world,mpi_ierror) - call mpi_interface_terminate() - - end if ! if(.not. is_it_there) - - !=================================================================== - - ! Check local variable and proceed accordingly - - if(mpi_procid .eq. mpi_masternode) then - - ! 
Define local variables - - write(6,*) '&SHARE' - write(6,*) 'DEBUG = ', debug - write(6,*) 'ANALYSIS_FILENAME = ' - do k = 1, ngrids - write(6,*) trim(adjustl(analysis_filename(k))) - ! if analysis_filename2d not specified, set to analysis_filename - if (trim(analysis_filename2d(k)) == 'NOT USED') then - analysis_filename2d(k) = analysis_filename(k) - endif - end do ! do k = 1, ngrids - write(6,*) 'ANALYSIS_FILENAME2D = ' - do k = 1, ngrids - write(6,*) trim(adjustl(analysis_filename2d(k))) - end do ! do k = 1, ngrids - write(6,*) 'VARIABLE_TABLE = ', & - & trim(adjustl(variable_table)) - write(6,*) 'FORECAST_TIMESTAMP = ', forecast_timestamp - write(6,*) 'OUTPUT DATAPATH (2d) = ', & - & trim(adjustl(datapathout2d)) - write(6,*) 'OUTPUT DATAPATH (3d) = ', & - & trim(adjustl(datapathout3d)) - write(6,*) 'NEMSIO_OPT (2d) = ', nemsio_opt2d - write(6,*) 'NEMSIO_OPT (3d) = ', nemsio_opt3d - write(6,*) '/' - write(6,*) '&INTERPIO' - write(6,*) 'ESMF_BILINEAR_FILENAME = ', & - & trim(adjustl(esmf_bilinear_filename)) - write(6,*) 'ESMF_NEARESTSTOD_FILENAME = ', & - & trim(adjustl(esmf_neareststod_filename)) - write(6,*) 'GFS_HYBLEVS_FILENAME = ', & - & trim(adjustl(gfs_hyblevs_filename)) - write(6,*) '/' - - end if ! if(mpi_procid .eq. mpi_masternode) - - ! Define local variables - - call mpi_barrier(mpi_comm_world,mpi_ierror) - - !=================================================================== - - ! Define format statements - -500 format('NAMELISTPARAMS: regrid-nemsio.input not found in the', & - & ' current working directory. ABORTING!!!!') - - !=================================================================== - - end subroutine namelistparams - - !===================================================================== - -end module namelist_def diff --git a/sorc/regrid_nemsio.fd/netcdfio_interface.f90 b/sorc/regrid_nemsio.fd/netcdfio_interface.f90 deleted file mode 100644 index 473b765c50..0000000000 --- a/sorc/regrid_nemsio.fd/netcdfio_interface.f90 +++ /dev/null @@ -1,592 +0,0 @@ -module netcdfio_interface - - !======================================================================= - - ! Define associated modules and subroutines - - !----------------------------------------------------------------------- - - use kinds - - !----------------------------------------------------------------------- - - use namelist_def - use netcdf - use mpi_interface - - !----------------------------------------------------------------------- - - implicit none - - !----------------------------------------------------------------------- - - ! Define global variables - - logical :: ncstatic - integer :: ncrec - integer :: ncxdim - integer :: ncydim - integer :: nczdim - integer :: nctdim - integer :: ncfileid - integer :: ncvarid - integer :: ncdimid - integer :: ncstatus - - !----------------------------------------------------------------------- - - ! Define interfaces and attributes for module routines - - private - interface netcdfio_values_1d - module procedure netcdfio_values_1d_dblepr - module procedure netcdfio_values_1d_realpr - module procedure netcdfio_values_1d_intepr - end interface ! interface netcdfio_values_2d - interface netcdfio_values_2d - module procedure netcdfio_values_2d_dblepr - module procedure netcdfio_values_2d_realpr - module procedure netcdfio_values_2d_intepr - end interface ! 
interface netcdfio_values_2d - interface netcdfio_values_3d - module procedure netcdfio_values_3d_dblepr - module procedure netcdfio_values_3d_realpr - module procedure netcdfio_values_3d_intepr - end interface ! interface netcdfio_values_3d - interface netcdfio_global_attr - module procedure netcdfio_global_attr_char - end interface ! interface netcdfio_global_attr - interface netcdfio_variable_attr - module procedure netcdfio_variable_attr_char - end interface ! interface netcdfio_variable_attr - public :: netcdfio_values_1d - public :: netcdfio_values_2d - public :: netcdfio_values_3d - public :: netcdfio_dimension - public :: netcdfio_global_attr - public :: netcdfio_variable_attr - public :: ncrec - public :: ncxdim - public :: ncydim - public :: nczdim - public :: nctdim - public :: ncstatic - - !----------------------------------------------------------------------- - -contains - - !======================================================================= - - ! netcdfio_global_attr.f90: - - !----------------------------------------------------------------------- - - subroutine netcdfio_global_attr_char(filename,varname,varvalue) - - ! Define variables passed to routine - - character(len=500) :: filename - character(len=*) :: varname - character(len=*) :: varvalue - - !===================================================================== - - ! Define local variables - - ncstatus = nf90_open(path=trim(adjustl(filename)),mode=nf90_nowrite, & - & ncid=ncfileid) - ncstatus = nf90_get_att(ncfileid,nf90_global,trim(adjustl(varname)), & - & varvalue) - ncstatus = nf90_close(ncfileid) - - !===================================================================== - - end subroutine netcdfio_global_attr_char - - subroutine netcdfio_variable_attr_char(filename,varname,attribute,varvalue) - - implicit none - - !======================================================================= - - ! Define variables passed to subroutine - - character(len=500),intent(in) :: filename - character(len=*),intent(in) :: attribute - character(len=*),intent(in) :: varname - - ! Define variables returned by subroutine - - character(len=80),intent(out) :: varvalue - - ! Define variables for decoding netCDF data - - integer ncid, varid, ncstatus - - ncstatus = nf90_open(path=trim(adjustl(filename)),mode=nf90_nowrite,ncid=ncid) - ncstatus = nf90_inq_varid(ncid,trim(adjustl(varname)),varid) - ncstatus = nf90_get_att(ncid,varid,trim(adjustl(attribute)),varvalue) - ncstatus = nf90_close(ncfileid) - - !===================================================================== - - end subroutine netcdfio_variable_attr_char - - !======================================================================= - - ! netcdfio_values_1d_dblepr.f90: - - !----------------------------------------------------------------------- - - subroutine netcdfio_values_1d_dblepr(filename,varname,varvalue) - - ! Define variables passed to routine - - character(len=500) :: filename - character(len=*) :: varname - real(r_double) :: varvalue(:) - - !===================================================================== - - ! Define local variables - - ncstatus = nf90_open(path=trim(adjustl(filename)),mode=nf90_nowrite, & - & ncid=ncfileid) - ncstatus = nf90_inq_varid(ncfileid,trim(adjustl(varname)),ncvarid) - if (ncstatus /= 0) then - varvalue = -1.e30 - else - ncstatus = nf90_get_var(ncfileid,ncvarid,varvalue) - if (ncstatus .ne. 
0) then - print *,'fv3 read failed for ',trim(adjustl(varname)) - call mpi_interface_terminate() - stop - endif - endif - ncstatus = nf90_close(ncfileid) - - !===================================================================== - - end subroutine netcdfio_values_1d_dblepr - - !======================================================================= - - ! netcdfio_values_2d_dblepr.f90: - - !----------------------------------------------------------------------- - - subroutine netcdfio_values_2d_dblepr(filename,varname,varvalue) - - ! Define variables passed to routine - - character(len=500) :: filename - character(len=*) :: varname - real(r_double), dimension(ncxdim,ncydim) :: varvalue - - ! Define variables computed within routine - - integer, dimension(3) :: start - integer, dimension(3) :: count - - !===================================================================== - - ! Define local variables - - ncstatus = nf90_open(path=trim(adjustl(filename)),mode=nf90_nowrite, & - & ncid=ncfileid) - ncstatus = nf90_inq_varid(ncfileid,trim(adjustl(varname)),ncvarid) - if (ncstatus .ne. 0) then - print *,'fv3 read failed for ',trim(adjustl(varname)) - call mpi_interface_terminate() - stop - endif - if(ncstatic) start = (/1,1,1/) - if(.not. ncstatic) start = (/1,1,ncrec/) - count = (/ncxdim,ncydim,1/) - ncstatus = nf90_get_var(ncfileid,ncvarid,varvalue,start,count) - if (ncstatus .ne. 0) then - print *,'fv3 read failed for ',trim(adjustl(varname)) - call mpi_interface_terminate() - stop - endif - if(debug) write(6,500) trim(adjustl(varname)), minval(varvalue), & - & maxval(varvalue) - ncstatus = nf90_close(ncfileid) - - !===================================================================== - - ! Define format statements - -500 format('NETCDFIO_VALUES_2D: Variable name = ', a, '; (min,max) = (', & - & f13.5,',',f13.5,').') - - !===================================================================== - - end subroutine netcdfio_values_2d_dblepr - - !======================================================================= - - ! netcdfio_values_3d_dblepr.f90: - - !----------------------------------------------------------------------- - - subroutine netcdfio_values_3d_dblepr(filename,varname,varvalue) - - ! Define variables passed to routine - - character(len=500) :: filename - character(len=*) :: varname - real(r_double), dimension(ncxdim,ncydim,nczdim) :: varvalue - - ! Define variables computed within routine - - integer, dimension(4) :: start - integer, dimension(4) :: count - - !===================================================================== - - ! Define local variables - - ncstatus = nf90_open(path=trim(adjustl(filename)),mode=nf90_nowrite, & - & ncid=ncfileid) - ncstatus = nf90_inq_varid(ncfileid,trim(adjustl(varname)),ncvarid) - if (ncstatus .ne. 0) then - print *,'fv3 read failed for ',trim(adjustl(varname)) - call mpi_interface_terminate() - stop - endif - if(ncstatic) start = (/1,1,1,1/) - if(.not. ncstatic) start = (/1,1,1,ncrec/) - count = (/ncxdim,ncydim,nczdim,1/) - ncstatus = nf90_get_var(ncfileid,ncvarid,varvalue,start,count) - if (ncstatus .ne. 0) then - print *,'fv3 read failed for ',trim(adjustl(varname)) - call mpi_interface_terminate() - stop - endif - if(debug) write(6,500) trim(adjustl(varname)), minval(varvalue), & - & maxval(varvalue) - ncstatus = nf90_close(ncfileid) - - !===================================================================== - - ! 
Define format statements - -500 format('NETCDFIO_VALUES_3D: Variable name = ', a, '; (min,max) = (', & - & f13.5,',',f13.5,').') - - !===================================================================== - - end subroutine netcdfio_values_3d_dblepr - - !======================================================================= - - ! netcdfio_values_1d_realpr.f90: - - !----------------------------------------------------------------------- - - subroutine netcdfio_values_1d_realpr(filename,varname,varvalue) - - ! Define variables passed to routine - - character(len=500) :: filename - character(len=*) :: varname - real(r_kind) :: varvalue(:) - - !===================================================================== - - ! Define local variables - - ncstatus = nf90_open(path=trim(adjustl(filename)),mode=nf90_nowrite, & - & ncid=ncfileid) - ncstatus = nf90_inq_varid(ncfileid,trim(adjustl(varname)),ncvarid) - if (ncstatus .ne. 0) then - print *,'fv3 read failed for ',trim(adjustl(varname)) - call mpi_interface_terminate() - stop - endif - if (ncstatus /= 0) then - varvalue = -1.e30 - else - ncstatus = nf90_get_var(ncfileid,ncvarid,varvalue) - if (ncstatus .ne. 0) then - print *,'fv3 read failed for ',trim(adjustl(varname)) - call mpi_interface_terminate() - stop - endif - endif - ncstatus = nf90_close(ncfileid) - - !===================================================================== - - end subroutine netcdfio_values_1d_realpr - - !======================================================================= - - ! netcdfio_values_2d_realpr.f90: - - !----------------------------------------------------------------------- - - subroutine netcdfio_values_2d_realpr(filename,varname,varvalue) - - ! Define variables passed to routine - - character(len=500) :: filename - character(len=*) :: varname - real(r_kind), dimension(ncxdim,ncydim) :: varvalue - - ! Define variables computed within routine - - integer, dimension(3) :: start - integer, dimension(3) :: count - - !===================================================================== - - ! Define local variables - - ncstatus = nf90_open(path=trim(adjustl(filename)),mode=nf90_nowrite, & - & ncid=ncfileid) - ncstatus = nf90_inq_varid(ncfileid,trim(adjustl(varname)),ncvarid) - if (ncstatus .ne. 0) then - print *,'fv3 read failed for ',trim(adjustl(varname)) - call mpi_interface_terminate() - stop - endif - if(ncstatic) start = (/1,1,1/) - if(.not. ncstatic) start = (/1,1,ncrec/) - count = (/ncxdim,ncydim,1/) - ncstatus = nf90_get_var(ncfileid,ncvarid,varvalue,start,count) - if (ncstatus .ne. 0) then - print *,'fv3 read failed for ',trim(adjustl(varname)) - call mpi_interface_terminate() - stop - endif - if(debug) write(6,500) trim(adjustl(varname)), minval(varvalue), & - & maxval(varvalue) - ncstatus = nf90_close(ncfileid) - - !===================================================================== - - ! Define format statements - -500 format('NETCDFIO_VALUES_2D: Variable name = ', a, '; (min,max) = (', & - & f13.5,',',f13.5,').') - - !===================================================================== - - end subroutine netcdfio_values_2d_realpr - - !======================================================================= - - ! netcdfio_values_3d_realpr.f90: - - !----------------------------------------------------------------------- - - subroutine netcdfio_values_3d_realpr(filename,varname,varvalue) - - ! 
Define variables passed to routine - - character(len=500) :: filename - character(len=*) :: varname - real(r_kind), dimension(ncxdim,ncydim,nczdim) :: varvalue - - ! Define variables computed within routine - - integer, dimension(4) :: start - integer, dimension(4) :: count - - !===================================================================== - - ! Define local variables - - ncstatus = nf90_open(path=trim(adjustl(filename)),mode=nf90_nowrite, & - & ncid=ncfileid) - ncstatus = nf90_inq_varid(ncfileid,trim(adjustl(varname)),ncvarid) - if (ncstatus .ne. 0) then - print *,'fv3 read failed for ',trim(adjustl(varname)) - call mpi_interface_terminate() - stop - endif - if(ncstatic) start = (/1,1,1,1/) - if(.not. ncstatic) start = (/1,1,1,ncrec/) - count = (/ncxdim,ncydim,nczdim,1/) - ncstatus = nf90_get_var(ncfileid,ncvarid,varvalue,start,count) - if (ncstatus .ne. 0) then - print *,'fv3 read failed for ',trim(adjustl(varname)) - call mpi_interface_terminate() - stop - endif - if(debug) write(6,500) trim(adjustl(varname)), minval(varvalue), & - & maxval(varvalue) - ncstatus = nf90_close(ncfileid) - - !===================================================================== - - ! Define format statements - -500 format('NETCDFIO_VALUES_3D: Variable name = ', a, '; (min,max) = (', & - & f13.5,',',f13.5,').') - - !===================================================================== - - end subroutine netcdfio_values_3d_realpr - - !======================================================================= - - ! netcdfio_values_1d_intepr.f90: - - !----------------------------------------------------------------------- - - subroutine netcdfio_values_1d_intepr(filename,varname,varvalue) - - ! Define variables passed to routine - - character(len=500) :: filename - character(len=*) :: varname - integer :: varvalue(:) - - !===================================================================== - - ! Define local variables - - ncstatus = nf90_open(path=trim(adjustl(filename)),mode=nf90_nowrite, & - & ncid=ncfileid) - ncstatus = nf90_inq_varid(ncfileid,trim(adjustl(varname)),ncvarid) - if (ncstatus /= 0) then - varvalue = -9999 - else - ncstatus = nf90_get_var(ncfileid,ncvarid,varvalue) - endif - ncstatus = nf90_close(ncfileid) - - !===================================================================== - - end subroutine netcdfio_values_1d_intepr - - !======================================================================= - - ! netcdfio_values_2d_intepr.f90: - - !----------------------------------------------------------------------- - - subroutine netcdfio_values_2d_intepr(filename,varname,varvalue) - - ! Define variables passed to routine - - character(len=500) :: filename - character(len=*) :: varname - integer, dimension(ncxdim,ncydim) :: varvalue - - ! Define variables computed within routine - - integer, dimension(3) :: start - integer, dimension(3) :: count - - !===================================================================== - - ! Define local variables - - ncstatus = nf90_open(path=trim(adjustl(filename)),mode=nf90_nowrite, & - & ncid=ncfileid) - ncstatus = nf90_inq_varid(ncfileid,trim(adjustl(varname)),ncvarid) - if(ncstatic) start = (/1,1,1/) - if(.not. ncstatic) start = (/1,1,ncrec/) - count = (/ncxdim,ncydim,1/) - ncstatus = nf90_get_var(ncfileid,ncvarid,varvalue,start,count) - if(debug) write(6,500) trim(adjustl(varname)), minval(varvalue), & - & maxval(varvalue) - ncstatus = nf90_close(ncfileid) - - !===================================================================== - - ! 
Define format statements - -500 format('NETCDFIO_VALUES_2D: Variable name = ', a, '; (min,max) = (', & - & f13.5,',',f13.5,').') - - !===================================================================== - - end subroutine netcdfio_values_2d_intepr - - !======================================================================= - - ! netcdfio_values_3d_intepr.f90: - - !----------------------------------------------------------------------- - - subroutine netcdfio_values_3d_intepr(filename,varname,varvalue) - - ! Define variables passed to routine - - character(len=500) :: filename - character(len=*) :: varname - integer, dimension(ncxdim,ncydim,nczdim) :: varvalue - - ! Define variables computed within routine - - integer, dimension(4) :: start - integer, dimension(4) :: count - - !===================================================================== - - ! Define local variables - - ncstatus = nf90_open(path=trim(adjustl(filename)),mode=nf90_nowrite, & - & ncid=ncfileid) - ncstatus = nf90_inq_varid(ncfileid,trim(adjustl(varname)),ncvarid) - if (ncstatus .ne. 0) then - print *,'fv3 read failed for ',trim(adjustl(varname)) - call mpi_interface_terminate() - stop - endif - if(ncstatic) start = (/1,1,1,1/) - if(.not. ncstatic) start = (/1,1,1,ncrec/) - count = (/ncxdim,ncydim,nczdim,1/) - ncstatus = nf90_get_var(ncfileid,ncvarid,varvalue,start,count) - if(debug) write(6,500) trim(adjustl(varname)), minval(varvalue), & - & maxval(varvalue) - ncstatus = nf90_close(ncfileid) - - !===================================================================== - - ! Define format statements - -500 format('NETCDFIO_VALUES_3D: Variable name = ', a, '; (min,max) = (', & - & f13.5,',',f13.5,').') - - !===================================================================== - - end subroutine netcdfio_values_3d_intepr - - !======================================================================= - - ! netcdfio_dimension.f90: - - !----------------------------------------------------------------------- - - subroutine netcdfio_dimension(filename,dimname,dimvalue) - - ! Define variables passed to routine - - character(len=500) :: filename - character(len=*) :: dimname - integer :: dimvalue - - !===================================================================== - - ! Define local variables - - ncstatus = nf90_open(path=trim(adjustl(filename)),mode=nf90_nowrite, & - & ncid=ncfileid) - ncstatus = nf90_inq_dimid(ncfileid,trim(adjustl(dimname)),ncdimid) - ncstatus = nf90_inquire_dimension(ncfileid,ncdimid,len=dimvalue) - ncstatus = nf90_close(ncfileid) - - !===================================================================== - - end subroutine netcdfio_dimension - - !======================================================================= - -end module netcdfio_interface diff --git a/sorc/regrid_nemsio.fd/physcons.f90 b/sorc/regrid_nemsio.fd/physcons.f90 deleted file mode 100644 index 4e69dca337..0000000000 --- a/sorc/regrid_nemsio.fd/physcons.f90 +++ /dev/null @@ -1,77 +0,0 @@ -! this module contains some the most frequently used math and ! -! physics constatns for gcm models. ! -! ! -! references: ! -! as set in NMC handbook from Smithsonian tables. ! -! ! - module physcons -! - use kinds, only : r_kind -! - implicit none -! - public - -! --- ... Math constants - - real(r_kind),parameter:: con_pi =3.1415926535897931 ! pi - real(r_kind),parameter:: con_sqrt2 =1.414214e+0 ! square root of 2 - real(r_kind),parameter:: con_sqrt3 =1.732051e+0 ! square root of 3 - -! --- ... 
Geophysics/Astronomy constants - - real(r_kind),parameter:: con_rerth =6.3712e+6 ! radius of earth (m) - real(r_kind),parameter:: con_g =9.80665e+0 ! gravity (m/s2) - real(r_kind),parameter:: con_omega =7.2921e-5 ! ang vel of earth (1/s) - real(r_kind),parameter:: con_p0 =1.01325e5 ! std atms pressure (pa) - real(r_kind),parameter:: con_solr =1.3660e+3 ! solar constant (W/m2)-liu(2002) - -! --- ... Thermodynamics constants - - real(r_kind),parameter:: con_rgas =8.314472 ! molar gas constant (J/mol/K) - real(r_kind),parameter:: con_rd =2.8705e+2 ! gas constant air (J/kg/K) - real(r_kind),parameter:: con_rv =4.6150e+2 ! gas constant H2O (J/kg/K) - real(r_kind),parameter:: con_cp =1.0046e+3 ! spec heat air @p (J/kg/K) - real(r_kind),parameter:: con_cv =7.1760e+2 ! spec heat air @v (J/kg/K) - real(r_kind),parameter:: con_cvap =1.8460e+3 ! spec heat H2O gas (J/kg/K) - real(r_kind),parameter:: con_cliq =4.1855e+3 ! spec heat H2O liq (J/kg/K) - real(r_kind),parameter:: con_csol =2.1060e+3 ! spec heat H2O ice (J/kg/K) - real(r_kind),parameter:: con_hvap =2.5000e+6 ! lat heat H2O cond (J/kg) - real(r_kind),parameter:: con_hfus =3.3358e+5 ! lat heat H2O fusion (J/kg) - real(r_kind),parameter:: con_psat =6.1078e+2 ! pres at H2O 3pt (Pa) - real(r_kind),parameter:: con_t0c =2.7315e+2 ! temp at 0C (K) - real(r_kind),parameter:: con_ttp =2.7316e+2 ! temp at H2O 3pt (K) - real(r_kind),parameter:: con_tice =2.7120e+2 ! temp freezing sea (K) - real(r_kind),parameter:: con_jcal =4.1855E+0 ! joules per calorie () - real(r_kind),parameter:: con_rhw0 =1022.0 ! sea water reference density (kg/m^3) - real(r_kind),parameter:: con_epsq =1.0E-12 ! min q for computing precip type - -! Secondary constants - - real(r_kind),parameter:: con_rocp =con_rd/con_cp - real(r_kind),parameter:: con_cpor =con_cp/con_rd - real(r_kind),parameter:: con_rog =con_rd/con_g - real(r_kind),parameter:: con_fvirt =con_rv/con_rd-1. - real(r_kind),parameter:: con_eps =con_rd/con_rv - real(r_kind),parameter:: con_epsm1 =con_rd/con_rv-1. - real(r_kind),parameter:: con_dldt =con_cvap-con_cliq - real(r_kind),parameter:: con_xpona =-con_dldt/con_rv - real(r_kind),parameter:: con_xponb =-con_dldt/con_rv+con_hvap/(con_rv*con_ttp) - -! --- ... Other Physics/Chemistry constants (source: 2002 CODATA) - - real(r_kind),parameter:: con_c =2.99792458e+8 ! speed of light (m/s) - real(r_kind),parameter:: con_plnk =6.6260693e-34 ! planck constatn (J/s) - real(r_kind),parameter:: con_boltz =1.3806505e-23 ! boltzmann constant (J/K) - real(r_kind),parameter:: con_sbc =5.670400e-8 ! stefan-boltzmann (W/m2/K4) - real(r_kind),parameter:: con_avgd =6.0221415e23 ! avogadro constant (1/mol) - real(r_kind),parameter:: con_gasv =22413.996e-6 ! vol of ideal gas at 273.15k, 101.325kpa (m3/mol) - real(r_kind),parameter:: con_amd =28.9644 ! molecular wght of dry air (g/mol) - real(r_kind),parameter:: con_amw =18.0154 ! molecular wght of water vapor (g/mol) - real(r_kind),parameter:: con_amo3 =47.9982 ! molecular wght of o3 (g/mol) - real(r_kind),parameter:: con_amco2 =44.011 ! molecular wght of co2 (g/mol) - real(r_kind),parameter:: con_amo2 =31.9999 ! molecular wght of o2 (g/mol) - real(r_kind),parameter:: con_amch4 =16.043 ! molecular wght of ch4 (g/mol) - real(r_kind),parameter:: con_amn2o =44.013 ! 
molecular wght of n2o (g/mol) - -end module physcons diff --git a/sorc/regrid_nemsio.fd/regrid_nemsio_interface.f90 b/sorc/regrid_nemsio.fd/regrid_nemsio_interface.f90 deleted file mode 100644 index 9ab5597af8..0000000000 --- a/sorc/regrid_nemsio.fd/regrid_nemsio_interface.f90 +++ /dev/null @@ -1,50 +0,0 @@ -module regrid_nemsio_interface - - !======================================================================= - - ! Define associated modules and subroutines - - !----------------------------------------------------------------------- - - use constants - use kinds - - !----------------------------------------------------------------------- - - use fv3_interface - use gfs_nems_interface - use namelist_def - - !----------------------------------------------------------------------- - - implicit none - - !----------------------------------------------------------------------- - -contains - - !======================================================================= - - ! regrid_nemsio.f90: - - !----------------------------------------------------------------------- - - subroutine regrid_nemsio() - - !===================================================================== - - ! Define local variables - - call namelistparams() - - ! Check local variable and proceed accordingly - - call fv3_regrid_nemsio() - - !===================================================================== - - end subroutine regrid_nemsio - - !======================================================================= - -end module regrid_nemsio_interface diff --git a/sorc/regrid_nemsio.fd/variable_interface.f90 b/sorc/regrid_nemsio.fd/variable_interface.f90 deleted file mode 100644 index d0d568429d..0000000000 --- a/sorc/regrid_nemsio.fd/variable_interface.f90 +++ /dev/null @@ -1,66 +0,0 @@ -module variable_interface - - !======================================================================= - - ! Define associated modules and subroutines - - !----------------------------------------------------------------------- - - use kinds - use physcons, only: rgas => con_rd, cp => con_cp, grav => con_g, & - & rerth => con_rerth, rocp => con_rocp, & - & pi => con_pi, con_rog - - !----------------------------------------------------------------------- - - use mpi_interface - use namelist_def - - !----------------------------------------------------------------------- - - implicit none - - !----------------------------------------------------------------------- - - ! Define interfaces and attributes for module routines - - private - public :: varinfo - !public :: variable_lookup - public :: variable_clip - - !----------------------------------------------------------------------- - - ! Define all data and structure types for routine; these variables - ! are variables required by the subroutines within this module - - type varinfo - character(len=20) :: var_name - character(len=20) :: nems_name - character(len=20) :: nems_levtyp - integer :: nems_lev - character(len=20) :: itrptyp - logical :: clip - integer :: ndims - end type varinfo ! type varinfo - - !----------------------------------------------------------------------- - -contains - - !======================================================================= - - subroutine variable_clip(grid) - - - real(r_double) :: grid(:) - real(r_double) :: clip - - clip = tiny(grid(1)) - where(grid .le. 
dble(0.0)) grid = clip - - end subroutine variable_clip - - !======================================================================= - -end module variable_interface diff --git a/sorc/rsync_gsl.sh b/sorc/rsync_gsl.sh new file mode 100755 index 0000000000..9b56c204bf --- /dev/null +++ b/sorc/rsync_gsl.sh @@ -0,0 +1,12 @@ +#!/bin/sh + +## +## this script copies over GSL changes not in ufs-community/ufs-weather-model repository +## +for dir in ufs_model.fd ufs_utils.fd; do + if [[ -d ${dir}_gsl ]]; then + echo "syncing ${dir}_gsl...." + rsync -avx ${dir}_gsl/ ${dir}/ + fi +done + diff --git a/sorc/supvit.fd/makefile b/sorc/supvit.fd/makefile deleted file mode 100644 index 288e42beff..0000000000 --- a/sorc/supvit.fd/makefile +++ /dev/null @@ -1,31 +0,0 @@ -SHELL= /bin/sh -ISIZE = 4 -RSIZE = 8 -COMP= ifort -##LIBS_SUP= -L/contrib/nceplibs/nwprod/lib -lw3emc_d -lw3nco_d -lg2_d -lbacio_4 -ljasper -lpng -lz -LDFLAGS= -##ccs FFLAGS= -O -qflttrap=ov:zero:inv:enable -qcheck -qextchk -qwarn64 -qintsize=$(ISIZE) -qrealsize=$(RSIZE) -# FFLAGS= -O2 -check bounds -check format -xHost -fpe0 -# DEBUG= -check bounds -check format -FFLAGS= -O2 -g -i$(ISIZE) -r$(RSIZE) - -supvit: supvit_main.f supvit_modules.o - @echo " " - @echo " Compiling program that sorts and updates vitals records...." - $(COMP) $(FFLAGS) $(LDFLAGS) supvit_modules.o supvit_main.f $(LIBS_SUP) -o supvit - @echo " " - -supvit_modules.o: supvit_modules.f - @echo " " - @echo " Compiling the modules....." - $(COMP) -c supvit_modules.f -o supvit_modules.o - @echo " " - -CMD = supvit - -clean: - -rm -f *.o *.mod - -install: - mv $(CMD) ../../exec/$(CMD) - diff --git a/sorc/supvit.fd/supvit_main.f b/sorc/supvit.fd/supvit_main.f deleted file mode 100644 index 1484e4efeb..0000000000 --- a/sorc/supvit.fd/supvit_main.f +++ /dev/null @@ -1,865 +0,0 @@ - program sort_and_update_vitals -c -c$$$ MAIN PROGRAM DOCUMENTATION BLOCK -c -c Main Program: SUPVIT Sort and Update Vitals File -C PRGMMR: MARCHOK ORG: NP22 DATE: 1999-04-14 -c -c ABSTRACT: This program searches through the TC Vitals file and reads -c the records for a particular dtg. It contains logic to eliminate -c duplicate records and only keep the most recent one (see further -c documentation below). It also searches to see if a storm was -c included in the Vitals file 6 hours earlier (or 3 hours earlier -c if we're tracking with the off-synoptic-time SREF) but is missing -c from the current Vitals records. In this case, the program assumes -c that the regional forecasting center was late in reporting the -c current position, and it includes the old Vitals record with -c the current Vitals records. This program will also take the -c position and heading from that old vitals record and extrapolate the -c information to get a current first guess estimate of the storm's -c position. By the way, if a storm was found 3 or 6 hours earlier, -c logic is also included to eliminate any duplicate records of that -c storm in those old records. Finally, if it turns out that the -c reason an old vitals is no longer on the current records is that -c the storm has dissipated, don't worry about including it to be -c passed into the tracking program; the tracking program will not be -c able to track it and that'll be the end of it. -c -c Program history log: -c 98-03-26 Marchok - Original operational version. -c 99-04-01 Marchok - Modified code to be able to read the year off -c of the TC Vitals card as a 4-digit integer, -c instead of as a 2-digit integer. 
-c 00-06-13 Marchok - Modified code to be able to read vitals from 6h -c ahead (this is for use in the GDAS tropical -c cyclone relocation system). -c 04-05-27 Marchok - Modified code to be able to read vitals from 3h -c ago. This is for tracking with the 09z and 21z -c SREF ensemble. Since there are no vitals at -c these offtimes, we need to update vitals from -c the synoptic times 3h earlier. -c -c Input files: -c unit 31 Text file containing all vitals (including duplicates) -c for current time and time from 3 or 6 hours ago and -c 3 or 6 hours ahead. -c Output files: -c unit 51 Text file containing sorted, updated vitals (without -c any duplicates) valid at the current time only. -c -c Subprograms called: -c read_nlists Read input namelists for input dates -c read_tcv_file Read TC vitals file to get initial storm positions -c delete_dups Delete duplicate TC vitals records from current time -c delete_old Delete records from 6h ago if current record exists -c delete_old_dups Delete duplicate records from 6h ago time -c update_old_vits Update position of storms from 6h ago positions -c output Output 1 record for each updated vitals record -c -c Attributes: -c Language: Fortran_90 -c -c$$$ -c -c------- -c -c - USE def_vitals; USE set_max_parms; USE inparms; USE date_checks - USE trig_vals -c - type (tcvcard) storm(maxstorm) - type (datecard) dnow, dold, dfuture - - logical okstorm(maxstorm) - integer vit_hr_incr -c - call w3tagb('SUPVIT ',1999,0104,0058,'NP22 ') -c - okstorm = .FALSE. -c - pi = 4. * atan(1.) ! pi, dtr and rtd were declared in module - dtr = pi/180.0 ! trig_vals, but were not yet defined. - rtd = 180.0/pi -c -c ----------------------------------------- -c Read namelists to get date information -c - call read_nlists (dnow,dold,dfuture,vit_hr_incr) -c -c ----------------------------------------------------------- -c Read in storm cards for current time and delete duplicates -c - - inowct = 0 - call read_tcv_file (storm,ymd_now,hhmm_now,inowct,okstorm) - - if (inowct > 0) then - call delete_dups (storm,inowct,okstorm) - else - print *,' ' - print *,'!!! No storms on tcv card for current time.' - print *,'!!! A check will be made for old tcv storm cards,' - print *,'!!! and if any exist, the positions will be updated' - print *,'!!! (extrapolated) to get a first guess position for' - print *,'!!! the current time.' - print *,'!!! Current forecast time = ',ymd_now,hhmm_now - print *,'!!! Old forecast time = ',ymd_old,hhmm_old - endif -c -c ----------------------------------------------------------- -c Read in storm cards for 3h or 6h ago and delete duplicates -c - rewind (31) - itempct = inowct - call read_tcv_file (storm,ymd_old,hhmm_old,itempct,okstorm) - ioldct = itempct - inowct - - if (ioldct > 0) then - if (inowct > 0) then - call delete_old (storm,inowct,ioldct,okstorm) - endif - call delete_old_dups (storm,inowct,ioldct,okstorm) - endif - -c ---------------------------------------------------------------- -c Now update any vitals records left from 3h or 6h ago by -c extrapolating their positions ahead to the current time. - - if (ioldct > 0) then - call update_old_vits (storm,inowct,ioldct,okstorm,vit_hr_incr) - endif - - -c -------------------------------------------------------------- -c Read in storm cards for 3h or 6h ahead and delete duplicates. -c This is used for Qingfu's vortex relocation purposes. If he is -c doing the analysis/relocation for, say, 12z, he looks at the -c first guess files from the 06z cycle and tracks from there. 
-c But suppose there is a storm whose first tcvitals card is -c issued at 12z; then we would have no tcvitals card at 06z for -c the tracker to use. So this next part reads the vitals from -c the cycle 6h ahead and, if it finds any vitals that were not -c included with the current time's vitals, then it extrapolates -c those vitals from the next cycle *backwards* to the current -c time. By the way, itempct is input/output for the read -c routine. Going in, it contains the count of the number of -c records read in so far. In that read routine, itempct is -c incremented for every valid record read for the input time. - - rewind (31) - iprevct = inowct + ioldct - call read_tcv_file (storm,ymd_future,hhmm_future,itempct,okstorm) - ifuturect = itempct - iprevct - - print *,'before d6a if, ifuturect = ',ifuturect,' iprevct= ' - & ,iprevct - print *,'before d6a if, inowct = ',inowct,' ioldct= ',ioldct - - if (ifuturect > 0) then - if (iprevct > 0) then - call delete_future (storm,iprevct,ifuturect,okstorm) - endif - call delete_future_dups (storm,iprevct,ifuturect,okstorm) - endif - -c ---------------------------------------------------------------- -c Now update any vitals records not filtered out from 3h or 6h -c ahead by extrapolating their future positions *backwards* to -c the current time. - - if (ifuturect > 0) then - call update_future_vits (storm,iprevct,ifuturect,okstorm - & ,vit_hr_incr) - endif - - -c --------------------------------------------------------- -c Now output all of the sorted, updated TC Vitals records - - itotalct = inowct + ioldct + ifuturect - call output (storm,itotalct,okstorm) -c - call w3tage('SUPVIT ') - stop - end -c -c--------------------------------------------------------------------- -c -c--------------------------------------------------------------------- - subroutine read_tcv_file (storm,ymd,hhmm,ict,okstorm) -c -c ABSTRACT: This routine reads in the TC Vitals file, and stores -c into an array those records that match the input ymd and hhmm. -c -c INPUT: -c -c ict Tells at what index in the storm array to begin reading -c the input records into. This is important because this -c subroutine is called twice; the first time the data are -c for the current time and are just started at ict = 0, -c but the second time it's called we're getting the 6h ago -c data, and they have to be added onto the end of the -c array, so we need to know where the current time's data -c ends so we know what index to start the 6h ago data. -c - USE def_vitals; USE set_max_parms -c - type (tcvcard) storm(maxstorm), ts -c - integer ymd,hhmm - logical okstorm(maxstorm) -c - lucard = 31 - - print *,' ' - print '(a26,i6.6,a8,i4.4)',' IN READ_TCV_FILE: , ymd= ',ymd - & ,' hhmm= ',hhmm - print *,' ' - - - do while (.true.) - read (lucard,21,END=801,ERR=891) ts - if (ts%tcv_yymmdd == ymd .and. ts%tcv_hhmm == hhmm) then - ict = ict + 1 - storm(ict) = ts - okstorm(ict) = .TRUE. - write (6,23) ' !!! MATCH, ict= ',ict,storm(ict) - endif - enddo - 801 continue - - 21 format (a4,1x,a3,1x,a9,1x,i2,i6,1x,i4,1x,i3,a1,1x,i4,a1,1x,i3,1x - & ,i3,a85) - 23 format (a18,i3,2x,a4,1x,a3,1x,a9,1x,i2,i6.6,1x,i4.4,1x,i3,a1,1x,i4 - & ,a1,1x,i3,1x,i3,a85) - - iret = 0 - return - - 891 print *,'!!! ERROR in program sort_and_update_vitals. Error ' - print *,'!!! 
occurred in read_tcv_file while reading unit ',lucard - iret = 98 - - return - end -c -c--------------------------------------------------------------------- -c -c--------------------------------------------------------------------- - subroutine output (storm,itotalct,okstorm) -c - USE def_vitals; USE set_max_parms; USE inparms -c - type (tcvcard) storm(maxstorm) - type (datecard) dnow, dold, dfuture - - logical okstorm(maxstorm) -c - lunvit = 51 - - ist = 1 - do while (ist <= itotalct) - - if (okstorm(ist)) then - if (storm(ist)%tcv_stdir == -99 .or. - & storm(ist)%tcv_stspd == -99) then - write (lunvit,23,ERR=891) storm(ist) - else - write (lunvit,21,ERR=891) storm(ist) - endif - endif - - ist = ist + 1 - - enddo - - 21 format (a4,1x,a3,1x,a9,1x,i2.2,i6.6,1x,i4.4,1x,i3.3,a1,1x,i4.4 - & ,a1,1x,i3.3,1x,i3.3,a85) - 23 format (a4,1x,a3,1x,a9,1x,i2.2,i6.6,1x,i4.4,1x,i3.3,a1,1x,i4.4 - & ,a1,1x,i3,1x,i3,a85) - - iret = 0 - return - - 891 print *,'!!! ERROR in program sort_and_update_vitals. Error ' - print *,'!!! occurred in output while writing new vitals file ' - print *,'!!! to unit number',lunvit - iret = 98 - - return - end -c -c--------------------------------------------------------------------- -c -c--------------------------------------------------------------------- - subroutine update_old_vits (storm,inowct,ioldct,okstorm - & ,vit_hr_incr) -c -c ABSTRACT: This subroutine updates the vitals from 3h or 6h ago. -c It uses the heading and direction values listed in the vitals -c record (see Module def_vitals for specfics on where to find -c heading & direction in the vitals record) to get a new -c position for the current time by extrapolating out 3h or 6h. -c - USE def_vitals; USE set_max_parms; USE inparms; USE date_checks - USE trig_vals -c - type (tcvcard) storm(maxstorm) - type (datecard) dnow, dold - - logical okstorm(maxstorm) - integer vit_hr_incr -c - ist = inowct + 1 - iend = inowct + ioldct - do while (ist <= iend) - - if (okstorm(ist) .and. storm(ist)%tcv_yymmdd == ymd_old .and. - & storm(ist)%tcv_hhmm == hhmm_old) then - - rlat = float(storm(ist)%tcv_lat) / 10. - rlon = float(storm(ist)%tcv_lon) / 10. - rhdg = float(storm(ist)%tcv_stdir) - rspd = float(storm(ist)%tcv_stspd) / 10. - -c ------------------------------------------ -c This first part updates the positions by simply -c extrapolating the current motion along the current -c heading at the current speed for 3h or 6h. Be -c careful with adding and subtracting these distances -c in the different hemispheres (see the if statements). -c Remember: In the storm message file, there are NO -c negative signs to distinguish between hemispheres, -c so a southern hemisphere latitude will be POSITIVE, -c but will be distinguished by the 'S'. - - strmucomp = rspd * sin(dtr*rhdg) - strmvcomp = rspd * cos(dtr*rhdg) -c - vdistdeg = (strmvcomp * secphr * vit_hr_incr) / dtk - if (storm(ist)%tcv_latns == 'N') then - rnewlat = rlat + vdistdeg - else - rnewlat = rlat - vdistdeg - endif -c - avglat = 0.5 * (rlat + rnewlat) - cosfac = cos(dtr * avglat) - udistdeg = (strmucomp * secphr * vit_hr_incr) / (dtk * cosfac) - if (storm(ist)%tcv_lonew == 'W') then - rnewlon = rlon - udistdeg - else - rnewlon = rlon + udistdeg - endif - -c ------------------------------------------ -c This part updates the E/W and N/S characters -c in the event that a storm changes hemisphere. -c (N to S and S to N is not really possible, but -c we'll include the code anyway). 
If a storm -c does change hemisphere, say from W to E at 180, -c we need to also adjust the new longitude value -c from say 186W to 174E. Have to include this -c code since storm messages contain longitudes on -c a 0-180 basis (E&W), NOT 0-360. - - if (storm(ist)%tcv_latns == 'N') then - if (rnewlat < 0.) then - storm(ist)%tcv_latns = 'S' - rnewlat = -1. * rnewlat - endif - else - if (rnewlat < 0.) then - storm(ist)%tcv_latns = 'N' - rnewlat = -1. * rnewlat - endif - endif -c - if (storm(ist)%tcv_lonew == 'W') then - if (rnewlon > 180.) then - storm(ist)%tcv_lonew = 'E' - rnewlon = 180. - abs(rnewlon - 180.) - endif - else - if (rnewlon > 180.) then - storm(ist)%tcv_lonew = 'W' - rnewlon = 180. - abs(rnewlon - 180.) - endif - endif - - storm(ist)%tcv_lat = int ((rnewlat + 0.05) * 10.) - storm(ist)%tcv_lon = int ((rnewlon + 0.05) * 10.) - storm(ist)%tcv_yymmdd = ymd_now - storm(ist)%tcv_hhmm = hhmm_now - - endif - - ist = ist + 1 - - enddo -c - return - end - -c -c--------------------------------------------------------------------- -c -c--------------------------------------------------------------------- - subroutine update_future_vits (storm,iprevct,ifuturect,okstorm - & ,vit_hr_incr) -c -c ABSTRACT: This subroutine updates the vitals from 3h or 6h ahead. -c It uses the heading and direction values listed in the vitals -c record (see Module def_vitals for specfics on where to find -c heading & direction in the vitals record) to get a new -c position for the current time by extrapolating *BACKWARDS* -c 3h or 6h to the current time. -c - USE def_vitals; USE set_max_parms; USE inparms; USE date_checks - USE trig_vals -c - type (tcvcard) storm(maxstorm) - type (datecard) dnow, dold, dfuture - - logical okstorm(maxstorm) - integer vit_hr_incr -c - ist = iprevct + 1 - iend = iprevct + ifuturect - do while (ist <= iend) - - if (okstorm(ist) .and. storm(ist)%tcv_yymmdd == ymd_future .and. - & storm(ist)%tcv_hhmm == hhmm_future) then - - rlat = float(storm(ist)%tcv_lat) / 10. - rlon = float(storm(ist)%tcv_lon) / 10. - rhdg = float(storm(ist)%tcv_stdir) - rspd = float(storm(ist)%tcv_stspd) / 10. - -c IMPORTANT NOTE: Since we are extrapolating *BACKWARDS* in -c time in this routine, we have to take that value of the -c storm heading in rhdg and switch it by 180 degrees so that -c we will be pointing back in the direction the storm came -c from.... - - if (rhdg >= 0. .and. rhdg <= 180.) then - rhdg = rhdg + 180. - else - rhdg = rhdg - 180. - endif - -c ------------------------------------------ -c This first part updates the positions by simply -c extrapolating the current motion along the REVERSE of -c the current heading at the current speed for 6 hours. -c Be careful with adding and subtracting these distances -c in the different hemispheres (see the if statements). -c Remember: In the storm message file, there are NO -c negative signs to distinguish between hemispheres, -c so a southern hemisphere latitude will be POSITIVE, -c but will be distinguished by the 'S'. 
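c
c     For illustration only (hypothetical values, not from an actual
c     vitals record): take a future-time record moving due east
c     (rhdg = 090.) at rspd = 5.0 m/s with vit_hr_incr = 6.  After the
c     180-degree reversal above, rhdg = 270., so the statements below
c     work out to roughly
c       strmucomp = 5.0 * sin(270.*dtr) = -5.0 m/s
c       strmvcomp = 5.0 * cos(270.*dtr) =  0.0 m/s
c       vdistdeg  = ( 0.0 * 3600. * 6) /  111194.9           =  0.00
c       udistdeg  = (-5.0 * 3600. * 6) / (111194.9 * cosfac) ~ -0.97
c     degrees at low latitudes (cosfac near 1), i.e. the first-guess
c     position is shifted about one degree of longitude back to the
c     west of the reported future position.
c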
- - strmucomp = rspd * sin(dtr*rhdg) - strmvcomp = rspd * cos(dtr*rhdg) -c - vdistdeg = (strmvcomp * secphr * vit_hr_incr) / dtk - if (storm(ist)%tcv_latns == 'N') then - rnewlat = rlat + vdistdeg - else - rnewlat = rlat - vdistdeg - endif -c - avglat = 0.5 * (rlat + rnewlat) - cosfac = cos(dtr * avglat) - udistdeg = (strmucomp * secphr * vit_hr_incr) / (dtk * cosfac) - if (storm(ist)%tcv_lonew == 'W') then - rnewlon = rlon - udistdeg - else - rnewlon = rlon + udistdeg - endif - -c ------------------------------------------ -c This part updates the E/W and N/S characters -c in the event that a storm changes hemisphere. -c (N to S and S to N is not really possible, but -c we'll include the code anyway). If a storm -c does change hemisphere, say from W to E at 180, -c we need to also adjust the new longitude value -c from say 186W to 174E. Have to include this -c code since storm messages contain longitudes on -c a 0-180 basis (E&W), NOT 0-360. - - if (storm(ist)%tcv_latns == 'N') then - if (rnewlat < 0.) then - storm(ist)%tcv_latns = 'S' - rnewlat = -1. * rnewlat - endif - else - if (rnewlat < 0.) then - storm(ist)%tcv_latns = 'N' - rnewlat = -1. * rnewlat - endif - endif -c - if (storm(ist)%tcv_lonew == 'W') then - if (rnewlon > 180.) then - storm(ist)%tcv_lonew = 'E' - rnewlon = 180. - abs(rnewlon - 180.) - endif - else - if (rnewlon > 180.) then - storm(ist)%tcv_lonew = 'W' - rnewlon = 180. - abs(rnewlon - 180.) - endif - endif - - storm(ist)%tcv_lat = int ((rnewlat + 0.05) * 10.) - storm(ist)%tcv_lon = int ((rnewlon + 0.05) * 10.) - storm(ist)%tcv_yymmdd = ymd_now - storm(ist)%tcv_hhmm = hhmm_now - - endif - - ist = ist + 1 - - enddo -c - return - end - -c -c--------------------------------------------------------------------- -c -c--------------------------------------------------------------------- - subroutine delete_old_dups (storm,inowct,ioldct,okstorm) -c -c ABSTRACT: The purpose of this subroutine is to loop through the -c list of storms for the dtg from 3h or 6h ago to eliminate any -c duplicates. Be sure to sort based on storm identifier (e.g., -c 13L) instead of storm name, since the name may change (e.g., -c from "THIRTEEN" to "IRIS") for an upgrade in intensity, but the -c storm number identifier will remain the same. -c -c ict Total number of storm card entries for this dtg -c - USE def_vitals; USE set_max_parms -c - type (tcvcard) storm(maxstorm) - logical okstorm(maxstorm) - character found_dup*1 -c - ist = inowct + 1 - iend = inowct + ioldct - do while (ist < iend) - - isortnum = ist + 1 - found_dup = 'n' - if (okstorm(ist)) then - - do while (isortnum <= iend .and. found_dup == 'n') - - if (storm(ist)%tcv_storm_id == storm(isortnum)%tcv_storm_id) - & then - found_dup = 'y' - endif - isortnum = isortnum + 1 - - enddo - - endif - - if (found_dup == 'y') then - okstorm(ist) = .FALSE. - endif - - ist = ist + 1 - - enddo - -c NOTE: The last member of the array to be checked is okay, -c since all potential duplicates for this record were eliminated -c in the previous sort while loop just completed, and, further, -c the last member of this array is either already FALSE (from -c being checked off in delete_old), or it's TRUE because it -c didn't get checked off in delete_old, so keep it. 
- - return - end -c -c--------------------------------------------------------------------- -c -c--------------------------------------------------------------------- - subroutine delete_old (storm,inowct,ioldct,okstorm) -c -c ABSTRACT: This subroutine compares the list of storm card entries -c from 3h or 6h ago to those from the current time to eliminate -c any matching storms (i.e., if we've got a current record for a -c storm, we obviously don't need the old one). -c - USE def_vitals; USE set_max_parms -c - type (tcvcard) storm(maxstorm) -c - logical okstorm(maxstorm) - character found_dup*1 -c - ist = inowct + 1 - iend = inowct + ioldct - do while (ist <= iend) - - isortnum = 1 - found_dup = 'n' - do while (isortnum <= inowct .and. found_dup == 'n') - - if (storm(ist)%tcv_storm_id == storm(isortnum)%tcv_storm_id) - & then - found_dup = 'y' - endif - isortnum = isortnum + 1 - - enddo - - if (found_dup == 'y') then - okstorm(ist) = .FALSE. - endif - - ist = ist + 1 - - enddo - - return - end - -c--------------------------------------------------------------------- -c -c--------------------------------------------------------------------- - subroutine delete_future (storm,iprevct,ifuturect,okstorm) -c -c ABSTRACT: This subroutine compares the list of storm card entries -c from 3h or 6h ahead to those from the current time and from 3h or -c 6h ago to eliminate any matching storms (i.e., we only need the -c record for the future time if we don't have either a current time -c record or an old record that we've updated). -c - USE def_vitals; USE set_max_parms -c - type (tcvcard) storm(maxstorm) -c - logical okstorm(maxstorm) - character found_dup*1 -c - ist = iprevct + 1 - iend = iprevct + ifuturect - do while (ist <= iend) - - isortnum = 1 - found_dup = 'n' - do while (isortnum <= iprevct .and. found_dup == 'n') - - if (storm(ist)%tcv_storm_id == storm(isortnum)%tcv_storm_id) - & then - found_dup = 'y' - endif - isortnum = isortnum + 1 - - enddo - - if (found_dup == 'y') then - okstorm(ist) = .FALSE. - endif - - ist = ist + 1 - - enddo - - return - end - -c -c--------------------------------------------------------------------- -c -c--------------------------------------------------------------------- - subroutine delete_future_dups (storm,iprevct,ifuturect,okstorm) -c -c ABSTRACT: The purpose of this subroutine is to loop through the -c list of storms for the dtg from 3h or 6h ahead to eliminate any -c duplicates. Be sure to sort based on storm identifier (e.g., -c 13L) instead of storm name, since the name may change (e.g., -c from "THIRTEEN" to "IRIS") for an upgrade in intensity, but the -c storm number identifier will remain the same. -c -c ict Total number of storm card entries for this dtg -c - USE def_vitals; USE set_max_parms -c - type (tcvcard) storm(maxstorm) - logical okstorm(maxstorm) - character found_dup*1 -c - ist = iprevct + 1 - iend = iprevct + ifuturect - do while (ist < iend) - - isortnum = ist + 1 - found_dup = 'n' - if (okstorm(ist)) then - - do while (isortnum <= iend .and. found_dup == 'n') - - if (storm(ist)%tcv_storm_id == storm(isortnum)%tcv_storm_id) - & then - found_dup = 'y' - endif - isortnum = isortnum + 1 - - enddo - - endif - - if (found_dup == 'y') then - okstorm(ist) = .FALSE. 
- endif - - ist = ist + 1 - - enddo - -c NOTE: The last member of the array to be checked is okay, -c since all potential duplicates for this record were eliminated -c in the previous sort while loop just completed, and, further, -c the last member of this array is either already FALSE (from -c being checked off in delete_future), or it's TRUE because it -c didn't get checked off in delete_future, so keep it. - - return - end -c -c--------------------------------------------------------------------- -c -c--------------------------------------------------------------------- - subroutine delete_dups (storm,ict,okstorm) -c -c ABSTRACT: The purpose of this subroutine is to loop through the -c list of storms for the current dtg to eliminate any duplicates. -c Be sure to sort based on storm identifier (e.g.,13L) instead of -c storm name, since the name may change (e.g., from "THIRTEEN" to -c "IRIS") for an upgrade in intensity, but the storm number -c identifier will remain the same. -c -c ict Total number of storm card entries for this dtg -c - USE def_vitals; USE set_max_parms -c - type (tcvcard) storm(maxstorm) - logical okstorm(maxstorm) - character found_dup*1 -c - ist = 1 - do while (ist < ict) - - isortnum = ist + 1 - found_dup = 'n' - do while (isortnum <= ict .and. found_dup == 'n') - - if (storm(ist)%tcv_storm_id == storm(isortnum)%tcv_storm_id) - & then - found_dup = 'y' - endif - isortnum = isortnum + 1 - - enddo - - if (found_dup == 'y') then - okstorm(ist) = .FALSE. - endif - - ist = ist + 1 - - enddo - -c Now set the last member of the array to be checked as okay, -c since all potential duplicates for this record were eliminated -c in the previous sort while loop just completed. - - okstorm(ict) = .TRUE. -c - return - end -c -c--------------------------------------------------------------------- -c -c--------------------------------------------------------------------- - subroutine read_nlists (dnow,dold,dfuture,vit_hr_incr) -c -c ABSTRACT: Read in the namelists that contain the date for the -c current time, the time from 3h or 6h ago, and the time from 3h -c or 6h ahead . It also converts the input dates for the current -c time, the old time and the future time into a format that can -c be easily compared against the dates in the TC Vitals file. 
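c
c     For illustration only (hypothetical namelist values): a date of
c     yy=99, mm=04, dd=14, hh=06 is packed by the statements at the
c     end of this routine as
c       ymd_now  = 99*10000 + 4*100 + 14 = 990414
c       hhmm_now =  6*100                =    600
c     which can then be compared directly against the tcv_yymmdd and
c     tcv_hhmm fields read from each TC Vitals card in read_tcv_file.
c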
-c - USE inparms; USE date_checks -c - type (datecard) dnow,dold,dfuture -c - integer vit_hr_incr -c - namelist/datenowin/dnow - namelist/dateoldin/dold - namelist/datefuturein/dfuture - namelist/hourinfo/vit_hr_incr -c - read (5,NML=datenowin,END=801) - 801 continue - read (5,NML=dateoldin,END=803) - 803 continue - read (5,NML=datefuturein,END=805) - 805 continue - read (5,NML=hourinfo,END=807) - 807 continue -c - ymd_now = dnow%yy * 10000 + dnow%mm * 100 + dnow%dd - hhmm_now = dnow%hh * 100 - ymd_old = dold%yy * 10000 + dold%mm * 100 + dold%dd - hhmm_old = dold%hh * 100 - ymd_future = dfuture%yy * 10000 + dfuture%mm * 100 + dfuture%dd - hhmm_future = dfuture%hh * 100 -c - return - end -c -c---------------------------------------------------------------------- -c -c---------------------------------------------------------------------- - integer function char2int (charnum) -c -c This function takes as input a character numeral and -c returns the integer equivalent -c - character*1 charnum,cx(10) - data cx/'0','1','2','3','4','5','6','7','8','9'/ -c - do i=1,10 - if (charnum.eq.cx(i)) char2int = i-1 - enddo -c - return - end -c -c---------------------------------------------------------------------- -c -c---------------------------------------------------------------------- - character function int2char (inum) -c -c This function takes as input an integer and -c returns the character numeral equivalent -c - character*1 cx(10) - data cx/'0','1','2','3','4','5','6','7','8','9'/ -c - do i=1,10 - ihold=i-1 - if (ihold.eq.inum) int2char = cx(i) - enddo -c - return - end diff --git a/sorc/supvit.fd/supvit_modules.f b/sorc/supvit.fd/supvit_modules.f deleted file mode 100755 index 9172af58db..0000000000 --- a/sorc/supvit.fd/supvit_modules.f +++ /dev/null @@ -1,52 +0,0 @@ - module def_vitals - type tcvcard ! Define a new type for a TC Vitals card - character*4 tcv_center ! Hurricane Center Acronym - character*3 tcv_storm_id ! Storm Identifier (03L, etc) - character*9 tcv_storm_name ! Storm name - integer tcv_century ! 2-digit century id (19 or 20) - integer tcv_yymmdd ! Date of observation - integer tcv_hhmm ! Time of observation (UTC) - integer tcv_lat ! Storm Lat (*10), always >0 - character*1 tcv_latns ! 'N' or 'S' - integer tcv_lon ! Storm Lon (*10), always >0 - character*1 tcv_lonew ! 'E' or 'W' - integer tcv_stdir ! Storm motion vector (in degr) - integer tcv_stspd ! Spd of storm movement (m/s*10) - character*85 tcv_chunk ! Remainder of vitals record; - ! will just be read & written - end type tcvcard - end module def_vitals -c - module inparms - type datecard ! Define a new type for the input namelist parms - sequence - integer yy ! Beginning yy of date to search for - integer mm ! Beginning mm of date to search for - integer dd ! Beginning dd of date to search for - integer hh ! Beginning hh of date to search for - end type datecard - end module inparms -c - module date_checks - integer, save :: ymd_now,hhmm_now,ymd_old,hhmm_old - & ,ymd_future,hhmm_future - end module date_checks -c - module set_max_parms - integer, parameter :: maxstorm=400 ! max # of storms pgm can - ! handle - end module set_max_parms -c - module trig_vals - real, save :: pi, dtr, rtd - real, save :: dtk = 111194.9 ! Dist (m) over 1 deg lat - ! using erad=6371.0e+3 - real, save :: erad = 6371.0e+3 ! Earth's radius (m) - real, save :: ecircum = 40030200 ! Earth's circumference - ! (m) using erad=6371.e3 - real, save :: omega = 7.292e-5 - real, save :: secphr = 3600. 
- end module trig_vals -c -c------------------------------------------------------ -c diff --git a/sorc/syndat_getjtbul.fd/getjtbul.f b/sorc/syndat_getjtbul.fd/getjtbul.f deleted file mode 100755 index c6e93f752b..0000000000 --- a/sorc/syndat_getjtbul.fd/getjtbul.f +++ /dev/null @@ -1,248 +0,0 @@ -C$$$ MAIN PROGRAM DOCUMENTATION BLOCK -C -C MAIN PROGRAM: SYNDAT_GETJTBUL RETRIEVES JTWC BULLETINS FROM TANK -C PRGMMR: STOKES ORG: NP23 DATE: 2013-02-22 -C -C ABSTRACT: RETRIEVES TROPICAL CYCLONE POSITION AND INTENSITY -C INFORMATION FROM JOINT TYPHOON WARNING CENTER/FNMOC. THESE -C BULLETINS COME IN TWO PIECES. THIS PROGRAM READS THEM AND -C JOINS THEM TOGETHER. THIS ALLOWS THE DOWNSTREAM PROGRAM -C QCTROPCY TO PROCESS THEM. -C -C PROGRAM HISTORY LOG: -C 1997-06-23 S. J. LORD ---- ORIGINAL AUTHOR -C 1998-11-24 D. A. KEYSER -- FORTRAN 90/Y2K COMPLIANT -C 1998-12-30 D. A. KEYSER -- MODIFIED TO ALWAYS OUTPUT RECORDS -C CONTAINING A 4-DIGIT YEAR (REGARDLESS OF INPUT) -C 2000-03-09 D. A. KEYSER -- MODIFIED TO RUN ON IBM-SP; CORRECTED -C PROBLEM FROM EARLIER CRAY VERSION WHICH RESULTED -C IN AN INCORRECT JOINING OF PIECES IF THE SAME -C 2-PIECE BULLETIN IS DUPLICATED IN THE ORIGINAL FILE -C THAT IS READ IN BY THIS PROGRAM -C 2013-02-22 D. C. STOKES -- MINOR DOC CHANGES. (WCOSS TRANSIITON) -C -C USAGE: -C INPUT FILES: -C UNIT 11 - FILE CONTAINING JTWC/FNMOC BULLETINS -C -C OUTPUT FILES: -C UNIT 06 - STANDARD OUTPUT PRINT -C UNIT 51 - FILE CONTAINING JTWC/FNMOC BULLETINS NOW JOINED -C TOGETHER -C -C SUBPROGRAMS CALLED: -C UNIQUE: - NONE -C LIBRARY: -C W3NCO - W3TAGB W3TAGE ERREXIT -C -C EXIT STATES: -C COND = 0 - SUCCESSFUL RUN, DATA RETRIEVED -C = 1 - SUCCESSFUL RUN -- NO DATA RETRIEVED -C = 20 - TROUBLE - EITHER READ ERROR WITHIN PROGRAM OR -C NUMBER OF RECORDS IN INPUT FILE EXCEEDS PROGRAM -C LIMIT. -C -C REMARKS: THE Y2K-COMPLIANT VERSION IS SET-UP TO READ RECORDS WITH -C EITHER A 2-DIGIT YEAR STARTING IN COLUMN 20 OR A 4-DIGIT -C YEAR STARTING IN COLUMN 20. THIS WILL ALLOW THIS PROGRAM -C TO RUN PROPERLY WHEN JTWC/FNMOC TRANSITIONS RECORDS TO -C A 4-DIGIT YEAR. -C -C ATTRIBUTES: -C LANGUAGE FORTRAN 90 -C MACHINE: IBM SP and IBM iDataPlex -C -C$$$ - PROGRAM SYNDAT_GETJTBUL - - PARAMETER (NBULS=200) - - CHARACTER*1 INL1(80) - CHARACTER*9 STNAME - CHARACTER*18 HEAD(NBULS),CHEKHED - CHARACTER*37 ENDMSG - CHARACTER*80 INL,INLS(NBULS) - CHARACTER*80 DUMY2K - CHARACTER*95 OUTL - - INTEGER LINE(NBULS) - - EQUIVALENCE (INL1,INL) - - DATA IIN/11/,IOUT/51/,LINE/NBULS*0/ - - CALL W3TAGB('SYNDAT_GETJTBUL',2013,0053,0050,'NP23 ') - - WRITE(6,*) ' ' - WRITE(6,*) '===> WELCOME TO SYNDAT_GETJTBUL - F90/Y2K VERSION ', - $ '02-22-2013' - WRITE(6,*) ' ' - WRITE(6,*) ' ' - - NLINE = 0 - - DO N=1,NBULS - INL1=' ' - READ(IIN,2,END=100,ERR=200) INL - 2 FORMAT(A80) - NLINE = N - -C AT THIS POINT WE DO NOT KNOW IF A 2-DIGIT YEAR BEGINS IN COLUMN 20 -c OF THE RECORD (OLD NON-Y2K COMPLIANT FORM) OR IF A 4-DIGIT YEAR -c BEGINS IN COLUMN 20 (NEW Y2K COMPLIANT FORM) - TEST ON LOCATION OF -c LATITUDE BLANK CHARACTER TO FIND OUT ... - - IF(INL1(26).EQ.' ') THEN - -c ... THIS RECORD STILL CONTAINS THE OLD 2-DIGIT FORM OF THE YEAR - -c ... 
THIS PROGRAM WILL NOW CONVERT THE RECORD TO A 4-DIGIT YEAR USING -c THE "WINDOWING" TECHNIQUE SINCE SUBSEQUENT LOGIC EXPECTS THIS - - PRINT *, ' ' - PRINT *, '==> This is an old-format record with a 2-digit ', - $ 'year "',INL(20:21),'"' - PRINT *, ' ' - DUMY2K(1:19) = INL(1:19) - IF(INL(20:21).GT.'20') then - DUMY2K(20:21) = '19' - ELSE - DUMY2K(20:21) = '20' - ENDIF - DUMY2K(22:80) = INL(20:80) - INL= DUMY2K - PRINT *, ' ' - PRINT *, '==> 2-digit year converted to 4-digit year "', - $ INL(20:23),'" via windowing technique' - PRINT *, ' ' - - ELSE - -c ... THIS RECORD CONTAINS THE NEW 4-DIGIT FORM OF THE YEAR -c ... NO CONVERSION NECESSARY SINCE THIS SUBSEQUENT LOGIC EXPECTS THIS - - PRINT *, ' ' - PRINT *, '==> This is an new-format record with a 4-digit ', - $ 'year "',INL(20:23),'"' - PRINT *, ' ' - PRINT *, '==> No conversion necessary' - PRINT *, ' ' - end if - - WRITE(6,3) NLINE,INL - 3 FORMAT(' ...Bulletin line number',I4,' is....',A80,'...') - INLS(NLINE)=INL - HEAD(NLINE)=INL(1:18) - WRITE(6,4) NLINE,HEAD(NLINE) - 4 FORMAT(' ... Header for line number',I4,' is ...',A18,'...') - ENDDO - -C Come here if no. of records in input file exceeds pgm limit ("NBULS") -C --------------------------------------------------------------------- - - WRITE(6,301) NBULS - 301 FORMAT(' **** Number of records in input File exceeds program ', - $ 'limit of',I4,'. Abort') - ICODE=20 - ENDMSG='SYNDAT_GETJTBUL TERMINATED ABNORMALLY' - GO TO 900 - - 100 CONTINUE - -C All records read in -C ------------------- - - IF(NLINE.EQ.0) THEN - -C Come here if ZERO records were read from input file -C --------------------------------------------------- - - ICODE=1 - WRITE(6,101) - 101 FORMAT(' ...No Bulletins available.') - ENDMSG='SYNDAT_GETJTBUL TERMINATED NORMALLY ' - GO TO 900 - ENDIF - - IF(MOD(NLINE,2).NE.0) THEN - -C Come here if number of records read was not even -C ------------------------------------------------ - - WRITE(6,111) NLINE - 111 FORMAT(' **** Number of records read in (=',I4,') is not ', - $ 'even. Abort') - ICODE=20 - ENDMSG='SYNDAT_GETJTBUL TERMINATED ABNORMALLY' - GO TO 900 - ENDIF - - PRINT *, ' ' - PRINT *, ' ' - NBULT=NLINE/2 - NBUL=0 - LOOP1: DO NL=1,NLINE - IF(LINE(NL).EQ.1) CYCLE LOOP1 - CHEKHED=HEAD(NL) - IFND = 0 - LOOP1n1: DO NB=NL+1,NLINE - IF(LINE(NB).EQ.1) CYCLE LOOP1n1 - NBSAV=NB - WRITE(6,11) CHEKHED,INLS(NB)(1:18) - 11 FORMAT(' ...message parts are ...',A18,'...',A18,'...') - IF(CHEKHED .EQ. INLS(NB)(1:18)) THEN - LINE(NL) = 1 - LINE(NB) = 1 - IFND = 1 - EXIT LOOP1n1 - ENDIF - ENDDO LOOP1n1 - IF(IFND.EQ.1) THEN - WRITE(6,131) INLS(NL)(10:10) - 131 FORMAT(' ...inls(nl)(10:10)=',A1,'...') - IF(INLS(NL)(10:10).eq.' ') THEN - LOOP 1n2: DO IB=11,18 - IS=IB - IF(INLS(NL)(IS:IS).NE.' ') EXIT LOOP 1n2 - ENDDO LOOP 1n2 - STNAME=' ' - STNAME=INLS(NL)(IS:18) - INLS(NL)(10:18)=STNAME - ENDIF - OUTL=INLS(NL)(1:66)//INLS(NBSAV)(33:61) - WRITE(6,145) OUTL - 145 FORMAT(' ...Complete bulletin is ...',A95,'...') - WRITE(IOUT,22) OUTL - 22 FORMAT(A95) - NBUL=NBUL+1 - ENDIF - IF(NBUL .EQ. NBULT) GO TO 150 - ENDDO LOOP1 - - 150 CONTINUE - WRITE(6,151) NBUL - 151 FORMAT(' ...',I4,' bulletins have been made.') - ICODE=0 - ENDMSG='SYNDAT_GETJTBUL TERMINATED NORMALLY ' - GO TO 900 - - 200 continue - -C Come here if error reading a record from input file -C --------------------------------------------------- - - WRITE(6,201) - 201 FORMAT(' **** ERROR READING RECORD FROM INPUT FILE. 
ABORT') - ICODE=20 - ENDMSG='SYNDAT_GETJTBUL TERMINATED ABNORMALLY' - - 900 CONTINUE - - WRITE(6,*) ENDMSG - - CALL W3TAGE('SYNDAT_GETJTBUL') - - IF(ICODE.GT.0) CALL ERREXIT(ICODE) - - STOP - - END diff --git a/sorc/syndat_getjtbul.fd/makefile b/sorc/syndat_getjtbul.fd/makefile deleted file mode 100755 index 3ac5730f31..0000000000 --- a/sorc/syndat_getjtbul.fd/makefile +++ /dev/null @@ -1,23 +0,0 @@ -SHELL= /bin/sh -#LIBS= -L/nwprod/lib -lw3nco_v2.0.5_4 -#LIBS= -L/contrib/nceplibs/nwprod/lib -lw3nco_v2.0.5_4 -FC= ifort -#DEBUG = -ftrapuv -check all -fp-stack-check -fstack-protector -##DEBUG = -ftrapuv -fp-stack-check -fstack-protector -FFLAGS= -O3 -g -traceback -assume noold_ldout_format $(DEBUG) -LDFLAGS= -SRCS= getjtbul.f -OBJS= getjtbul.o -CMD= syndat_getjtbul - -all: $(CMD) - -$(CMD): $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(LIBS_SYN_GET) - -clean: - -rm -f $(OBJS) - -install: - -mv $(CMD) ../../exec/$(CMD) - diff --git a/sorc/syndat_maksynrc.fd/makefile b/sorc/syndat_maksynrc.fd/makefile deleted file mode 100755 index 9adcb17e26..0000000000 --- a/sorc/syndat_maksynrc.fd/makefile +++ /dev/null @@ -1,21 +0,0 @@ -SHELL= /bin/sh -#LIBS= -L/nwprod/lib -lw3nco_v2.0.5_4 -lbacio_v2.0.1_4 -##LIBS_SYN_MAK= -L/contrib/nceplibs/nwprod/lib -lw3nco_v2.0.5_4 -lbacio_v2.0.1_4 -FC= ifort -#DEBUG = -ftrapuv -check all -check nooutput_conversion -fp-stack-check -fstack-protector -FFLAGS= -O3 -g -traceback -assume noold_ldout_format $(DEBUG) -LDFLAGS= -SRCS= maksynrc.f -OBJS= maksynrc.o -CMD= syndat_maksynrc - -all: $(CMD) - -$(CMD): $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(LIBS_SYN_MAK) - -clean: - -rm -f $(OBJS) - -install: - mv $(CMD) ../../exec/$(CMD) diff --git a/sorc/syndat_maksynrc.fd/maksynrc.f b/sorc/syndat_maksynrc.fd/maksynrc.f deleted file mode 100755 index dca5de2575..0000000000 --- a/sorc/syndat_maksynrc.fd/maksynrc.f +++ /dev/null @@ -1,472 +0,0 @@ -C$$$ MAIN PROGRAM DOCUMENTATION BLOCK -C -C MAIN PROGRAM: SYNDAT_MAKSYNRC MAKE SYNDAT RECORD FROM HUMAN INPUT -C PRGMMR: STOKES ORG: NP23 DATE: 2013-03-15 -C -C ABSTRACT: QUERIES HUMAN INPUT FOR INFORMATION TO CONSTRUCT TROPICAL -C CYCLONE SYNTHETIC DATA RECORD AND WRITES RECORD TO FORTRAN -C UNIT 51 -C -C PROGRAM HISTORY LOG: -C 1997-06-26 S. J. LORD ---- ORIGINAL AUTHOR -C 1998-11-23 D. A. KEYSER -- FORTRAN 90 AND Y2K COMPLIANT -C 1998-12-30 D. A. KEYSER -- MODIFIED TO OUTPUT RECORDS CONTAINING A -C 4-DIGIT YEAR -C 2000-03-03 D. A. KEYSER -- CONVERTED TO RUN ON IBM-SP MACHINE -C 2013-03-15 D. C. STOKES -- Modified some stdout writes to display -C cleanly as part of WCOSS transition. 
-C -C USAGE: -C INPUT FILES: -C UNIT 05 - INPUT FILE FOR HUMAN (KEYBOARD ENTRY) -C -C OUTPUT FILES: -C UNIT 06 - STANDARD OUTPUT PRINT -C UNIT 51 - SYNTHETIC DATA RECORD (ONE PER RUN) -C -C SUBPROGRAMS CALLED: -C UNIQUE: - BEGINE ENDE MAKVIT NSEW -C LIBRARY: -C W3LIB: - W3TAGB W3TAGE -C -C EXIT STATES: -C COND = 0 - SUCCESSFUL RUN -C -C REMARKS: -C -C ATTRIBUTES: -C LANGUAGE FORTRAN 90 -C MACHINE: IBM-SP, IBM-iDataPlex -C -C$$$ - program SYNDAT_MAKSYNRC - logical fstflg - character rsmc*4,stmnam*9,stmid*3 - data iuntvi/51/,fstflg/.false./ - - CALL W3TAGB('SYNDAT_MAKSYNRC',2013,0074,0000,'NP23 ') - - write(6,*) "Welcome to the Synthetic Data Record Maker" - write(6,*) "+++ FORTRAN 90 / Y2K VERSION +++" - write(6,*) "+++ 03 March 2000 +++" - write(6,*) "Please follow all directions carefully, paying" - write(6,*) "careful attention to the Units as units" - write(6,*) "conversions are hardwired" - - call begine - write(6,*) 'Enter Storm Name (UPPER CASE)' - read(5,1) stmnam - 1 format(a) - write(6,2) stmnam - 2 format(' Storm name is:',a9) - call ende - - call begine - write(6,*) 'Enter Storm Identifier (e.g. 03P)' - read(5,11) stmid - 11 format(a) - write(6,12) stmid - 12 format(' Storm Identifier is:',a3) - call ende - - call begine - write(6,*) 'Enter Organization ID (e.g. NHC, JTWC)' - read(5,11) rsmc - write(6,13) rsmc - 13 format(' Organization Identifier is:',a4) - call ende - - call begine - write(6,*) 'Enter date (yyyymmdd)' - read(5,*) idate - write(6,*) 'Date is: ',idate - call ende - - call begine - write(6,*) 'Enter hour (hh)' - read(5,*) ihour - iutc=ihour*100 - write(6,*) 'Hour is: ',ihour - call ende - - call begine - write(6,*) 'Enter storm latitude (negative for south)' - read(5,*) stmlat - write(6,'(x,a,f5.1)') 'Storm latitude is: ',stmlat - call ende - - call begine - write(6,*) 'Enter storm longitude (DEG EAST)' - read(5,*) stmlon - write(6,'(x,a,f5.1)') 'Storm longitude is: ',stmlon - call ende - - call begine - write(6,*) 'Enter storm direction (DEG FROM NORTH)' - read(5,*) stmdir - write(6,'(x,a,f4.0)') 'Storm direction is: ',stmdir - call ende - - call begine - write(6,*) 'Enter storm speed (KNOTS)' - read(5,*) stmspd - write(6,'(x,a,f6.2)') 'Storm speed is: ',stmspd - stmspd=stmspd/1.94 - call ende - - call begine - write(6,*) 'Enter storm central pressure (MB)' - read(5,*) pcen - write(6,'(x,a,f5.0)') 'Storm central pressure is: ',pcen - call ende - - call begine - write(6,*) 'Enter storm environmental pressure (MB)' - read(5,*) penv - write(6,'(x,a,f5.0)') 'Storm environmental pressure is: ',penv - call ende - - call begine - write(6,*) 'Enter estimated maximum wind (KNOTS)' - read(5,*) vmax - write(6,'(x,a,f4.0)') 'Estimated maximum wind (KNOTS) is: ',vmax - vmax=vmax/1.94 - call ende - - call begine - write(6,*) 'Enter estimated radius of outermost closed ', - 1'isobar (ROCI), i.e. size of the storm circulation (KM)' - read(5,*) rmax - write(6,'(x,a,f5.0)') 'Estimated ROCI (KM) is: ',rmax - call ende - - call begine - write(6,*) 'Enter estimated radius of maximum wind (KM)' - read(5,*) rmw - write(6,'(x,a,f5.0)') - 1 'Estimated radius of maximum wind (KM) is: ',rmw - call ende - - call begine - call nsew - write(6,*) 'Enter estimated radius of 15 m/s (35 knot) winds (KM)' - write(6,*) - 1 'in each each of the following quadrants (e.g. 
290 222 200 180)' - write(6,*) 'Note: numbers must be separated by blanks' - write(6,*) 'Note: numbers must be in the order NE SE SW NW and be' - 1 ,' separated by blanks' - write(6,*) 'Note: enter all negative numbers to denote no ', - 1'estimate' - read(5,*) r15ne,r15se,r15sw,r15nw - write(6,'(x,a,4f8.0)') - 1 'Estimated radius of 15 m/s (35 knot) winds is: ', - 2 r15ne,r15se,r15sw,r15nw - call ende - - call begine - call nsew - write(6,*) 'Enter estimated radius of 26 m/s (55 knot) winds (KM)' - write(6,*) - 1 'in each each of the following quadrants (e.g. 50 50 50 50)' - write(6,*) 'Note: numbers must be separated by blanks' - write(6,*) 'Note: numbers must be in the order NE SE SW NW and be' - 1'separated by blanks' - write(6,*) 'Note: enter all negative numbers to denote no ', - 1'estimate' - read(5,*) r26ne,r26se,r26sw,r26nw - write(6,'(x,a,4f8.0)') - 1 'Estimated radius of 26 m/s (35 knot) winds is: ', - 2 r26ne,r26se,r26sw,r26nw - call ende - - call begine - write(6,*) 'Enter estimated top of cyclonic circulation (mb)' - read(5,*) ptop - write(6,'(x,a,f7.1)') - 1 'Estimated top of cyclonic circulation (mb) is: ',ptop - call ende - - call begine - write(6,*) 'Enter estimated latitude at maximum forecast time ' - write(6,*) '(negative for south)' - write(6,*) 'Note: enter -99.0 to denote no estimate' - read(5,*) fclat - write(6,'(x,a,f5.1)') - 1 'Estimated latitude at maximum forecast time is: ', fclat - call ende - - call begine - write(6,*) 'Enter estimated longitude at maximum forecast time ' - write(6,*) '(DEG EAST)' - write(6,*) 'Note: enter a negative number to denote no estimate' - read(5,*) fclon - write(6,'(x,a,f5.1)') - 1 'Estimated longitude at maximum forecast time is: ', fclon - call ende - - call begine - write(6,*) 'Enter maximum forecast time (hours, e.g. 72)' - write(6,*) 'Note: enter a negative number to denote no estimate' - read(5,*) fcstp - write(6,'(x,a,f4.0)') 'Maximum forecast time is: ',fcstp - call ende - - CALL MAKVIT(IUNTVI,IDATE,IUTC,STMLAT,STMLON,STMDIR,STMSPD, - 1 PCEN,PENV,RMAX,VMAX,RMW,R15NE,R15SE,R15SW, - 2 R15NW,PTOP,STMNAM,STMID,RSMC,FSTFLG,r26ne, - 3 r26se,r26sw,r26nw,fcstp,fclat,fclon) - - CALL W3TAGE('SYNDAT_MAKSYNRC') - stop - end - SUBROUTINE BEGINE - write(6,1) - 1 format(' ') - write(6,11) - 11 format(' *******************************************************') - return - end - - SUBROUTINE ENDE - write(6,1) - 1 format(' *******************************************************') - write(6,11) - 11 format(' ') - return - end -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: MAKVIT CREATES TROP. CYCLONE VITAL. STAT. DATA -C PRGMMR: D. A. KEYSER ORG: NP22 DATE: 1998-12-30 -C -C ABSTRACT: CREATES TROPICAL CYCLONE VITAL STATISTICS RECORDS FROM -C RAW INFORMATION SUCH AS LATITUDE, LONGITUDE, MAX. WINDS ETC. -C -C PROGRAM HISTORY LOG: -C 1991-06-06 S. J. LORD ---- ORIGINAL AUTHOR -C 1998-11-23 D. A. KEYSER -- FORTRAN 90 AND Y2K COMPLIANT -C 1998-12-30 D. A. KEYSER -- MODIFIED TO OUTPUT RECORDS CONTAINING A -C 4-DIGIT YEAR -C -C USAGE: CALL PGM-NAME(INARG1, INARG2, WRKARG, OUTARG1, ... ) -C INPUT ARGUMENT LIST: -C INARG1 - GENERIC DESCRIPTION, INCLUDING CONTENT, UNITS, -C INARG2 - TYPE. EXPLAIN FUNCTION IF CONTROL VARIABLE. -C -C OUTPUT ARGUMENT LIST: (INCLUDING WORK ARRAYS) -C WRKARG - GENERIC DESCRIPTION, ETC., AS ABOVE. 
-C OUTARG1 - EXPLAIN COMPLETELY IF ERROR RETURN -C ERRFLAG - EVEN IF MANY LINES ARE NEEDED -C -C INPUT FILES: (DELETE IF NO INPUT FILES IN SUBPROGRAM) -C DDNAME1 - GENERIC NAME & CONTENT -C -C OUTPUT FILES: (DELETE IF NO OUTPUT FILES IN SUBPROGRAM) -C DDNAME2 - GENERIC NAME & CONTENT AS ABOVE -C FT06F001 - INCLUDE IF ANY PRINTOUT -C -C REMARKS: LIST CAVEATS, OTHER HELPFUL HINTS OR INFORMATION -C -C ATTRIBUTES: -C LANGUAGE: FORTRAN 90 -C MACHINE: CRAY, SGI -C -C$$$ - SUBROUTINE MAKVIT(IUNTVI,IDATE,IUTC,STMLAT,STMLON,STMDIR,STMSPD, - 1 PCEN,PENV,RMAX,VMAX,RMW,R15NE,R15SE,R15SW, - 2 R15NW,PTOP,STMNAM,STMID,RSMC,FSTFLG,r26ne, - 3 r26se,r26sw,r26nw,fcstp,fclat,fclon) -C - SAVE -C - CHARACTER *(*) RSMC,STMNAM,STMID - LOGICAL FSTFLG -C - PARAMETER (MAXCHR=129) - PARAMETER (MAXVIT=22) - PARAMETER (MAXTPC= 3) -C - CHARACTER BUFIN*1,RSMCZ*4,STMIDZ*3,STMNMZ*9,FSTFLZ*1,STMDPZ*1, - 1 SHALO*1,MEDIUM*1, - 2 DEEP*1,LATNS*1,LONEW*1,FMTVIT*6,FMTMIS*4,BUFINZ*129, - 3 RELOCZ*1,STMTPC*1,EXE*1, - 7 latnsf,lonewf -C - DIMENSION IVTVAR(MAXVIT),VITVAR(MAXVIT),VITFAC(MAXVIT), - 1 ISTVAR(MAXVIT),IENVAR(MAXVIT),STMTOP(0:MAXTPC) -C - DIMENSION BUFIN(MAXCHR),STMTPC(0:MAXTPC),FMTVIT(MAXVIT), - 1 MISSNG(MAXVIT),FMTMIS(MAXVIT) -C - EQUIVALENCE (BUFIN(1),RSMCZ),(BUFIN(5),RELOCZ),(BUFIN(6),STMIDZ), - 1 (BUFIN(10),STMNMZ),(BUFIN(19),FSTFLZ), - 2 (BUFIN(37),LATNS),(BUFIN(43),LONEW), - 3 (BUFIN(95),STMDPZ),(BUFIN(1),BUFINZ), - 4 (BUFIN(123),LATNSF),(BUFIN(129),LONEWF) -C - EQUIVALENCE (IVTVAR(1),IDATEZ),(IVTVAR(2),IUTCZ) -C - EQUIVALENCE (VITVAR( 3),STMLTZ),(VITVAR( 4),STMLNZ), - 1 (VITVAR( 5),STMDRZ),(VITVAR( 6),STMSPZ), - 2 (VITVAR( 7),PCENZ), (VITVAR( 8),PENVZ), - 3 (VITVAR( 9),RMAXZ), (VITVAR(10),VMAXZ), - 4 (VITVAR(11),RMWZ), (VITVAR(12),R15NEZ), - 5 (VITVAR(13),R15SEZ),(VITVAR(14),R15SWZ), - 6 (VITVAR(15),R15NWZ),(VITVAR(16),R26NEZ), - 7 (VITVAR(17),R26SEZ),(VITVAR(18),R26SWZ), - 8 (VITVAR(19),R26NWZ),(VITVAR(20),FCSTPZ), - 9 (VITVAR(21),FCLATZ),(VITVAR(22),FCLONZ) -C - EQUIVALENCE (STMTPC(0), EXE),(STMTPC(1),SHALO),(STMTPC(2),MEDIUM), - 1 (STMTPC(3),DEEP) -C - DATA SHALO/'S'/,MEDIUM/'M'/,DEEP/'D'/,EXE/'X'/, - 2 VITFAC/2*1.0,2*0.1,1.0,0.1,14*1.0,2*0.1/, - 3 FMTVIT/'(I8.8)','(I4.4)','(I3.3)','(I4.4)',2*'(I3.3)', - 4 3*'(I4.4)','(I2.2)','(I3.3)',8*'(I4.4)','(I2.2)', - 5 '(I3.3)','(I4.4)'/, - 6 FMTMIS/'(I8)','(I4)','(I3)','(I4)',2*'(I3)',3*'(I4)', - 7 '(I2)','(I3)',8*'(I4)','(I2)','(I3)','(I4)'/, - 8 MISSNG/-9999999,-999,-99,-999,2*-99,3*-999,-9,-99,8*-999,-9, - 9 -99,-999/, - O ISTVAR/20,29,34,39,45,49,53,58,63,68,71,75,80,85,90, 97,102, - O 107,112,117,120,125/, - 1 IENVAR/27,32,36,42,47,51,56,61,66,69,73,78,83,88,93,100,105, - 1 110,115,118,122,128/, - 3 STMTOP/-99.0,700.,400.,200./ -C - BUFINZ=' ' - RSMCZ=RSMC -cvvvvvy2k - -C NOTE: This program OUTPUTS a record containing a 4-digit year - for -C example: - -C NHC 13L MITCH 19981028 1800 164N 0858W 270 010 0957 1008 0371 51 019 0278 0278 0185 0185 D -C 12345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345 ... -C 1 2 3 4 5 6 7 8 9 ... - -C This program will truncate the integer work containing the -C date in the form yyyymmdd to the form yymmdd prior to writing -C it into the output record. -cppppp - print *, ' ' - print *, ' ' - print *, '==> tcvitals file can now contain a 4-digit year, so ', - $ 'no conversion is necessary since 4-digit year is input' - print *, ' ' - print *, ' ' -cppppp -caaaaay2k - IDATEZ=IDATE - IUTCZ=IUTC - STMNMZ=STMNAM - STMIDZ=STMID - STMLTZ=STMLAT -C - IF(STMLTZ .GE. 
0.0) THEN - LATNS='N' - ELSE - LATNS='S' - STMLTZ=ABS(STMLTZ) - ENDIF -C - IF(STMLON .GE. 180.) THEN - STMLNZ=360.-STMLON - LONEW='W' -C - ELSE - STMLNZ=STMLON - LONEW='E' - ENDIF -C - IF(fclat .GE. 0.0) THEN - fclatz=fclat - latnsf='N' - ELSE if (fclat .gt. -90.) then - latnsf='S' - fclatz=ABS(fclat) -c - else - latnsf='S' - fclatz=-99.9 - ENDIF -C - IF(fclon .GE. 180.) THEN - fclonz=360.-fclon - lonewf='W' -C - ELSE if (fclon .gt. 0.) then - fclonz=fclon - lonewf='E' -c - else - fclonz=-999.9 - lonewf='E' - ENDIF -C - STMDRZ=STMDIR - STMSPZ=STMSPD - PCENZ =PCEN - PENVZ =PENV - RMAXZ =RMAX - VMAXZ =VMAX - RMWZ =RMW - R15NEZ=R15NE - R15SEZ=R15SE - R15SWZ=R15SW - R15NWZ=R15NW - r26nez=r26ne - r26sez=r26se - r26swz=r26sw - r26nwz=r26nw - fcstpz=fcstp -C - FSTFLZ=' ' - IF(FSTFLG) FSTFLZ=':' -C - DO IV=1,2 - IF(IVTVAR(IV) .GE. 0) THEN - WRITE(BUFINZ(ISTVAR(IV):IENVAR(IV)),FMTVIT(IV)) IVTVAR(IV) - ELSE - WRITE(BUFINZ(ISTVAR(IV):IENVAR(IV)),FMTMIS(IV)) MISSNG(IV) - ENDIF - ENDDO -C - DO IV=3,MAXVIT - IF(VITVAR(IV) .GE. 0) THEN - IVTVAR(IV)=NINT(VITVAR(IV)/VITFAC(IV)) - WRITE(BUFINZ(ISTVAR(IV):IENVAR(IV)),FMTVIT(IV)) IVTVAR(IV) - ELSE - WRITE(BUFINZ(ISTVAR(IV):IENVAR(IV)),FMTMIS(IV)) MISSNG(IV) - ENDIF - ENDDO -C - DO ITOP=0,MAXTPC - IF(PTOP .EQ. STMTOP(ITOP)) THEN - STMDPZ=STMTPC(ITOP) - GO TO 31 - ENDIF - ENDDO - - 31 CONTINUE -C - IF(IUNTVI .GT. 0) THEN - WRITE(IUNTVI,41) BUFINZ - 41 FORMAT(A) - WRITE(6,43) BUFINZ - 43 FORMAT(' ...',A,'...') - ELSE - WRITE(6,43) BUFINZ - ENDIF -C - RETURN - END - - SUBROUTINE NSEW - write(6,*) ' Quadrants' - write(6,*) ' NW : NE' - write(6,*) '----------- Order of quadrants: NE SE SW NW' - write(6,*) ' SW : SE' - return - end diff --git a/sorc/syndat_qctropcy.fd/makefile b/sorc/syndat_qctropcy.fd/makefile deleted file mode 100755 index d667c26cbe..0000000000 --- a/sorc/syndat_qctropcy.fd/makefile +++ /dev/null @@ -1,23 +0,0 @@ -SHELL= /bin/sh -#LIBS= -L/nwprod/lib -lw3nco_v2.0.5_8 -##LIBS= -L/contrib/nceplibs/nwprod/lib -lw3nco_v2.0.5_8 -FC= ifort -#DEBUG = -ftrapuv -check all -check noarg_temp_created -fp-stack-check -fstack-protector -## if '-check all' enabled, include '-check noarg_temp_created' to avoid warning msgs indicating -## slight performance hit due to chosen method of passing array arguments to w3difdat -FFLAGS= -O3 -g -traceback -r8 -i8 -assume byterecl -assume noold_ldout_format $(DEBUG) -LDFLAGS= -SRCS= qctropcy.f -OBJS= qctropcy.o -CMD= syndat_qctropcy - -all: $(CMD) - -$(CMD): $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(LIBS_SYN_QCT) - -clean: - -rm -f $(OBJS) - -install: - mv $(CMD) ../../exec/$(CMD) diff --git a/sorc/syndat_qctropcy.fd/qctropcy.f b/sorc/syndat_qctropcy.fd/qctropcy.f deleted file mode 100755 index e6bfadebd4..0000000000 --- a/sorc/syndat_qctropcy.fd/qctropcy.f +++ /dev/null @@ -1,12099 +0,0 @@ -C$$$ MAIN PROGRAM DOCUMENTATION BLOCK -C -C MAIN PROGRAM: SYNDAT_QCTROPCY PERFORMS QC ON TROP. CYCLONE BULLETINS -C PRGMMR: KEYSER ORG: NP22 DATE: 2008-07-10 -C -C ABSTRACT: PERFORMS QUALITY CONTROL ON TROPICAL CYCLONE POSITION -C AND INTENSITY INFORMATION (T. C. VITAL STATISTICS). CHECKS -C PERFORMED ARE: DUPLICATE RECORDS, APPROPRIATE DATE/TIME, PROPER -C RECORD STRUCTURE (BLANKS IN PROPER PLACE AND NO IMPROPER NON- -C INTEGER NUMBERS), STORM NAME/ID NUMBER, RECORDS FROM MULTIPLE -C INSTITUTIONS, SECONDARY VARIABLES (E.G. CENTRAL PRESSURE), -C STORM POSITION AND DIRECTION/SPEED. EMPHASIS IS ON INTERNAL -C CONSISTENCY BETWEEN REPORTED STORM LOCATION AND PRIOR MOTION. -C -C PROGRAM HISTORY LOG: -C 1991-03-27 S. J. 
LORD -C 1991-07-18 S. J. LORD ADDED ROUTINE FSTSTM, MODIFIED ADFSTF -C 1992-01-22 S. J. LORD CHANGED W3FS12,W3FS13 CALLS TO W3FS19, W3FS17 -C 1992-02-19 S. J. LORD ADDED MULTIPLE RSMC CHECK -C 1992-04-09 S. J. LORD CHANGED SLMASK TO T126 FROM T80 -C 1992-05-20 S. J. LORD CORRECTED BUG IN SELACK CALL -C 1992-06-09 J. JOHNSON CHANGED COND=10 TO COND=4 FOR SUCCESSFUL RUN -C BUT WITH EMPTY INPUT FILES -C 1992-07-01 S. J. LORD ADDED DATE CHECK AND REVISED RITCUR -C 1992-07-10 S. J. LORD REVISED STIDCK TO DISMANTLE CONSISTENCY -C CHECKS IN THE CASE OF NUMBERED DEPRESSIONS -C 1992-07-16 S. J. LORD FIXED SOME BUGS IN RSMCCK -C 1992-08-20 S. J. LORD ADDED THE JTWC MEMORIAL SWITCH CHECK -C 1992-08-20 S. J. LORD MODIFIED DUPCHK TO ADD A NEW INPUT UNIT -C 1992-09-04 S. J. LORD ADDED PRESSURE WIND RELATIONSHIP TO SECVCK -C 1992-09-09 S. J. LORD ADDED CENTRAL PACIFIC NAMES AND NAME CHECK -C 1992-09-18 S. J. LORD ADDED CHECK FOR CORRECT MISSING DATA IN READCK -C 1992-10-28 S. J. LORD ADDED GREEK ALPHABET STORM NAMES -C 1992-12-14 S. J. LORD MODIFIED CONSOLE MESSAGE FOR ISTOP=4 -C 1993-03-05 S. J. LORD IMPLEMENTED STORM CATALOG (RCNCIL) -C 1993-03-31 S. J. LORD IMPLEMENTED READING STORM NAMES FROM EXTERNAL -C FILE IN STIDCK -C 1993-04-08 S. J. LORD IMPLEMENTED WEST PACIFIC CLIPER -C 1993-08-25 S. J. LORD ADDER RETURN CODE OF 10 FOR RCNCIL LOGICAL -C ERROR -C 1993-08-25 S. J. LORD UPGRADED STORM ID CHECKING FOR STORMS CHANGING -C 1994-06-20 S. J. LORD MODIFIED MAXCHK FOR THE GFDL FORMAT -C 1996-04-12 S. J. LORD REMOVED CALL TO DRSPCK -C 1997-06-24 S. J. LORD ADDED NEW UNIT FOR MANUALLY ENTERED MESSAGES -C 1998-03-24 S. J. LORD MODIFIED VITDATN.INC AND VITFMTN.INC TO -C RECOGNIZE RSMC ID "NWOC" (THIS HAD BEEN UNRECOGNIZED -C AND HAD CAUSED THE PROGRAM TO STOP 20); REMOVED -C UNINITIALIZED VARIABLES THAT WERE CAUSING COMPILER -C WARNINGS -C 1998-06-05 D.A. KEYSER - FORTRAN 90 AND Y2K COMPLIANT -C 1998-06-18 S.J. LORD - FORTRAN 90 AND Y2K COMPLIANT (vitfmt.inc) -C 1998-08-16 S.J. LORD - FORTRAN 90 AND Y2K COMPLIANT (completed) -C 1998-12-14 D. A. KEYSER - Y2K/F90 COMPLIANCE, STREAMLINED CODE; -C 2000-03-03 D. A. KEYSER - CONVERTED TO RUN ON IBM-SP MACHINE -C 2001-02-07 D. A. KEYSER - EXPANDED TEST STORM ID RANGE FROM 90-99 -C TO 80-99 AT REQUEST FOR JIM GROSS AT TPC {NOTE: IF THIS -C EVER HAS TO BE DONE AGAIN, THE ONLY LINES THAT NEED TO -C BE CHANGED ARE COMMENTED AS "CHG. TESTID" - ALSO MUST -C CHANGE PROGRAM bulls_bufrcyc WHICH GENERATES GTS -C MESSAGES, CHANGE UTILITY PROGRAM trpsfcmv WHICH -C GENERATES CHARTS FOR THE TROPICS (although technically -C trpsfcmv reads in q-c'd tcvitals files output by this -C program and thus they should not have test storms in -C them), and changes scripts: util/ush/extrkr.sh and -C ush/relocate_extrkr.sh} -C 2004-06-08 D. A. KEYSER - WHEN INTEGER VALUES ARE DECODED FROM -C CHARACTER-BASED RECORD VIA INTERNAL READ IN SUBR. DECVAR, -C IF BYTE IN UNITS DIGIT LOCATION IS ERRONEOUSLY CODED AS -C BLANK (" "), IT IS REPLACED WITH A "5" IN ORDER TO -C PREVENT INVALID VALUE FROM BEING RETURNED (I.E., IF -C "022 " WAS READ, IT WAS DECODED AS "22", IT IS NOW -C DECODED AS "225" - THIS HAPPENED FOR VALUE OF RADIUS OF -C LAST CLOSED ISOBAR FOR JTWC RECORDS FROM 13 JULY 2000 -C THROUGH FNMOC FIX ON 26 MAY 2004 - THE VALUE WAS REPLACED -C BY CLIMATOLOGY BECAUSE IT FAILED A GROSS CHECK, HAD THIS -C CHANGE BEEN IN PLACE THE DECODED VALUE WOULD HAVE BEEN -C W/I 0.5 KM OF THE ACTUAL VALUE) -C 2008-07-10 D. A. KEYSER - CORRECTED MEMORY CLOBBERING CONDITION -C IN SUBR. 
STIDCK RELATED TO ATTEMPTED STORAGE OF MORE WEST -C PACIFIC STORM NAMES FROM FILE syndat_stmnames (144) THAN -C ALLOCATED BY PROGRAM AND IN syndat_stmnames (140), THIS -C LED TO OVERWRITING OF FIRST FOUR syndat_stmnames STORM -C NAMES IN ATLANTIC BASIN FOR 2002, 2008, 2014 CYCLE - -C DISCOVERED BECAUSE 2008 STORM BERTHA (STORM #2 IN -C ATLANTIC BASIN LIST IN syndat_stmnames) WAS NOT BEING -C RECOGNIZED AND THUS NOT PROCESSED INTO OUTPUT TCVITALS -C FILE - CORRECTED BY LIMITING STORAGE OF WEST PACIFIC -C STORM NAMES TO EXACTLY THE MAXIMUM IN PROGRAM (AND NUMBER -C IN syndat_stmnames) (CURRENTLY 140), ALSO GENERALIZED -C CODE TO ENSURE THAT IS WILL NEVER CLOBBER MEMORY READING -C AND STORING STORM NAMES IN ANY OF THE BASINS EVEN IF THE -C NUMBER OF STORM NAMES IN syndat_stmnames INCREASE (AS -C LONG AS THE MAXIMUM VALUE IS .GE. TO THE NUMBER OF STORM -C NAMES FOR THE BASIN IN FILE syndat_stmnames) -C 2013-03-17 D. C. STOKES - CHANGED SOME LIST DIRECTED OUTPUT TO -C FORMATTED TO PREVENT UNNDECSSARY WRAPPING ON WCOSS. -C 2013-03-24 D. C. STOKES - INITIALIZE VARIABLES THAT WERE NOT GETTING -C SET WHEN THERE ARE NO RECORDS TO PROCESS. -C 2013-10-10 D. C. STOKES - ADDED NON-HYPHNATED CARDINAL NUMBERS IN -C ORDER TO RECOGNIZE SUCH NAMED STORMS IN BASINS L, E, C, W, -C AND TO RECOGNIZE NAME CHANGES OF SUCH IN THE OTHER BASINS. -C ALSO EXTENDED THAT LIST (FROM 36 TO 39). -C -C -C INPUT FILES: -C (Note: These need to be double checked) -C UNIT 03 - TEXT FILE ASSOCIATING UNIT NUMBERS WITH FILE NAMES -C UNIT 05 - NAMELIST: VARIABLES APPROPRIATE TO THIS Q/C PROGRAM: -C MAXUNT: NUMBER OF INPUT FILES -C FILES: LOGICAL VARIABLE CONTROLLING FINAL -C COPYING OF RECORDS AND FILE MANIPULATION. -C FOR NORMAL OPERATIONAL USAGE, SHOULD BE TRUE. -C WHEN TRUE, INPUT FILES (UNIT 30, UNIT 31, -C ETC) WILL ZEROED OUT. FOR MULTIPLE RUNS -C OVER THE SAME INPUT DATA SET, FILES MUST BE -C FALSE. FOR DEBUGGING, IT IS HIGHLY -C RECOMMENDED THAT FILES BE SET TO FALSE. -C LNDFIL: TRUE IF RECORDS OF STORMS OVER COASTAL -C POINTS ARE NOT COPIED TO THE FILE OF -C CURRENT QUALITY CONTROLLED RECORDS. -C RUNID: RUN IDENTIFIER (e.g., 'GDAS_TM00_00'). -C WINCUR: TIME WINDOW FOR WRITING CURRENT FILE -C NVSBRS: NUMBER OF VARIABLES ALLOWED FOR SUBSTITUTION -C IVSBRS: INDICES OF VARIABLES ALLOW FOR SUBSTITUTION -C UNIT 11 - APPROPRIATE T126 32-BIT GLOBAL SEA/LAND MASK FILE ON -C GAUSSIAN GRID -C UNIT 12 - RUN DATE FILE ('YYYYMMDDHH') -C UNIT 14 - DATA FILE CONTAINING STORM NAMES -C UNIT 20 - SCRATCH FILE CONTAINING PRELIMINARY Q/C RECORDS -C UNIT 21 - ORIGINAL SHORT-TERM HISTORY, CONTAINS ORIGINAL RECORDS -C BACK A GIVEN NUMBER (WINMIN) DAYS FROM PRESENT -C UNIT 22 - ALIASED SHORT-TERM HISTORY, CONTAINS ALIAS RECORDS -C BACK A GIVEN NUMBER (WINMIN) DAYS FROM PRESENT -C UNIT 25 - ALIAS FILE CONTAINING EQUIVALENT STORM IDS -C FOR STORMS THAT HAVE BEEN REPORTED BY MULTIPLE RSMC'S -C UNIT 26 - NEW ALIAS FILE CONTAINING EQUIVALENT STORM IDS -C FOR STORMS THAT HAVE BEEN REPORTED BY MULTIPLE RSMC'S -C NOTE: UCL SHOULD COPY THIS FILE TO UNIT 22 (THE OLD -C ALIAS FILE) AT THE END OF EXECUTION. -C UNIT 30 - STARTING POINT FOR FILES CONTAINING NEW RECORDS TO BE -C etc. QUALITY CONTROLLED. ADDITIONAL INPUT FILES ARE UNIT -C 31, UNIT 32 ETC. THE NUMBER OF THESE FILES IS -C CONTROLLED BY THE NAMELIST INPUT VARIABLE "MAXUNT" -C MENTIONED UNDER UNIT 05 ABOVE. AN EXAMPLE OF AN INPUT -C FILE IS: /tpcprd/atcf/ncep/tcvitals. 
THIS FILE IS -C WRITTEN BY A REMOTE JOB ENTRY (RJE) AT MIAMI AFTER ALL -C TROPICAL CYCLONE FIXES ARE ESTABLISHED FOR THE ATLANTIC -C AND EAST PACIFIC BY NHC(TPC). THIS FILE IS TYPICALLY -C UPDATED (cat'ed) AT 0230, 0830, 1430, AND 2030 UTC -C (I.E. 2.5 HOURS AFTER SYNOPTIC TIME), 4 TIMES DAILY. -C RECORDS APPROPRIATE TO A FUTURE CYCLE ARE WRITTEN BACK -C TO THE APPROPRIATE FILE. -C -C OUTPUT FILES: -C (Note: These need to be double checked) -C UNIT 06 - STANDARD OUTPUT PRINT -C UNIT 20 - SCRATCH FILE CONTAINING PRELIMINARY Q/C RECORDS -C UNIT 21 - SHORT-TERM HISTORY, RECORDS BACK 4 DAYS FROM PRESENT -C UNIT 22 - NEW ALIAS FILE CONTAINING EQUIVALENT STORM IDS -C FOR STORMS THAT HAVE BEEN REPORTED BY MULTIPLE RSMC'S -C UNIT 27 - STORM CATALOG FILE CONTAINING STORM NAME, ALIAS INFO -C FIRST AND LAST DATA OBSERVED -C UNIT 28 - SCRATCH FILE CONTAINING TEMPORARY CATALOG -C UNIT 30 - SEE INPUT FILES ABOVE. RECORDS APPROPRIATE TO A FUTURE -C etc. CYCLE ARE WRITTEN BACK TO THE APPROPRIATE FILE -C UNIT 54 - RUN DATE FILE FOR DATE CHECK ('YYYYMMDDHH') -C UNIT 60 - FILE CONTAINING QUALITY CONTROLLED RECORDS -C UNIT 61 - CONTAINS HISTORY OF ALL RECORDS THAT ARE OPERATED ON BY -C THIS PROGRAM -C -C SUBPROGRAMS CALLED: -C UNIQUE: - RSMCCK BASNCK AKASUB TCCLIM RCNCIL -C MNMXDA SCLIST AKLIST STCATI STCATN -C ADFSTF FSTSTM RITCUR RITSTH RITHIS -C FNLCPY CPYREC DUPCHK BLNKCK READCK -C DTCHK SETMSK STIDCK FIXDUP FIXNAM -C SECVCK WRNING F1 F2 SLDATE -C FIXSLM GAULAT BSSLZ1 TRKSUB NEWVIT -C DECVAR TIMSUB YTIME SORTRL DS2UV -C ATAN2D SIND COSD DISTSP AVGSUB -C ABORT1 OFILE0 -C LIBRARY: -C COMMON - IARGC GETARG INDEX -C W3LIB - W3TAGB W3TAGE W3DIFDAT W3MOVDAT W3UTCDAT -C - ERREXIT -C -C EXIT STATES: -C COND = 0 - SUCCESSFUL RUN. NO RECORDS WITH ERRORS -C = 1 - SUCCESSFUL RUN. FOUND RECORDS WITH STORM ID>=80 -C CHG. TESTID -C = 2 - SUCCESSFUL RUN. FOUND RECORDS WITH ERRORS -C = 3 - BOTH 1 AND 2 ABOVE -C = 4 - SUCCESSFUL RUN, BUT NO INPUT RECORDS FOUND -C = 5 - PROGRAM HAS BEEN RUN PREVIOUSLY -C =10 - LOGICAL INCONSISTENCY IN SUBROUTINE RCNCIL (??) -C =20 - FATAL ERROR (SEE STDOUT PRINT FOR MORE DETAILS) -C -C REMARKS: NONE. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - PROGRAM SYNDAT_QCTROPCY - - PARAMETER (MAXCHR=95) - PARAMETER (MAXREC=1000) - PARAMETER (MAXCKS=8) - PARAMETER (MAXRC=MAXREC*(MAXCKS+1)) - PARAMETER (MAXTBP=20) - PARAMETER (MAXFIL=99) - PARAMETER (IVSBMX=14,IVSBM1=IVSBMX+1) - - CHARACTER FILNAM*128 - - DIMENSION FILNAM(0:MAXFIL) - - CHARACTER TSTREC(0:MAXREC)*100,OKAREC(MAXREC)*100, - 1 BADREC(MAXREC)*100,DUMREC*100,SCRREC(0:MAXREC)*9, - 2 XXXREC*27,ZZZREC*100,NNNREC*100,TBPREC(MAXTBP)*100, - 3 SCRATC(MAXREC)*100 - - DIMENSION IEFAIL(MAXREC,0:MAXCKS),NUMOKA(MAXREC),NUMBAD(MAXREC), - 1 NUMTST(MAXREC),NUMTBP(MAXTBP),IDUPID(MAXREC), - 2 IUNTIN(MAXREC) - -C IUNTSL: UNIT NUMBER FOR READING T126 32-BIT SEA-LAND MASK -C ON GAUSSIAN GRID -C IUNTDT: UNIT NUMBER FOR READING RUN DATE ('YYYYMMDDHH') -C IUNTDC: UNIT NUMBER FOR RUN DATE ('YYYYMMDDHH') CHECK -C IUNTOK: UNIT NUMBER FOR PRELIMINARY QUALITY-CONTROLLED -C RECORDS. ***NOTE: AT THE END OF THIS PROGRAM, -C IUNTOK CONTAINS THE SHORT-TERM -C HISTORICAL RECORDS FOR THE NEXT -C INPUT TIME. 
-C IUNTAL: UNIT NUMBER FOR ALIAS FILE WHICH CONTAINS STORM IDS -C FOR STORMS THAT HAVE BEEN REPORTED BY MULTIPLE RSMC'S -C IUNTAN: UNIT NUMBER FOR NEW ALIAS FILE -C IUNTCA: UNIT NUMBER FOR STORM CATALOG FILE WHICH CONTAINS -C CURRENT LISTING OF ALL STORMS, THEIR NAMES, DATES -C IDS AND ALIASES -C IUNTCN: UNIT NUMBER FOR SCRATCH STORM CATALOG -C IUNTCU: UNIT NUMBER FOR FINAL QUALITY-CONTROLLED RECORDS -C (CURRENT FILE) -C IUNTHO: UNIT NUMBER FOR THE SHORT-TERM HISTORICAL (ORIGINAL) -C VITAL STATISTICS RECORDS. LENGTH OF HISTORY -C CONTROLLED BY WINMIN. THESE ARE ORIGINAL RECORDS AND -C NOT ALIASED RECORDS! -C IUNTHA: UNIT NUMBER FOR THE SHORT-TERM HISTORICAL (ALIAS) -C VITAL STATISTICS RECORDS. LENGTH OF HISTORY -C CONTROLLED BY WINMIN. THESE ARE ALIAS RECORDS IF -C MULTIPLE OBSERVERS FOR A GIVEN STORM ARE PRESENT! -C IUNTHL: UNIT NUMBER FOR THE LONG-TERM HISTORICAL (PREVIOUS) -C VITAL STATISTICS RECORDS. ALL RECORDS, AND QUALITY -C CONTROL FLAGS ARE PUT INTO THIS FILE. -C IUNTVI: UNIT NUMBER FOR RAW VITAL STATISTICS FILE (NEITHER -C QUALITY CONTROLLED NOR CHECKED FOR DUPLICATES) -C WINMIN: WINDOW FOR SHORT-TERM HISTORY FILE (FRACTIONAL DAYS) -C WINMX1: WINDOW FOR MAXIMUM ACCEPTABLE DATE (FRACTIONAL DAYS) -C FOR RECORD PROCESSING -C WINCUR: WINDOW FOR WRITING CURRENT FILE (FRACTIONAL DAYS) -C FILES: TRUE IF NEW SHORT-TERM HISTORY FILE IS CREATED AND -C ALL NEW RECORD FILES ARE ZEROED OUT -C LNDFIL: TRUE IF RECORDS OF STORMS OVER COASTAL POINTS ARE -C NOT COPIED TO THE FILE OF CURRENT QUALITY CONTROLLED -C RECORDS. - - DIMENSION RINC(5) - - DIMENSION IVSBRS(0:IVSBMX) - LOGICAL FILES,LNDFIL - CHARACTER RUNID*12 - - NAMELIST/INPUT/IDATEZ,IUTCZ,RUNID,FILES,LNDFIL,MAXUNT,WINMIN, - 1 NVSBRS,IVSBRS,WINCUR - - DATA IUNTSL/11/,IUNTDT/12/,IUNTDC/54/,IUNTOK/20/,IUNTHO/21/, - 1 IUNTVI/30/,MAXUNT/2/,IUNTCU/60/,IUNTHL/61/,WINMIN/4./, - 2 WINMX1/0.0833333/,IEFAIL/MAXRC*0/,LNDFIL/.TRUE./,IUNTOP/3/, - 3 IUNTHA/22/,IUNTAL/25/,IUNTAN/26/,NVSBRS/0/,IVSBRS/IVSBM1*0/, - 4 WINCUR/0.25/,FIVMIN/3.4722E-3/,FILES/.FALSE./,IUNTCA/27/, - 5 IUNTCN/28/,IUNTSN/14/ - DATA NNNREC/'12345678901234567890123456789012345678901234567890123 - 1456789012345678901234567890123456789012345*****'/ - DATA ZZZREC/'RSMC#SID#NAMEZZZZZ#YYYYMMDD#HHMM#LATZ#LONGZ#DIR#SPD#P - 1CEN#PENV#RMAX#VM#RMW#15NE#15SE#15SW#15NW#D*****'/ - DATA - 1 XXXREC/' FL BL RD DT LL ID MR SV DS'/ - - CALL W3TAGB('SYNDAT_QCTROPCY',2013,0053,0050,'NP22 ') - -C INITIALIZE SOME VARIABLES THAT MIGHT GET USED BEFORE GETTING SET -C UNDER CERTAIN CONDITIONS - IERCHK=0 - IERRCN=0 - NTBP=0 - -C OPEN FILES - - filnam(0)='fildef.vit' - CALL OFILE0(IUNTOP,MAXFIL,NFTOT,FILNAM) - -C READ RUN DATE AND CONVERT TO FLOATING POINT DATE. -C THE RUN DATE ACCEPTANCE WINDOW IS NOT SYMMETRIC ABOUT -C THE CURRENT RUN DATE - - READ(5,INPUT) - WRITE(6,INPUT) - -C GET CURRENT RUN DATE AND OFFSET IN SJL FORMAT -C OFFSET ROUNDED TO THE NEAREST HOUR FROM W3 CALLS - - IOFFTM = 0 - - IF(IDATEZ .LT. 0) THEN - CALL SLDATE(IUNTDC,IDATCK,IUTCCK,IOFFTM) - CALL SLDATE(IUNTDT,IDATEZ,IUTCZ,IOFFTM) - IF(FILES .AND. IDATCK .EQ. IDATEZ .AND. IUTCCK .EQ. IUTCZ) THEN - WRITE(6,1) FILES,IDATCK,IUTCCK - 1 FORMAT(/'######WITH FILES=',L2,' THIS PROGRAM HAS RUN PREVIOUSLY', - 1 ' FOR DATE,TIME=',I9,I5) - ISTOP=5 - GO TO 1000 - ENDIF - ENDIF - - CALL ZTIME(IDATEZ,IUTCZ,IYR,IMO,IDA,IHR,IMIN) - CALL W3DIFDAT((/IYR,IMO,IDA,0,0,0,0,0/),(/1899,12,31,0,0,0,0,0/), - $ 1,RINC) - JDY = NINT(RINC(1)) - CALL FLDAY(JDY,IHR,IMIN,DAY0) - HROFF =IOFFTM*.01 - CYCOFF=(1.0+HROFF)/24. - IF(HROFF .GT. 24.) 
HROFF=-99.99 - - WRITE(6,2) IOFFTM,CYCOFF - 2 FORMAT(/'...OFFTIM,CYCOFF=',I12,F12.5) - -C THE MINIMUM WINDOW DETERMINES THE OLDEST RECORD THAT CAN -C BE PROCESSED BY QUALITY CONTROL. IT IS ALSO THE TIME COVERED -C BY THE SHORT-TERM HISTORICAL STORMS IN THE WORKING FILE. - -C THERE ARE TWO MAXIMUM WINDOWS: THE SHORT ONE (DAYMX1=2 HR) IS -C FOR PROCESSING RECORDS NO LATER THAN THE CYCLE TIME. THE -C LARGER ONE (DAYMX2) EXTENDS TO THE CURRENT TIME (THE TIME AT -C WHICH THIS PROGRAM IS RUN) PLUS 1 HOUR. RECORDS LATER THAN -C DAYMX1 BUT EARLIER THAN DAYMX2 WILL BE "THROWN BACK INTO -C THE POND" AND WILL BE PROCESSED AT THE NEXT CYCLE. - - DAYMIN=DAY0-WINMIN - DAYMX1=DAY0+WINMX1 - DAYMX2=DAY0+CYCOFF - DAYCUR=DAY0-WINCUR - DAYOFF=0.0 - - DAYMX1=DAYMX1+DAYOFF - - WRITE(6,3) WINMIN,WINMX1,DAYMIN,DAYMX1,DAYMX2 - 3 FORMAT(/'...WINMIN,WINMX1,DAYMIN,DAYMX1,DAYMX2=',/,4X,5F12.3) - - WRITE(6,5) IDATEZ,IUTCZ,DAY0,RUNID,LNDFIL,FILES - 5 FORMAT(20X,'***********************************************'/ - 1 20X,'***********************************************'/ - 2 20X,'**** WELCOME TO SYNDAT_QCTROPCY ****'/ - 3 20X,'**** Y2K/F90 VERSION - 17 MARCH 2013 ****'/ - 4 20X,'**** ****'/ - 5 20X,'**** VITAL STATISTICS RECORD CHECKER ****'/ - 6 20X,'**** FOR DATE=',I8,' UTC=',I4.4,10X,'****'/ - 7 20X,'**** JULIAN DAY=',F10.3,16X,'****'/ - 8 20X,'**** RUNID=',A12,' LNDFIL=',L1,' FILES=',L1,4X,'****'/ - 9 20X,'**** 1) INPUT RECORDS ARE CHECKED FOR ****'/ - O 20X,'**** EXACT DUPLICATES ****'/ - 1 20X,'**** 2) QUALITY CONTROL CHECKS. ****'/ - 2 20X,'**** FIRST: PRIMARY INFORMATION ****'/ - 3 20X,'**** (RECOVERY IS ESSENTIAL) ****'/ - 4 20X,'**** A) ALL COLUMNS ****'/ - 5 20X,'**** B) DATE/TIME ****'/ - 6 20X,'**** C) POSITION ****'/ - 7 20X,'**** SECOND: SECONDARY INFO. ****') - WRITE(6,6) - 6 FORMAT(20X,'**** (RECOVERY FROM PERSIS.) ****'/ - 1 20X,'**** D) DIRECTION/SPEED ****'/ - 2 20X,'**** E) RMAX, PENV, PCEN, STM DEPTH ****'/ - 3 20X,'**** THIRD: TERTIARY INFORMATION ****'/ - 4 20X,'**** (RECOVERY DESIRABLE) ****'/ - 5 20X,'**** F) VMAX, RMW ****'/ - 6 20X,'**** G) R15 NE, SE, SW, NW ****'/ - 7 20X,'**** ****'/ - 8 20X,'***********************************************'/ - 9 20X,'***********************************************'/) - - WRITE(6,7) IUNTSL,IUNTDT,IUNTSN,IUNTOK,IUNTCU,IUNTAL,IUNTAN, - 1 IUNTCA,IUNTCN,IUNTHO,IUNTHA,IUNTHL,IUNTVI - 7 FORMAT(20X,'I/O UNITS ARE:'/ - 1 22X,'SEA/LAND MASK =IUNTSL =',I3/ - 2 22X,'RUN DATE (YYYYMMDDHH) =IUNTDT =',I3/ - 3 22X,'STORM NAMES =IUNTSN =',I3/ - 4 22X,'PRELIMINARY Q/C RECORDS =IUNTOK =',I3/ - 5 22X,'FINAL Q/C RECORDS =IUNTCU =',I3/ - 6 22X,'STORM ID ALIAS =IUNTAL =',I3/ - 7 22X,'NEW STORM ID ALIAS =IUNTAN =',I3/ - 8 22X,'STORM CATALOG =IUNTCA =',I3/ - 9 22X,'SCRATCH STORM CATALOG =IUNTCN =',I3/ - O 22X,'SHORT TERM HIST. (ORIG.)=IUNTHO =',I3/ - 1 22X,'SHORT TERM HIST. (ALIAS)=IUNTHA =',I3/ - 2 22X,'LONG TERM HIST. 
=IUNTHL =',I3/ - 3 22X,'NEW RECORDS =IUNTVI>=',I3) - -C SET UP THE T126 32-BIT SEA-LAND MASK ON GAUSSIAN GRID -C NTEST,NOKAY,NBAD ARE ALL MEANINGLESS NUMBERS AT THIS POINT - - NTEST=1 - NOKAY=1 - NBAD =1 - CALL SETMSK(IUNTSL,NTEST,NOKAY,NBAD,IECOST,IEFAIL(1:MAXREC,4), - 1 NUMTST,NUMOKA,NUMBAD,ZZZREC,NNNREC,TSTREC,BADREC, - 2 OKAREC) - -C INITIAL CHECKS ARE FOR EXACT DUPLICATES AND BLANKS IN THE -C CORRECT SPOT - - NOKAY=0 - NBAD=0 - CALL DUPCHK(IUNTVI,MAXUNT,MAXREC,IERCHK,NTEST,IEFAIL(1:MAXREC,0), - 1 NUMTST,DUMREC,TSTREC,BADREC,*500) - -C SAVE THE INPUT UNIT NUMBERS FOR ALL RECORDS - - IUNTIN(1:NTEST)=IEFAIL(1:NTEST,0) -C - CALL BLNKCK(NTEST,NOKAY,NBAD,IEFAIL(1:MAXREC,1),NUMTST,NUMOKA, - 1 NUMBAD,ZZZREC,NNNREC,TSTREC,BADREC,OKAREC) - -C RELOAD THE TEST RECORDS - - NTEST=NOKAY - NUMTST(1:NOKAY)=NUMOKA(1:NOKAY) - TSTREC(1:NOKAY)=OKAREC(1:NOKAY) - NOKAY=0 - - CALL READCK(NTEST,NOKAY,NBAD,IEFAIL(1:MAXREC,2),NUMTST,NUMOKA, - 1 NUMBAD,ZZZREC,NNNREC,TSTREC,BADREC,OKAREC) - -C RELOAD THE TEST RECORDS AGAIN - - NTEST=NOKAY - NUMTST(1:NOKAY)=NUMOKA(1:NOKAY) - TSTREC(1:NOKAY)=OKAREC(1:NOKAY) - NOKAY=0 - NTBP=MAXTBP -C - CALL DTCHK(NTEST,NOKAY,NBAD,NTBP,IEFAIL(1:MAXREC,3),NUMTST,NUMOKA, - 1 NUMBAD,NUMTBP,DAYMIN,DAYMX1,DAYMX2,DAYOFF,TSTREC, - 2 BADREC,OKAREC,TBPREC) - -C ENCORE, UNE FOIS - - NTEST=NOKAY - NUMTST(1:NOKAY)=NUMOKA(1:NOKAY) - TSTREC(1:NOKAY)=OKAREC(1:NOKAY) - NOKAY=0 - - CALL LLCHK(IUNTSL,NTEST,NOKAY,NBAD,IEFAIL(1:MAXREC,4),NUMTST, - 1 NUMOKA,NUMBAD,ZZZREC,NNNREC,TSTREC,BADREC,OKAREC) - -C ONE MORE TIME (POUR CEUX QUI NE PARLE PAS FRANCAIS) - - NTEST=NOKAY - NUMTST(1:NOKAY)=NUMOKA(1:NOKAY) - TSTREC(1:NOKAY)=OKAREC(1:NOKAY) - NOKAY=0 - - CALL STIDCK(IUNTHO,IUNTSN,IUNTCA,NTEST,IYR,MAXREC,NOKAY,NBAD, - 1 IEFAIL(1:MAXREC,5),IDUPID,NUMTST,NUMOKA,NUMBAD,ZZZREC, - 2 NNNREC,TSTREC,BADREC,OKAREC,SCRATC) - - -C ***************************************************************** -C ***************************************************************** -C **** **** -C **** END OF THE FIRST PHASE OF ERROR CHECKING. FROM NOW **** -C **** ON, THE ORIGINAL RECORD SHORT-TERM HISTORY FILE IS **** -C **** CLOSED AND THE ALIAS SHORT-TERM HISTORY FILE IS OPEN. **** -C **** SOME INPUT RECORDS MAY BE CHANGED DUE TO SUBSTITUTION **** -C **** OF MISSING VALUES OR AVERAGING OF MULTIPLE STORM **** -C **** REPORTS. **** -C **** **** -C ***************************************************************** -C ***************************************************************** - -C MULTIPLE RSMC CHECK: SAME STORM REPORTED BY MORE THAN ONE -C TROPICAL CYCLONE WARNING CENTER. 
- -C CHECK FOR: -C 1) MULTIPLE STORM REPORTS BY DIFFERENT RSMC'S AT THE SAME TIME -C 2) TIME SERIES OF REPORTS ON THE SAME STORM BY DIFFERENT RSMC'S -C RECONCILE THE ABOVE: -C 1) ASSIGN A COMMON STORM ID -C 2) REMOVE MULTIPLE REPORTS IN FAVOR OF A SINGLE REPORT WITH THE -C COMMON STORM ID AND COMBINED (AVERAGED) PARAMETERS IF -C NECESSARY - -CCCC NTEST=NOKAY -CCCC WRITE(6,61) XXXREC -CCC61 FORMAT(///'...THE FOLLOWING ACCEPTABLE RECORDS ARE ELIGIBLE FOR ', -CCCC 1 'THE MULTIPLE RSMC CHECK.'/4X,'ERROR CODES ARE:'/21X, -CCCC 2 '=0: NO ERRORS OCCURRED'/21X,'<0: SUCCESSFUL ERROR ', -CCCC 3 'RECOVERY',55X,A/) - -CCCC DO NOK=1,NOKAY -CCCC NUMTST(NOK)=NUMOKA(NOK) -CCCC TSTREC(NOK)=OKAREC(NOK) -CCCC WRITE(6,67) NOK,OKAREC(NOK)(1:MAXCHR),(IEFAIL(NUMOKA(NOK),ICK), -CCCC 1 ICK=0,MAXCKS) - 67 FORMAT('...',I3,'...',A,'...',I2,8I3) -CCCC ENDDO -CCCC NOKAY=0 -CCCC REWIND IUNTOK - -c Stopgap measure is to not allow records to be written into -c the alias short-term history file (17 Sept. 1998) - NRCOVR=0 -CCCC CALL RSMCCK(IUNTHO,IUNTHA,IUNTAL,IUNTAN,IUNTCA,IUNTOK,NVSBRS, -CCCC 1 IVSBRS,MAXREC,NTEST,NOKAY,NBAD,NRCOVR, -CCCC 2 IEFAIL(1:MAXREC,6),NUMTST,NUMOKA,NUMBAD,IDUPID,TSTREC, -CCCC 3 BADREC,OKAREC,SCRATC) - -C COPY ALIAS SHORT-TERM HISTORY RECORDS FROM THE PRELIMINARY -C (SCRATCH) FILE TO THE ALIAS SHORT-TERM HISTORY FILE ONLY -C WHEN WE WISH TO UPDATE THE SHORT-TERM HISTORY FILE. - - IF(FILES) THEN - ICALL=1 - REWIND IUNTHA - WRITE(6,93) - 93 FORMAT(/'...THE FOLLOWING RECORDS WILL BE COPIED FROM THE ', - 1 'PRELIMINARY QUALITY CONTROLLED FILE TO THE ALIAS ', - 2 'SHORT-TERM HISTORICAL FILE:') - - CALL CPYREC(ICALL,IUNTOK,IUNTHA,NOKAY,DAYMIN,DUMREC,OKAREC) - ENDIF - -C BEGIN CHECKS FOR SECONDARY STORM INFORMATION WHICH INCLUDES: -C 1) DIRECTION, SPEED -C 2) PCEN, PENV, RMAX, STORM DEPTH -C THESE NUMBERS ARE NEEDED BY YOGI. IF MISSING, WE TRY TO -C FILL THEM IN BY PERSISTENCE. - -C FIRST, COPY HISTORICAL RECORDS TO THE PRELIMINARY QUALITY -C CONTROLLED FILE AND THEN COPY THE RECORDS FROM THE CURRENT FILE. - -C COPY HISTORICAL RECORDS TO PRELIMINARY FILE, CHECK FOR DUPLICATES - - REWIND IUNTOK - IF(FILES) THEN - ICALL=3 - WRITE(6,95) DAYMIN,ICALL - 95 FORMAT(/'...THE FOLLOWING RECORDS, HAVING DATES GREATER THAN ', - 1 'OR EQUAL TO DAY',F10.3,', WILL BE CHECKED FOR EXACT ', - 2 'AND PARTIAL DUPLICATES '/4X,'(ICALL=',I2,')', - 3 'AND COPIED FROM THE ALIAS SHORT-TERM HISTORICAL FILE ', - 4 'TO THE PRELIMINARY QUALITY CONTROLLED FILE WHICH NOW ', - 5 'WILL CONTAIN '/4X,'ALIAS RECORDS:'/) - - CALL CPYREC(ICALL,IUNTHA,IUNTOK,NOKAY,DAYMIN,DUMREC,OKAREC) - - ELSE - WRITE(6,97) - 97 FORMAT(/'...THE FOLLOWING RECORDS WILL BE COPIED FROM THE ', - 1 'SCRATCH ARRAY TO THE PRELIMINARY QUALITY CONTROLLED ', - 2 'FILE:') - DO NRC=1,NRCOVR - WRITE(6,105) SCRATC(NRC) - 105 FORMAT(' ...',A,'...') - WRITE(IUNTOK,107) SCRATC(NRC) - 107 FORMAT(A) - ENDDO - ENDIF - -C OH NO, NOT AGAIN!!! 
- - NTEST=NOKAY - write(6,1011) ntest - 1011 format(/'***debug ntest=nokay=',i4/) - WRITE(6,111) - 111 FORMAT(/'...IN PREPARATION FOR SECONDARY VARIABLE CHECKING, THE ', - 1 'FOLLOWING ACCEPTABLE RECORDS WILL BE '/4X,'ADDED TO THE', - 2 ' PRELIMINARY,QUALITY CONTROLLED FILE:'/) - DO NOK=1,NOKAY - NUMTST(NOK)=NUMOKA(NOK) - TSTREC(NOK)=OKAREC(NOK) - WRITE(6,113) NOK,NUMOKA(NOK),OKAREC(NOK) - 113 FORMAT(' ...',I4,'...',I4,'...',A) - WRITE(IUNTOK,119) OKAREC(NOK) - 119 FORMAT(A) - ENDDO - - NOKAY=0 - CALL SECVCK(IUNTOK,NTEST,NOKAY,NBAD,NUMTST,NUMOKA,NUMBAD,DAY0, - 1 DAYMIN,DAYMX1,DAYOFF,IEFAIL(1:MAXREC,7),ZZZREC,NNNREC, - 2 SCRREC,TSTREC,BADREC,OKAREC) - -C COPY HISTORICAL RECORDS TO PRELIMINARY FILE, CHECK FOR DUPLICATES - - REWIND IUNTOK - IF(FILES) THEN - ICALL=3 - WRITE(6,95) DAYMIN,ICALL - CALL CPYREC(ICALL,IUNTHA,IUNTOK,NOKAY,DAYMIN,DUMREC,OKAREC) - - ELSE - WRITE(6,97) - DO NRC=1,NRCOVR - WRITE(6,105) SCRATC(NRC) - WRITE(IUNTOK,107) SCRATC(NRC) - ENDDO - ENDIF - - NTEST=NOKAY - WRITE(6,201) - 201 FORMAT(//'...THE FOLLOWING ACCEPTABLE RECORDS WILL BE ADDED TO ', - 1 'THE PRELIMINARY QUALITY CONTROLLED FILE '/4X,'IN ', - 2 'PREPARATION FOR DIRECTION/SPEED CHECKING.'/) - DO NOK=1,NOKAY - NUMTST(NOK)=NUMOKA(NOK) - TSTREC(NOK)=OKAREC(NOK) - WRITE(6,203) NOK,OKAREC(NOK) - 203 FORMAT(' ...',I4,'...',A) - WRITE(IUNTOK,207) OKAREC(NOK) - 207 FORMAT(A) - ENDDO - - NOKAY=0 - -C SEA/LAND MASK CHECK - - CALL SELACK(NTEST,NOKAY,NBAD,IECOST,IEFAIL(1:MAXREC,4),NUMTST, - 1 NUMOKA,NUMBAD,LNDFIL,ZZZREC,NNNREC,TSTREC,BADREC, - 2 OKAREC) - - WRITE(6,301) XXXREC - 301 FORMAT(/'...THE SECONDARY VARIABLE, DIR/SPD AND SEA/LAND ', - 1 'CHECKING HAVE CONCLUDED. ERROR CHECKING HAS ENDED.'/4X, - 2 'OKAY RECORDS AND ERROR CODES ARE:',69X,A/) - - DO NOK=1,NOKAY - WRITE(6,67) NOK,OKAREC(NOK)(1:MAXCHR),IEFAIL(NUMOKA(NOK),0), - 1 (-IABS(IEFAIL(NUMOKA(NOK),ICK)), - 1 ICK=1,MAXCKS) - ENDDO - - WRITE(6,311) XXXREC - 311 FORMAT(/'...BAD RECORDS AND ERROR CODES ARE:',71X,A/) - - DO NBA=1,NBAD - WRITE(6,67) NBA,BADREC(NBA)(1:MAXCHR),IEFAIL(NUMBAD(NBA),0), - 1 (IEFAIL(NUMBAD(NBA),ICK),ICK=1,MAXCKS) - - ENDDO - -C RECONCILE THE STORM IDS WITH THE STORM CATALOG - -C LET'S PRETEND WE'RE NOT GOING TO DO IT, BUT DO IT ANYWAY - - NTEST=NOKAY+NBAD - WRITE(6,401) XXXREC - 401 FORMAT(///'...THE FOLLOWING ACCEPTABLE RECORDS WILL BE ', - 1 'RECONCILED WITH THE STORM CATALOG.'/4X,'ERROR CODES ', - 2 'ARE:'/21X,'=0: NO ERRORS OCCURRED'/21X,'<0: ', - 3 'SUCCESSFUL ERROR RECOVERY',56X,A/) - - DO NOK=1,NOKAY - NUMTST(NOK)=NUMOKA(NOK) - TSTREC(NOK)=OKAREC(NOK) - WRITE(6,67) NOK,OKAREC(NOK)(1:MAXCHR),IEFAIL(NUMOKA(NOK),0), - 1 (IEFAIL(NUMOKA(NOK),ICK),ICK=1,MAXCKS) - ENDDO - WRITE(6,411) XXXREC - 411 FORMAT(//'...THE FOLLOWING BAD RECORDS WILL BE RECONCILED WITH ', - 1 'THE STORM CATALOG FOR OVERLAND OR OVERLAPPING STORM ', - 2 'CASES.'/4X,'ERROR CODES ARE:'/21X,'>0: ERROR FOUND',70X, - 3 A/) - DO NBA=1,NBAD - NUMTST(NOKAY+NBA)=NUMBAD(NBA) - TSTREC(NOKAY+NBA)=BADREC(NBA) - IF(IEFAIL(NUMBAD(NBA),4) .EQ. 5 .OR. - 1 IEFAIL(NUMBAD(NBA),4) .EQ. 6 .OR. - 2 IEFAIL(NUMBAD(NBA),6) .EQ. 22) THEN - WRITE(6,67) NBA+NOKAY,BADREC(NBA)(1:MAXCHR),IEFAIL(NUMBAD(NBA),0), - 1 (IEFAIL(NUMBAD(NBA),ICK),ICK=1,MAXCKS) - ENDIF - ENDDO - - call rcncil(iuntca,iuntcn,iuntal,ntest,nokay,nbad,maxrec,maxcks, - 1 iefail,ierrcn,idupid,numtst,numoka,numbad,tstrec, - 2 badrec,okarec) - -C CLEAR OUT THE TEMPORARY ALIAS FILE; AKAVIT IS IN ITS FINAL FORM. 
- - REWIND IUNTAN - END FILE IUNTAN - -C ERROR CHECKING HAS FINALLY ENDED - - 500 WRITE(6,501) XXXREC - 501 FORMAT(//'...THE FINAL ERROR CHECKING HAS ENDED. BAD RECORDS ', - 1 'AND ERROR CODES ARE:',36X,A/) - ISTP90=0 - ISTPBR=0 - DO NBA=1,NBAD - DO NCK=1,MAXCKS - -C SELECT APPROPRIATE CONDITION CODE FOR STOP - - IF(IEFAIL(NUMBAD(NBA),NCK) .EQ. 2 .AND. NCK .EQ. 5) THEN - ISTP90=1 - ELSE IF(IEFAIL(NUMBAD(NBA),NCK) .NE. 0) THEN - ISTPBR=2 - ENDIF - ENDDO - - WRITE(6,543) NBA,BADREC(NBA)(1:MAXCHR),(IEFAIL(NUMBAD(NBA),ICK), - 1 ICK=0,MAXCKS) - 543 FORMAT(' ...',I3,'...',A,'...',I2,8I3) - ENDDO - ISTOP=ISTP90+ISTPBR - IF(IERCHK .EQ. 161) ISTOP=04 - IF(IERRCN .NE. 0) ISTOP=10 - WRITE(6,551) ISTP90,ISTPBR,IERRCN,ISTOP - 551 FORMAT(/'...STOP CODES ARE: ISTP90,ISTPBR,IERRCN,ISTOP=',4I3) - -C ADD FIRST OCCURRENCE FLAGS BY CHECKING THE SHORT-TERM HISTORY -C FILE - - CALL ADFSTF(IUNTHA,NOKAY,NBAD,MAXREC,MAXCKS,IECOST,NUMBAD,IEFAIL, - 1 DUMREC,OKAREC,BADREC) - -C WRITE THE RESULTS OF THE Q/C PROGRAM TO A LONG-TERM HISTORICAL -C FILE - - NRTOT=NOKAY+NBAD - CALL RITHIS(-IUNTHL,IEFAIL,NRTOT,IDATEZ,IUTCZ,NUMOKA,NOKAY,MAXREC, - 1 MAXCKS,HROFF,WINCUR,RUNID,LNDFIL,FILES,OKAREC,ZZZREC, - 2 XXXREC) - CALL RITHIS(IUNTHL,IEFAIL,NRTOT,IDATEZ,IUTCZ,NUMBAD,NBAD,MAXREC, - 1 MAXCKS,HROFF,WINCUR,RUNID,LNDFIL,FILES,BADREC,ZZZREC, - 2 ZZZREC) - -C UPDATE THE SHORT-TERM HISTORY FILES. -C **** IMPORTANT NOTE: ALL INFORMATION FROM TSTREC,OKAREC,BADREC, -C NUMTST,NUMOKA,NUMBAD WILL BE LOST **** -C **** PRENEZ GARDE **** - - IF(FILES) THEN - CALL RITSTH(IUNTHA,IUNTHO,IUNTOK,NOKAY,NBAD,DAYMIN,IECOST,MAXCKS, - 1 MAXREC,NUMBAD,IEFAIL,DUMREC,OKAREC,BADREC) - - CALL FNLCPY(IUNTVI,MAXUNT,IUNTOK,IUNTHA,MAXREC,NTBP,NUMTBP,IUNTIN, - 1 TBPREC,DUMREC) - NTEST=0 - NOKAY=0 - IUNTRD=IUNTOK - -C NOPE: SORRY, ONE LAST TIME, BUT ONLY FOR FILES=.FALSE. - - ELSE - NTEST=NOKAY - IUNTRD=IUNTHA - NUMTST(1:NOKAY)=NUMOKA(1:NOKAY) - TSTREC(1:NOKAY)=OKAREC(1:NOKAY) - NOKAY=0 - - ENDIF - -C WRITE THE FILE CONTAINING ALL CURRENT QUALITY CONTROLLED RECORDS - - CALL YTIME(IYR,DAYCUR+FIVMIN,IDATCU,JUTCCU) - CALL RITCUR(IUNTRD,IUNTCU,NTEST,NOKAY,NBAD,IDATCU,JUTCCU,DAYCUR, - 1 MAXREC,IEFAIL(1:MAXREC,4),NUMTST,NUMOKA,NUMBAD,FILES, - 2 LNDFIL,ZZZREC,NNNREC,DUMREC,SCRREC,TSTREC,OKAREC, - 3 BADREC) - -C CLEAN OUT THE SCRATCH FILE - - REWIND IUNTOK - END FILE IUNTOK - - 1000 CONTINUE - IF(FILES) CALL SLDTCK(IUNTDC) - - WRITE(6,1115) - 1115 FORMAT(////20X,'*******************************************' - 1 /20X,'*******************************************' - 2 /20X,'**** ****' - 3 /20X,'**** SUCCESSFUL COMPLETION OF ****' - 4 /20X,'**** SYNDAT_QCTROPCY ****' - 5 /20X,'**** ****' - 6 /20X,'*******************************************' - 7 /20X,'*******************************************') - - CALL W3TAGE('SYNDAT_QCTROPCY') - -ccccc IF(ISTOP .EQ. 0) THEN - STOP -ccccc ELSE IF(ISTOP .EQ. 1) THEN -ccccc call ERREXIT (1) -ccccc ELSE IF(ISTOP .EQ. 2) THEN -ccccc call ERREXIT (2) -ccccc ELSE IF(ISTOP .EQ. 3) THEN -ccccc call ERREXIT (3) -ccccc ELSE IF(ISTOP .EQ. 04) THEN -ccccc call ERREXIT (4) -ccccc ELSE IF(ISTOP .EQ. 05) THEN -ccccc call ERREXIT (5) -ccccc ELSE IF(ISTOP .EQ. 10) THEN -ccccc call ERREXIT (10) -ccccc ENDIF - - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: RSMCCK CHECKS FOR MULTIPLE STORM REPORTS -C PRGMMR: S. LORD ORG: NP22 DATE: 1992-02-19 -C -C ABSTRACT: INPUT RECORDS ARE CHECKED FOR MULTIPLE REPORTS ON THE SAME -C STORM FROM DIFFERENT RSMC'S. 
THE FOLLOWING ACTIONS ARE -C TAKEN: -C 1) MULTIPLE STORM REPORTS BY DIFFERENT RSMC'S AT THE SAME -C TIME ARE REMOVED -C 2) TIME SERIES OF REPORTS ON THE SAME STORM BY DIFFERENT -C RSMC'S ARE DISCOVERED -C TO RECONCILE THE ABOVE: -C 1) A COMMON STORM ID IS ASSIGNED -C 2) MULTIPLE REPORTS ARE REMOVED IN FAVOR OF A SINGLE -C REPORT WITH THE COMMON STORM ID AND COMBINED -C (AVERAGED) PARAMETERS IF NECESSARY -C -C PROGRAM HISTORY LOG: -C 1992-02-19 S. LORD -C 1992-07-16 S. LORD FIXED SOME BUGS (390); ADDED RETURN CODE 2. -C 1993-03-09 S. LORD ADDED CODE FOR COMPATIBILITY WITH RCNCIL -C 2013-10-10 D. C. STOKES - ADDED NON-HYPHNATED CARDINAL NUMBER NAMES -C ALSO EXTENDED THAT LIST (FROM 36 TO 39). -C -C USAGE: CALL RSMCCK(IUNTHO,IUNTHA,IUNTAL,IUNTAN,IUNTOK,NVSBRS,IVSBRS, -C MAXOVR,NTEST,NOKAY,NBAD,NRCOVR,IFRSMC,NUMTST, -C NUMOKA,NUMBAD,IOVRLP,TSTREC,BADREC,OKAREC,OVRREC) -C INPUT ARGUMENT LIST: -C IUNTHO - UNIT NUMBER FOR SHORT-TERM HISTORY FILE OF ORIGINAL -C - RECORDS. -C IUNTHA - UNIT NUMBER FOR SHORT-TERM HISTORY FILE OF ALIASED -C - RECORDS. -C IUNTAL - UNIT NUMBER FOR ALIAS FILE. -C IUNTAN - UNIT NUMBER FOR NEW ALIAS FILE. -C IUNTOK - UNIT NUMBER FOR SCRATCH FILE. -C NVSBRS - NUMBER OF ALLOWABLE VARIABLES FOR SUBSTITUTION. -C IVSBRS - INDEX OF ALLOWABLE VARIABLES FOR SUBSTITUTION. -C MAXOVR - DIMENSION FOR SCRATCH SPACE. -C NTEST - NUMBER OF CURRENT RECORDS TO BE TESTED. -C NUMTST - INTEGER ARRAY CONTAINING INDEX NUMBER OF EACH RECORD -C - TO BE TESTED. -C IOVRLP - SCRATCH ARRAY. -C TSTREC - CHARACTER ARRAY CONTAINING RECORDS TO BE TESTED. -C -C OUTPUT ARGUMENT LIST: -C NOKAY - NUMBER OF RECORDS THAT PASSED THE RSMC CHECK. -C NBAD - NUMBER OF RECORDS THAT FAILED THE RSMC CHECK. -C NRCOVR - NUBER OF RECORDS RETURNED IN OVRREC. THESE CONTAIN -C - UPDATED ALIAS SHORT-TERM HISTORY RECORDS FOR USE WHEN -C - FILES=F. -C IFRSMC - INTEGER ARRAY CONTAINING ERROR CODE FOR EACH INPUT -C - RECORD. SEE COMMENTS IN PGM FOR KEY TO ERROR CODES. -C NUMOKA - INTEGER ARRAY CONTAINING INDEX NUMBER OF EACH GOOD -C - RECORD. -C NUMBAD - INTEGER ARRAY CONTAINING INDEX NUMBER OF EACH BAD -C - RECORD. -C BADREC - CHARACTER ARRAY CONTAINING BAD RECORDS THAT FAILED -C - THE RSMC CHECK. -C OKAREC - CHARACTER ARRAY CONTAINING ALL RECORDS THAT PASSED -C - THE RSMC CHECK. -C OVRREC - CHARACTER ARRAY CONTAINING UPDATED ALIAS SHORT-TERM -C - HISTORY RECORDS. -C -C INPUT FILES: -C UNIT 20 - SCRATCH FILE CONTAINING SHORT-TERM HISTORY RECORDS -C UNIT 21 - ORIGINAL SHORT-TERM HISTORY FILE CONTAINING RECORDS -C PROCESSED BY THIS PROGRAM FOR THE LAST SEVERAL DAYS. -C IN THIS FILE, THE ORIGINAL RSMC AND STORM ID ARE KEPT. -C UNIT 22 - ALIAS SHORT-TERM HISTORY FILE CONTAINING RECORDS -C PROCESSED BY THIS PROGRAM FOR THE LAST SEVERAL DAYS. -C IN THIS FILE, THE RSMC AND STORM ID HAVE BEEN UNIFIED. 
-C UNIT 25 - ALIAS FILE CONTAINING EQUIVALENT STORM IDS -C - FOR STORMS THAT HAVE BEEN REPORTED BY MULTIPLE RSMC'S -C - DCB: LRECL=255, BLKSIZE=23400, RECFM=VB -C UNIT 26 - NEW ALIAS FILE CONTAINING EQUIVALENT STORM IDS -C - FOR STORMS THAT HAVE BEEN REPORTED BY MULTIPLE RSMC'S -C -C OUTPUT FILES: -C UNIT 06 - STANDARD OUTPUT PRINT -C UNIT 20 - SCRATCH FILE CONTAINING SHORT-TERM HISTORY RECORDS -C UNIT 25 - ALIAS FILE CONTAINING EQUIVALENT STORM IDS -C - FOR STORMS THAT HAVE BEEN REPORTED BY MULTIPLE RSMC'S -C - DCB: LRECL=255, BLKSIZE=23400, RECFM=VB -C UNIT 26 - NEW ALIAS FILE CONTAINING EQUIVALENT STORM IDS -C - FOR STORMS THAT HAVE BEEN REPORTED BY MULTIPLE RSMC'S -C - NOTE: UCL SHOULD COPY THIS FILE TO FT22F001 (THE OLD -C - ALIAS FILE) AT THE END OF EXECUTION. -C -C REMARKS: NONE. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE RSMCCK(IUNTHO,IUNTHA,IUNTAL,IUNTAN,IUNTCA,IUNTOK, - 1 NVSBRS,IVSBRS,MAXOVR,NTEST,NOKAY,NBAD,NRCOVR, - 2 IFRSMC,NUMTST,NUMOKA,NUMBAD,IOVRLP,TSTREC, - 3 BADREC,OKAREC,OVRREC) - - PARAMETER (NERCRS=10) - PARAMETER (MAXSTM=70) - PARAMETER (NOVRMX=MAXSTM) - PARAMETER (NADDMX=10) - PARAMETER (MAXREC=1000) - - SAVE - - CHARACTER*(*) TSTREC(0:NTEST),BADREC(MAXREC),OKAREC(NTEST), - 1 ERCRS(NERCRS)*60,OVRREC(MAXOVR) - CHARACTER*100 DUMY2K - - PARAMETER (MAXCHR=95) - PARAMETER (MAXVIT=15) - PARAMETER (NBASIN=11) - PARAMETER (NRSMCX=4) - PARAMETER (NRSMCW=2) - PARAMETER (NCRDMX=57) - - CHARACTER BUFIN*1,RSMCZ*4,STMIDZ*3,STMNMZ*9,FSTFLZ*1,STMDPZ*1, - 1 LATNS*1,LONEW*1,FMTVIT*6,BUFINZ*100,RELOCZ*1,NAMVAR*5, - 2 IDBASN*1,NABASN*16,RSMCID*4,RSMCAP*1,CARDNM*9 - - DIMENSION IVTVAR(MAXVIT),VITVAR(MAXVIT),VITFAC(MAXVIT), - 1 ISTVAR(MAXVIT),IENVAR(MAXVIT) - - DIMENSION NAMVAR(MAXVIT+1),IDBASN(NBASIN),NABASN(NBASIN), - 1 BUFIN(MAXCHR),FMTVIT(MAXVIT), - 2 RSMCID(NRSMCX),RSMCAP(NRSMCX),RSMCPR(NBASIN), - 3 RSMCWT(NRSMCW),CARDNM(NCRDMX) - - EQUIVALENCE (BUFIN(1),RSMCZ),(BUFIN(5),RELOCZ),(BUFIN(6),STMIDZ), - 1 (BUFIN(10),STMNMZ),(BUFIN(19),FSTFLZ), - 2 (BUFIN(37),LATNS),(BUFIN(43),LONEW), - 3 (BUFIN(95),STMDPZ),(BUFIN(1),BUFINZ) - - EQUIVALENCE (IVTVAR(1),IDATEZ),(IVTVAR(2),IUTCZ) - - EQUIVALENCE (VITVAR( 3),STMLTZ),(VITVAR( 4),STMLNZ), - 1 (VITVAR( 5),STMDRZ),(VITVAR( 6),STMSPZ), - 2 (VITVAR( 7),PCENZ), (VITVAR( 8),PENVZ), - 3 (VITVAR( 9),RMAXZ) - - CHARACTER STMNAM*9,STMID*3,RSMC*4 - - DIMENSION STMNAM(MAXSTM),STMLAT(MAXSTM),STMLON(MAXSTM), - 1 IDATE(MAXSTM),IUTC(MAXSTM),RMAX(MAXSTM),PENV(MAXSTM), - 2 PCEN(MAXSTM),RSMC(MAXSTM),STMID(MAXSTM) - - DIMENSION IFRSMC(MAXREC),NUMOKA(NTEST),NUMBAD(MAXREC), - 1 NUMTST(NTEST),IOVRLP(MAXOVR),IVSBRS(0:NVSBRS) - - DIMENSION IVTVRX(MAXVIT),VITVRX(MAXVIT) - - DIMENSION IPRIOR(NOVRMX),AVWT(NOVRMX),RSMCAL(NOVRMX), - 1 STIDAL(NOVRMX),STNMAD(NOVRMX),IRSMC(4),SRTDAY(NOVRMX), - 2 IDASRT(NOVRMX),INDSAM(NOVRMX),DAYZAD(NADDMX), - 3 RSMCOV(NOVRMX),STIDOV(NOVRMX), - 4 RSMCAD(NADDMX),STIDAD(NADDMX) - - DIMENSION RINC(5) - - CHARACTER BUFCK(MAXCHR)*1,RSMCX*4,RELOCX*1,STMIDX*3,BUFINX*100, - 1 STMNMX*9,LATNSX*1,LONEWX*1,BSCOFL*2,RPCOFL*2,STNMAL*9, - 2 RSMCAL*4,STIDAL*3,STNMAD*9,RSMCOV*4,STIDOV*3,STNMOV*9, - 3 STIDAD*3,RSMCAD*4,STHCH*21 - - LOGICAL OSTHFL - - EQUIVALENCE (BUFCK(1),RSMCX),(BUFCK(5),RELOCX),(BUFCK(6),STMIDX), - 1 (BUFCK(1),BUFINX),(BUFCK(10),STMNMX), - 2 (BUFCK(35),LATNSX),(BUFCK(41),LONEWX) - - EQUIVALENCE (IVTVRX(1),IDATEX),(IVTVRX(2),IUTCX), - 1 (VITVRX(3),STMLTX),(VITVRX(4),STMLNX), - 2 (VITVRX(5),STMDRX),(VITVRX(6),STMSPX), - 3 (VITVRX(7),PCENX), (VITVRX(8),PENVX), - 4 (VITVRX(9),RMAXX) - - DATA 
VITFAC/2*1.0,2*0.1,1.0,0.1,9*1.0/, - 1 FMTVIT/'(I8.8)','(I4.4)','(I3.3)','(I4.4)',2*'(I3.3)', - 2 3*'(I4.4)','(I2.2)','(I3.3)',4*'(I4.4)'/, - 3 ISTVAR/20,29,34,39,45,49,53,58,63,68,71,75,80,85,90/, - 4 IENVAR/27,32,36,42,47,51,56,61,66,69,73,78,83,88,93/ - - DATA IDBASN/'L','E','C','W','O','T','U','P','S','B','A'/ - - DATA NABASN/'ATLANTIC ','EAST PACIFIC ', - 1 'CENTRAL PACIFIC ','WEST PACIFIC ', - 2 'SOUTH CHINA SEA ','EAST CHINA SEA ', - 3 'AUSTRALIA ','SOUTH PACIFIC ', - 4 'SOUTH INDIAN OCN','BAY OF BENGAL ', - 5 'NRTH ARABIAN SEA'/ - - DATA RSMCID/'NHC ','JTWC','ADRM','JMA '/, - 1 RSMCAP/'N','W','A','J'/,RSMCPR/3*1,3*2,3,4*2/, - 2 RSMCWT/1.0,0.25/ - - DATA NAMVAR/'DATE ','TIME ','LAT. ','LONG.','DIR ','SPEED', - 1 'PCEN ','PENV ','RMAX ','VMAX ','RMW ','R15NE', - 2 'R15SE','R15SW','R15NW','DEPTH'/ - -C CARDINAL NUMBER STORM NAMES FOR UNNAMED ATLANTIC AND EAST PACIFIC -C STORMS - - DATA CARDNM/'ONE ','TWO ','THREE ', - 1 'FOUR ','FIVE ','SIX ', - 2 'SEVEN ','EIGHT ','NINE ', - 3 'TEN ','ELEVEN ','TWELVE ', - 4 'THIRTEEN ','FOURTEEN ','FIFTEEN ', - 5 'SIXTEEN ','SEVENTEEN','EIGHTEEN ', - 6 'NINETEEN ','TWENTY ','TWENTY-ON', - 7 'TWENTY-TW','TWENTY-TH','TWENTY-FO', - 8 'TWENTY-FI','TWENTY-SI','TWENTY-SE', - 9 'TWENTY-EI','TWENTY-NI','THIRTY ', - O 'THIRTY-ON','THIRTY-TW','THIRTY-TH', - 1 'THIRTY-FO','THIRTY-FI','THIRTY-SI', - 2 'THIRTY-SE','THIRTY-EI','THIRTY-NI', - 3 'TWENTYONE','TWENTYTWO','TWENTYTHR', - 4 'TWENTYFOU','TWENTYFIV','TWENTYSIX', - 5 'TWENTYSEV','TWENTYEIG','TWENTYNIN', - 6 'THIRTYONE','THIRTYTWO','THIRTYTHR', - 7 'THIRTYFOU','THIRTYFIV','THIRTYSIX', - 8 'THIRTYSEV','THIRTYEIG','THIRTYNIN'/ - -C BUFZON: BUFFER ZONE REQUIRED BY SYNTHETIC DATA PROGRAM (SYNDATA) -C DEGLAT: ONE DEGREE LATITUDE IN KM -C RMAXMN: MINIMUM ALLOWABLE VALUE OF RMAX -C DTOVR : MINIMUM WINDOW (FRACTIONAL DAYS) FOR OVERLAPPING STORMS -C EXTRAPOLATED TO A COMMON TIME. -C IPRT : CONTROLS PRINTOUT IN SUBROUTINE BASNCK -C FACSPD: CONVERSION FACTOR FOR R(DEG LAT)=V(M/S)*T(FRAC DAY)* -C FACSPD - - DATA BUFZON/1.0/,DEGLAT/111.1775/,RMAXMN/100./,DTOVR/1.0/, - 1 IPRT/0/,FIVMIN/3.4722E-3/,FACSPD/0.77719/ - - DATA ERCRS - 1 /' 1: CANNOT RESOLVE: SAME RSMC REPORTED OVERLAPPING STORMS ', - 2 '10: RESOLVED: SAME RSMC REPORTED OVERLAPPING STORMS ', - 3 ' 2: CANNOT RESOLVE: DIFF. RSMCS REPORTED DIFF. OVERL. STMS.', - 4 '21: DIFFERENT RSMCS REPORTED SAME OVERLAPPING STORMS (CUR) ', - 5 '22: DIFFERENT RSMCS REPORTED SAME OVERLAPPING STORMS (OSTH)', - 6 '30: UNIFIED RECORD CREATED FOR SINGLY OBSERVED STORM ', - 7 ' 3: STORM IS NOT IN A BASIN DEFINED BY BASNCK ', - 8 ' 4: RSMC IS NOT AMONG LISTED CENTERS (NO ERROR RECOVERY) ', - 9 ' 5: DIFFERENT RSMCS REPORTED DIFFERENT OVERLAPPING STORMS ', - O ' 6: SINGLE RSMC HAS TWO STORM IDS FOR THE SAME STORM '/ - -C ERROR CODES FOR BAD RECORDS RETURNED IN IFRSMC ARE AS FOLLOWS: -C 1: CANNOT RESOLVE: SAME RSMC REPORTED OVERLAPPING STORMS -C 10: RESOLVED: SAME RSMC REPORTED OVERLAPPING STORMS -C 2: CANNOT RESOLVE: DIFF. RSMCS REPORTED DIFF. OVERL. STMS. -C 21: DIFFERENT RSMCS REPORTED SAME OVERLAPPING STORMS (CUR) -C 22: DIFFERENT RSMCS REPORTED SAME OVERLAPPING STORMS (OSTH) -C 30: UNIFIED RECORD CREATED FOR SINGLY OBSERVED STORM -C 3: STORM IS NOT IN A BASIN DEFINED BY BASNCK -C 4: RSMC IS NOT AMONG LISTED CENTERS (NO ERROR RECOVERY) -C 5: TWO DIFFERENT RSMCS REPORT DIFFERENT OVERLAPPING STORMS -C 6: SINGLE RSMC HAS TWO STORM IDS FOR THE SAME STORM - - WRITE(6,1) NTEST,NOKAY,NBAD - 1 FORMAT(//'...ENTERING RSMCCK, LOOKING FOR MULTIPLE STORM ', - 1 'REPORTS. 
NTEST,NOKAY,NBAD=',3I5/) - - CALL WRNING('RSMCCK') - WRITE(6,3) NVSBRS,(NAMVAR(IVSBRS(NV)),NV=1,NVSBRS) - 3 FORMAT(/'...NUMBER OF ALLOWABLE VARIABLES FOR SUBSTITUTION ', - 1 'IS:',I3,' VARIABLES ARE:'/4X,10(A,1X)) - - NADD=0 - NSUBR=0 - NUNIFY=0 - NALADD=0 - REWIND IUNTAN - OVRREC(1:NTEST)=' ' - IOVRLP(1:NTEST)=0 - IFRSMC(NUMTST(1:NTEST))=0 - -C FOR COMPLETE COTEMPORANEOUS CHECKS, WE MUST MAKE AVAILABLE THE -C ORIGINAL SHORT-TERM HISTORY RECORDS. WE STORE THEM AT THE END -C OF THE OVRREC ARRAY. - - REWIND IUNTHO - NRECHO=0 - WRITE(6,13) IUNTHO - 13 FORMAT(/'...READING FROM ORIGINAL SHORT-TERM HISTORY FILE ', - 1 '(UNIT',I3,') INTO SCRATCH SPACE: RECORD #, STORAGE ', - 2 'INDEX, RECORD=') - - 20 CONTINUE - - READ(IUNTHO,21,END=25) OVRREC(MAXOVR-NRECHO) - 21 FORMAT(A) - -C AT THIS POINT WE DO NOT KNOW IF A 2-DIGIT YEAR BEGINS IN COLUMN 20 -C OF THE RECORD (OLD NON-Y2K COMPLIANT FORM) OR IF A 4-DIGIT YEAR -C BEGINS IN COLUMN 20 (NEW Y2K COMPLIANT FORM) - TEST ON LOCATION OF -C LATITUDE N/S INDICATOR TO FIND OUT ... - - if(OVRREC(MAXOVR-NRECHO)(35:35).eq.'N' .or. - 1 OVRREC(MAXOVR-NRECHO)(35:35).eq.'S') then - -C ... THIS RECORD STILL CONTAINS THE OLD 2-DIGIT FORM OF THE YEAR - -C ... THIS PROGRAM WILL CONVERT THE RECORD TO A 4-DIGIT YEAR USING THE -C "WINDOWING" TECHNIQUE SINCE SUBSEQUENT LOGIC EXPECTS THIS - - PRINT *, ' ' - PRINT *, '==> Read in RECORD from tcvitals file -- contains a', - $ ' 2-digit year "',OVRREC(MAXOVR-NRECHO)(20:21),'"' - PRINT *, ' ' - PRINT *, 'From unit ',iuntho,'; OVRREC(MAXOVR-NRECHO)-2: ', - $ OVRREC(MAXOVR-NRECHO) - PRINT *, ' ' - DUMY2K(1:19) = OVRREC(MAXOVR-NRECHO)(1:19) - IF(OVRREC(MAXOVR-NRECHO)(20:21).GT.'20') THEN - DUMY2K(20:21) = '19' - ELSE - DUMY2K(20:21) = '20' - ENDIF - DUMY2K(22:100) = OVRREC(MAXOVR-NRECHO)(20:100) - OVRREC(MAXOVR-NRECHO) = DUMY2K - PRINT *, ' ' - PRINT *, '==> 2-digit year converted to 4-digit year "', - $ OVRREC(MAXOVR-NRECHO)(20:23),'" via windowing technique' - PRINT *, ' ' - PRINT *, 'From unit ',iuntho,'; OVRREC(MAXOVR-NRECHO)-2: ', - $ OVRREC(MAXOVR-NRECHO) - PRINT *, ' ' - - ELSE IF(OVRREC(MAXOVR-NRECHO)(37:37).eq.'N' .OR. - 1 OVRREC(MAXOVR-NRECHO)(37:37).eq.'S') THEN - -C ... THIS RECORD CONTAINS THE NEW 4-DIGIT FORM OF THE YEAR -C ... NO CONVERSION NECESSARY SINCE THIS SUBSEQUENT LOGIC EXPECTS THIS - - PRINT *, ' ' - PRINT '(a,a,a)', '==> Read in RECORD from tcvitals file -- ', - $ ' contains a 4-digit year "',OVRREC(MAXOVR-NRECHO)(20:23),'"' - PRINT *, ' ' - PRINT '(a,i2,a,a)', - $ 'From unit ',iuntho,'; OVRREC(MAXOVR-NRECHO)-2: ', - $ OVRREC(MAXOVR-NRECHO) - PRINT *, ' ' - PRINT *, '==> No conversion necessary' - PRINT *, ' ' - - ELSE - - PRINT *, ' ' - PRINT *, '***** Cannot determine if this record contains ', - $ 'a 2-digit year or a 4-digit year - skip it and try reading ', - $ 'the next record' - PRINT *, ' ' - GO TO 20 - - END IF - - WRITE(6,23) NTEST+NRECHO+1,MAXOVR-NRECHO,OVRREC(MAXOVR-NRECHO) - 23 FORMAT(' ...',I4,'...',I4,'...',A) - NRECHO=NRECHO+1 - - IF(NRECHO .GE. MAXOVR-NTEST) THEN - WRITE(6,24) NRECHO,MAXOVR,NTEST - 24 FORMAT(/'******INSUFFICIENT SCRATCH SPACE TO STORE ORIGINAL ', - 1 'SHORT-TERM HISTORICAL RECORDS IN OVRREC. 
NRECHO,', - 2 'MAXOVR,NTEST=',3I3) - CALL ABORT1(' RSMCCK',24) - ENDIF - - GO TO 20 - 25 CONTINUE - WRITE(6,26) NRECHO - 26 FORMAT(' ...',I3,' RECORDS READ FROM ORIGINAL SHORT-TERM ', - 1 'HISTORY FILE.') - -C PART I: -C CHECK COTEMPORANEOUS RECORDS FOR STORMS WITHIN EACH OTHER'S RMAX - - WRITE(6,27) - 27 FORMAT(//'...BEGINNING RSMCCK PART I: COTEMPORANEOUS CHECKS FOR ', - 1 'OVERLAPPING STORMS.') - - DO NREC=1,NTEST - - IETYP=0 - IEROVR=0 - NOVRLP=1 - NRECSV=NREC - -C RECORDS THAT WERE PROCESSED AS COTEMPORANEOUS OVERLAPS PREVIOUSLY -C DO NOT GET FURTHER PROCESSING - - IF(IFRSMC(NUMTST(NREC)) .NE. 0) GO TO 400 - -C RECOVER DATE, UTC, LAT/LON AND RMAX - - BUFINZ=TSTREC(NREC) - - DO IV=1,MAX(9,IVSBRS(NVSBRS)) - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVAR(IV),IERDEC,FMTVIT(IV), - 1 TSTREC(NREC)) - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVAR(IV),IERDEC,FMTVIT(IV), - 1 BUFINZ) - ENDDO - - VITVAR(3:MAX(9,IVSBRS(NVSBRS)))= - $ REAL(IVTVAR(3:MAX(9,IVSBRS(NVSBRS))))* - $ VITFAC(3:MAX(9,IVSBRS(NVSBRS))) - IF(LATNS .EQ. 'S') STMLTZ=-STMLTZ - IF(LONEW .EQ. 'W') STMLNZ=360.-STMLNZ - -C STORE NEEDED VARIABLES FOR LATER REFERENCE - - STMNAM(1)=STMNMZ - STMID (1)=STMIDZ - RSMC (1)=RSMCZ - STMLAT(1)=STMLTZ - STMLON(1)=STMLNZ - RMAX (1)=RMAXZ - PCEN (1)=PCENZ - PENV (1)=PENVZ - IOVRLP(1)=NREC - OVRREC(1)=BUFINZ - CALL ZTIME(IDATEZ,IUTCZ,IYR,IMO,IDA,IHR,IMIN) - CALL W3DIFDAT((/IYR,IMO,IDA,0,0,0,0,0/),(/1899,12,31,0,0,0,0,0/), - $ 1,RINC) - JDY = NINT(RINC(1)) - CALL FLDAY(JDY,IHR,IMIN,DAYZ) - - IF(RMAXZ .LT. 0.0) THEN - DO NBA=1,NBASIN - IF(STMIDZ(3:3) .EQ. IDBASN(NBA)) THEN - IBASN=NBA - GO TO 46 - ENDIF - ENDDO - 46 CONTINUE - RMAXZ=TCCLIM(9,IBASN) - WRITE(6,47) NREC,RMAXZ,NABASN(IBASN) - 47 FORMAT(' ###RMAXZ MISSING FOR COTEMPORANEOUS CHECK ON RECORD',I3, - 1 '.'/4X,'REPLACEMENT VALUE WILL BE A CLIMATOLOGICAL ', - 2 'GUESS OF ',F6.1,' KM FOR BASIN ',A,'.') - ENDIF - -C NOW COMPARE WITH ALL REMAINING STORM REPORTS THAT HAVE NOT BEEN -C MARKED OFF AS ERRONEOUS - - NRECHZ=-1 - DO NTST=NREC+1,NTEST+NRECHO - - IF(NTST .LE. NTEST .AND. IFRSMC(NUMTST(NTST)) .NE. 0) GO TO 100 - - IF(NTST .LE. NTEST) THEN - INDTST=NTST - BUFINX=TSTREC(NTST) - OSTHFL=.FALSE. - ELSE - NRECHZ=NRECHZ+1 - INDTST=MAXOVR-NRECHZ - BUFINX=OVRREC(INDTST) - OSTHFL=.TRUE. - ENDIF - - DO IV=1,MAX(9,IVSBRS(NVSBRS)) - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVRX(IV),IERDEC,FMTVIT(IV), - 1 BUFINX) - ENDDO - - VITVRX(3:MAX(9,IVSBRS(NVSBRS)))= - $ REAL(IVTVRX(3:MAX(9,IVSBRS(NVSBRS))))* - $ VITFAC(3:MAX(9,IVSBRS(NVSBRS))) - - IF(LATNSX .EQ. 'S') STMLTX=-STMLTX - IF(LONEWX .EQ. 'W') STMLNX=360.-STMLNX - -C COTEMPORANEOUS CHECK - - IF(IDATEX .EQ. IDATEZ .AND. IUTCX .EQ. IUTCZ) THEN - - RMAXSV=RMAXX - IF(RMAXX .LT. 0.0) THEN - DO NBA=1,NBASIN - IF(STMIDX(3:3) .EQ. IDBASN(NBA)) THEN - IBASN=NBA - GO TO 66 - ENDIF - ENDDO - 66 CONTINUE - RMAXX=TCCLIM(9,IBASN) - WRITE(6,75) NTST,RMAXX,NABASN(IBASN) - 75 FORMAT(' ###RMAXX MISSING FOR COTEMPORANEOUS CHECK ON RECORD',I3, - 1 '.'/4X,'REPLACEMENT VALUE WILL BE A CLIMATOLOGICAL ', - 2 'GUESS OF ',F6.1,' KM FOR BASIN ',A,'.') - ENDIF - - DISTZ=DISTSP(STMLTZ,STMLNZ,STMLTX,STMLNX)*1.E-3 - -C OVERLAP CHECK. BUFFER ZONE CORRESPONDS TO SYNDATA CONDITION - - IF(DISTZ .LE. RMAXZ+RMAXX+BUFZON*DEGLAT) THEN - -C IF THE MATCHING RECORD IS FROM THE SAME RSMC AND THE STORM -C ID IS THE SAME AND THE RECORD WAS IN THE ORIGINAL SHORT-TERM -C HISTORY FILE, WE ASSUME THE RECORD (NREC) IS AN UPDATE TO THE -C EARLIER RECORD. THE ERROR FLAG IS RESET TO INDICATE NO ERROR. - - IF(RSMCZ .EQ. RSMCX .AND. - 1 STMIDZ .EQ. STMIDX .AND. 
OSTHFL) THEN - WRITE(6,76) NREC,INDTST,NREC,BUFINZ,INDTST,BUFINX - 76 FORMAT(/'###RECORD IN ORIGINAL SHORT-TERM HISTORY FILE HAS ', - 1 'PROBABLY BEEN UPDATED . NREC,INDTST=',2I4, - 2 '. RECORDS ARE:'/2(4X,'...',I4,'...',A/)) - GO TO 100 - - ELSE - -C STORE NEEDED VARIABLES FOR LATER REFERENCE. DON'T USE THE -C CLIMATOLOGICAL VALUE! - - NOVRLP=NOVRLP+1 - IOVRLP(NOVRLP)=NTST - OVRREC(NOVRLP)=BUFINX - STMNAM(NOVRLP)=STMNMX - STMID (NOVRLP)=STMIDX - RSMC (NOVRLP)=RSMCX - STMLAT(NOVRLP)=STMLTX - STMLON(NOVRLP)=STMLNX - RMAX (NOVRLP)=RMAXSV - PCEN (NOVRLP)=PCENX - PENV (NOVRLP)=PENVX - - WRITE(6,77) DISTZ,NREC,NTST,INDTST,BUFINZ,BUFINX - 77 FORMAT(//'...TWO STORMS REPORTED AT THE SAME DATE/TIME WITHIN ', - 1 'THE OTHERS CIRCULATION. DISTZ,NREC,NTST,INDTST=',F7.1,2 - 2 I4,I5/2(4X,'...',A,'...'/)) - -C SAME OR DIFFERENT RSMC? - - IF(RSMCZ .EQ. RSMCX) THEN - IETYP=1 - ELSE - IETYP=2 - ENDIF - - IF(NOVRLP .EQ. 2) THEN - IEROVR=IETYP - - ELSE - IF(IETYP .NE. IEROVR) THEN - IOVRLP(NOVRLP)=-IABS(IOVRLP(NOVRLP)) - WRITE(6,71) NREC,NTST - 71 FORMAT(' ###WARNING: MULTIPLE OVERLAP TYPES FOR NREC=',I3/4X, - 1 'ERROR RECOVERY CURRENTLY WORKS ON A SINGLE OVERLAP TYPE ', - 2 'SO THIS RECORD=#',I3,' WILL BE AUTOMATICALLY DISCARDED.') - ENDIF - ENDIF - - ENDIF - ENDIF - ENDIF - 100 CONTINUE - ENDDO - IF(IETYP .EQ. 0) GO TO 390 - -C ERROR RECOVERY FOR PART I: - - WRITE(6,103) NREC,IEROVR,NOVRLP-1,(IOVRLP(NOVR),NOVR=2,NOVRLP) - 103 FORMAT(' ...SUMMARY OF OVERLAPS FOR NREC=',I3,'. OVERLAP ', - 1 'TYPE=',I3,' AND NUMBER OF OVERLAPS=',I3, - 2 ' OVERLAP INDICES ARE:'/4X,'(NEGATIVE OVERLAP ', - 3 'INDICES MEAN THAT THE OVERLAP TYPE DIFFERS FROM ', - 4 'THE PRIMARY ONE WHICH IS IEROVR)'/4X,10I3) - -C **************************************************** -C **************************************************** -C **** **** -C **** MULTIPLE REPORTS BY THE SAME INSTITUTION **** -C **** **** -C **************************************************** -C **************************************************** - - IF(IEROVR .EQ. 1) THEN - IVR=9 - WRITE(6,107) IETYP - 107 FORMAT(' ******STORMS ARE REPORTED BY THE SAME RSMC, WHICH ', - 1 'IS A LOGICAL ERROR. IETYP=',I2/4X,'WE PROCEED TO ', - 2 'RECOVER THIS ERROR BY REDUCING THE RMAX OF THE LARGEST ', - 3 'STORM SO THAT OVERLAP WILL NOT OCCUR.') - - IF(NOVRLP .GT. 2) WRITE(6,109) - 109 FORMAT(' ###WARNING, NOVRLP > 2 SO THAT PROCESSING WILL ', - 1 'OCCUR FOR ONLY THE LARGEST AND SMALLEST STORMS. ', - 2 'OTHERS WILL BE AUTOMATICALLY MARKED ERRONEOUS.') - -C PICK OUT THE LARGEST AND SMALLEST STORMS - - INDXZ=1 - INDXX=1 - RMAXZ=RMAX(1) - RMAXX=RMAX(1) - DO NOVR=2,NOVRLP - IF(IOVRLP(NOVR) .GT. 0) THEN - IF(RMAX(NOVR) .GT. RMAXZ) THEN - RMAXZ=RMAX(NOVR) - INDXZ=NOVR - ENDIF - IF(RMAX(NOVR) .LT. RMAXX) THEN - RMAXX=RMAX(NOVR) - INDXX=NOVR - ENDIF - ENDIF - ENDDO - - DISTZX=DISTSP(STMLAT(INDXZ),STMLON(INDXZ), - 1 STMLAT(INDXX),STMLON(INDXX))*1.E-3 - EXCESS=RMAXZ+RMAXX+BUFZON*DEGLAT-DISTZX - WRITE(6,121) INDXZ,INDXX,STMID(INDXZ),RMAXZ,STMID(INDXX),RMAXX, - 1 DISTZX,EXCESS - 121 FORMAT('...INDXZ,INDXX,STMID(INDXZ),RMAX(INDXZ),STMID(INDXX),', - 1 'RMAX(INDXX)=',2I3,2(1X,A,F7.1),' DISTZX,EXCESS=',2F9.1) - RMAXZT=RMAXZ-EXCESS - -C RECOVERY METHOD 1: SUBTRACT EXCESS FROM LARGEST RMAX BUT MAINTAIN -C RELATIVE SIZE - - IF(RMAXZT .GT. 
RMAXX) THEN - WRITE(OVRREC(INDXZ)(ISTVAR(IVR):IENVAR(IVR)),FMTVIT(IVR)) - 1 NINT(RMAXZT) - OVRREC(INDXZ)(ISTVAR(IVR)-1:ISTVAR(IVR)-1)='O' - OVRREC(INDXX)=TSTREC(IOVRLP(INDXX)) - WRITE(6,123) IOVRLP(INDXZ),RMAXZ,RMAXZT,INDXZ,OVRREC(INDXZ) - 123 FORMAT(' ###IMPORTANT NOTE: FOR RECORD',I3,' RMAXZ=',F7.1, - 1 ' WILL BE SUBSTITUTED BY RMAXZT=',F7.1,' FOR INDXZ=',I3, - 2 '. AFTER SUBSTITUTION, OVRREC='/4X,A) - IETYP=-10 - -C RECOVERY METHOD 2: SUBTRACT HALF THE EXCESS FROM EACH RMAX - - ELSE - WRITE(6,125) - 125 FORMAT('...UNABLE TO MAINTAIN RMAXZ>RMAXX. HALF THE ', - 1 'EXCESS WILL BE SUBTRACTED FROM EACH REPORT.') - RMAXZT=RMAXZ-0.5*EXCESS - RMAXXT=RMAXX-0.5*EXCESS - IF(RMAXZT .GE. RMAXMN .AND. RMAXXT .GE. RMAXMN) THEN - WRITE(OVRREC(INDXZ)(ISTVAR(IVR):IENVAR(IVR)),FMTVIT(IVR)) - 1 NINT(RMAXZT) - WRITE(OVRREC(INDXX)(ISTVAR(IVR):IENVAR(IVR)),FMTVIT(IVR)) - 1 NINT(RMAXXT) - OVRREC(INDXX)(ISTVAR(IVR)-1:ISTVAR(IVR)-1)='O' - WRITE(6,123) IOVRLP(INDXZ),RMAXZ,RMAXZT,INDXZ,OVRREC(INDXZ) - WRITE(6,127) IOVRLP(INDXX),RMAXX,RMAXXT,IOVRLP(INDXX), - 1 OVRREC(INDXX) - 127 FORMAT(' ###IMPORTANT NOTE: FOR RECORD',I3,' RMAXX=',F7.1, - 1 ' WILL BE SUBSTITUTED BY RMAXXT=',F7.1,' FOR INDXX=',I3, - 2 '. AFTER SUBSTITUTION, OVRREC='/4X,A) - IETYP=-10 - - ELSE - WRITE(6,129) RMAXZT,RMAXXT,RMAXMN - 129 FORMAT(' ******RMAXZ AND RMAXX REDUCTION METHODS HAVE FAILED. ', - 1 'RMAXZT,RMAXXT=',2F7.1,' < RMAXMN=',F7.1) - ENDIF - ENDIF - - DO NOVR=1,NOVRLP - -C ASSIGN ERROR FLAGS AND UPDATE RECORDS FOR THE TWO RECORDS -C THAT WE TRIED TO CORRECT - - IF(NOVR .EQ. INDXZ .OR. NOVR .EQ. INDXX) THEN - IFRSMC(NUMTST(IOVRLP(NOVR)))=IETYP - IF(IETYP .GT. 0) THEN - NADD=NADD+1 - NUMBAD(NADD+NBAD)=NUMTST(IOVRLP(NOVR)) - BADREC(NADD+NBAD)=TSTREC(IOVRLP(NOVR)) - ELSE - NOKAY=NOKAY+1 - NUMOKA(NOKAY)=NUMTST(IOVRLP(NOVR)) - OKAREC(NOKAY)=OVRREC(NOVR) - ENDIF - -C ASSIGN ERROR FLAGS TO ALL OTHER RECORDS - - ELSE - IFRSMC(NUMTST(IOVRLP(NOVR)))=IETYP - NADD=NADD+1 - NUMBAD(NADD+NBAD)=NUMTST(IOVRLP(NOVR)) - BADREC(NADD+NBAD)=TSTREC(IOVRLP(NOVR)) - ENDIF - ENDDO - GO TO 400 - -C *************************************************** -C *************************************************** -C **** **** -C **** MULTIPLE REPORTS BY TWO DIFFERENT RSMCS **** -C **** **** -C *************************************************** -C *************************************************** - - ELSE IF(IEROVR .EQ. 2) THEN - WRITE(6,201) IETYP - 201 FORMAT('...STORMS ARE REPORTED BY DIFFERENT RSMCS. ', - 1 'WE PROCEED TO SEE IF THEY ARE THE SAME STORM BY ', - 2 'COMPARING NAMES.'/4X,'THEN WE CONSTRUCT A COMMON ', - 3 'STORM ID. PRELIMINARY IETYP=',I2) - - BUFINZ=OVRREC(1) - - NERROR=0 - DO NOVR=2,NOVRLP - IF(STMNAM(NOVR) .EQ. 'NAMELESS' .AND. - 1 STMNMZ .EQ. 'NAMELESS') THEN - WRITE(6,202) STMIDZ,RSMCZ,STMID(NOVR),RSMC(NOVR) - 202 FORMAT(' ###OVERLAPPING NAMELESS STORMS HAVE IDS AND RSMCS=', - 1 2(2(A,1X),2X)) - - ELSE IF(STMNAM(NOVR) .EQ. STMNMZ) THEN - WRITE(6,203) STMNAM(NOVR),NOVR - 203 FORMAT('...STORM NAME=',A,' FOR NOVR=',I3,' MATCHES FIRST ', - 1 'REPORT. THE STORMS ARE THE SAME.') - - ELSE - -C IF ONE RSMC REPORTS A NAMELESS STORM AND THE OTHER RSMCS REPORT -C A NAME, TRANSFER THE STORM NAME TO THE NAMELESS RECORD. - - IF(STMNMZ .EQ. 'NAMELESS') THEN - WRITE(6,205) STMNAM(NOVR),NOVR - 205 FORMAT('...STMNMZ IS NAMELESS. COPYING STMNAM(NOVR)=',A,' TO ', - 1 'STMNMZ. NOVR=',I3) - STMNAM(1)=STMNAM(NOVR) - STMNMZ=STMNAM(NOVR) - OVRREC(1)=BUFINZ - - IF(IOVRLP(1) .LE. NTEST) TSTREC(IOVRLP(1))=BUFINZ - - ELSE IF(STMNAM(NOVR) .EQ. 
'NAMELESS') THEN - WRITE(6,207) STMNMZ,NOVR - 207 FORMAT('...STMNAM(NOVR) IS NAMELESS. COPYING STMNMZ=',A,' TO ', - 1 'STMNAM(NOVR). NOVR=',I3) - STMNAM(NOVR)=STMNMZ - BUFINX=OVRREC(NOVR) - STMNMX=STMNMZ - OVRREC(NOVR)=BUFINX - - IF(IOVRLP(NOVR) .LE. NTEST) TSTREC(IOVRLP(NOVR))=BUFINX - -C THERE ARE TWO NAMES, NEITHER OF WHICH IS NAMELESS. THUS THERE IS -C AN UNTREATABLE ERROR - - ELSE - IETYP=5 - NERROR=NERROR+1 - IOVRLP(NOVR)=-IABS(IOVRLP(NOVR)) - WRITE(6,209) NOVR,STMNAM(NOVR),STMNMZ,IETYP - 209 FORMAT(/'******FOR NOVR=',I3,' STORM NAME=',A,' DOES NOT MATCH ', - 1 'NAME FOR THE FIRST REPORT=',A,'.'/4X,' THERE IS NO ', - 2 'ERROR RECOVERY AT THIS TIME. IETYP=',I3) - -C ERROR MARKING OFF ON THE FLY HERE - - IFRSMC(NUMTST(IABS(IOVRLP(NOVR))))=IETYP - NADD=NADD+1 - NUMBAD(NADD+NBAD)=NUMTST(IABS(IOVRLP(NOVR))) - BADREC(NADD+NBAD)=TSTREC(IABS(IOVRLP(NOVR))) - IETYP=IEROVR - ENDIF - ENDIF - ENDDO - -C IF AN ERROR HAS OCCURRED IN THE PREVIOUS PROCESSING REMOVE -C THE ERRONEOUS RECORD FROM THE OVERLAP LIST AND CONTINUE - - IF(NERROR .NE. 0) THEN - NOVRZ=0 - WRITE(6,213) NERROR - 213 FORMAT(' ******',I3,' ERRORS FOUND DURING STORM NAME MATCHING.') - DO NOVR=1,NOVRLP - IF(IOVRLP(NOVR) .GE. 0 .AND. IOVRLP(NOVR) .LE. NTEST) THEN - NOVRZ=NOVRZ+1 - IOVRLP(NOVRZ)=IOVRLP(NOVR) - OVRREC(NOVRZ)=OVRREC(NOVR) - STMNAM(NOVRZ)=STMNAM(NOVR) - STMID (NOVRZ)=STMID(NOVR) - RSMC (NOVRZ)=RSMC(NOVR) - STMLAT(NOVRZ)=STMLAT(NOVR) - STMLON(NOVRZ)=STMLON(NOVR) - RMAX (NOVRZ)=RMAX(NOVR) - PCEN (NOVRZ)=PCEN(NOVR) - PENV (NOVRZ)=PENV(NOVR) - ENDIF - ENDDO - NOVRLP=NOVRZ - IF(NOVRLP .EQ. 1) GO TO 390 - ENDIF - - WRITE(6,221) - 221 FORMAT(' ...THE OBSERVING RSMCS, THEIR ABBREVIATIONS, ', - 1 'PRIORITIES, INDICES AND REPORTED BASINS ARE:'/11X, - 2 'RSMC',3X,'RSMCAP',3X,'PRIORITY',3X,'INDEX',3X,'BASIN',3X, - 3 'BSCOFL',3X,'RPCOFL') - - NERROR=0 - DO NOVR=1,NOVRLP - -C WHICH BASIN ARE WE IN? - - CALL BASNCK(STMID(NOVR),STMLAT(NOVR),STMLON(NOVR),NBA,IPRT,IER) - IF(IER .EQ. 11) THEN - BSCOFL='IB' - ELSE - BSCOFL='CB' - ENDIF - - IF(IER .EQ. 3) THEN - IETYP=IER - NERROR=NERROR+1 - IOVRLP(NOVR)=-IABS(IOVRLP(NOVR)) - -C AGAIN, ERROR MARKING OFF ON THE FLY - - IFRSMC(NUMTST(IABS(IOVRLP(NOVR))))=IETYP - NADD=NADD+1 - NUMBAD(NADD+NBAD)=NUMTST(IABS(IOVRLP(NOVR))) - BADREC(NADD+NBAD)=TSTREC(IABS(IOVRLP(NOVR))) - IETYP=IEROVR - ENDIF - - IF(NOVR .EQ. 1) THEN - NBASV=NBA - RPCOFL='CR' - ELSE - IF(NBA .NE. NBASV) THEN - RPCOFL='IR' - NBA=NBASV - ENDIF - ENDIF - -C IS THIS A REPORT BY THE PRIORITY RSMC FOR THIS BASIN? THE -C PRIORITY FLAG IS TWO DIGITS. THE FIRST DIGIT IS PRIORITY -C (=1 IF THE RSMC IS THE PRIORITY RSMC, =2 OTHERWISE). THE -C SECOND DIGIT IS THE RSMC INDEX - - NRSPRI=RSMCPR(NBA) - NRSMC=-1 - DO NRSZ=1,NRSMCX - IF(RSMCID(NRSZ) .EQ. RSMC(NOVR)) THEN - NRSMC=NRSZ - IF(NRSMC .EQ. NRSPRI) THEN - IPRIOR(NOVR)=10+NRSMC - AVWT(NOVR)=RSMCWT(1) - BUFINZ=OVRREC(NOVR) - ELSE - IPRIOR(NOVR)=20+NRSMC - AVWT(NOVR)=RSMCWT(2) - ENDIF - GO TO 231 - ENDIF - ENDDO - 231 CONTINUE - - IF(NRSMC .GE. 0) THEN - WRITE(6,233) NOVR,RSMC(NOVR),RSMCAP(NRSMC),IPRIOR(NOVR),NRSMC, - 1 NBA,BSCOFL,RPCOFL - 233 FORMAT(' ',5X,I3,2X,A,6X,A,8X,I2,5X,I4,5X,I3,2(7X,A)) - - ELSE - IETYP=4 - NERROR=NERROR+1 - IOVRLP(NOVR)=-IABS(IOVRLP(NOVR)) - WRITE(6,235) RSMC(NOVR),NOVR,IETYP - 235 FORMAT('0******RSMC=',A,' COULD NOT BE FOUND IN RSMCCK. THIS ', - 1 'RECORD IS ERRONEOUS. 
NOVR=',I3,', IETYP=',I3) - -C AGAIN, ERROR MARKING OFF ON THE FLY - - IFRSMC(NUMTST(IABS(IOVRLP(NOVR))))=IETYP - NADD=NADD+1 - NUMBAD(NADD+NBAD)=NUMTST(IABS(IOVRLP(NOVR))) - BADREC(NADD+NBAD)=TSTREC(IABS(IOVRLP(NOVR))) - ENDIF - - ENDDO - -C IF AN ERROR HAS OCCURRED IN THE PREVIOUS PROCESSING REMOVE -C THE ERRONEOUS RECORD FROM THE OVERLAP LIST AND CONTINUE - - IF(NERROR .NE. 0) THEN - WRITE(6,243) NERROR - 243 FORMAT(' ******',I3,' ERRORS FOUND DURING RSMC VERIFICATION.') - NOVRZ=0 - DO NOVR=1,NOVRLP - IF(IOVRLP(NOVR) .GE. 0 .AND. IOVRLP(NOVR) .LE. NTEST) THEN - NOVRZ=NOVRZ+1 - IOVRLP(NOVRZ)=IOVRLP(NOVR) - IPRIOR(NOVRZ)=IPRIOR(NOVR) - OVRREC(NOVRZ)=OVRREC(NOVR) - STMNAM(NOVRZ)=STMNAM(NOVR) - STMID (NOVRZ)=STMID(NOVR) - RSMC (NOVRZ)=RSMC(NOVR) - STMLAT(NOVRZ)=STMLAT(NOVR) - STMLON(NOVRZ)=STMLON(NOVR) - RMAX (NOVRZ)=RMAX(NOVR) - PCEN (NOVRZ)=PCEN(NOVR) - PENV (NOVRZ)=PENV(NOVR) - AVWT (NOVRZ)=AVWT(NOVR) - ENDIF - ENDDO - NOVRLP=NOVRZ - IF(NOVRLP .EQ. 1) GO TO 390 - ENDIF - - WRITE(6,251) NOVRLP - 251 FORMAT(6X,'KEY: BSCOFL=IB IF REPORTED LAT/LON AND BASIN ', - 1 'ID FROM STORM ID ARE INCONSISTENT.'/18X,'=CB IF ', - 2 'LAT/LON AND BASIN ID ARE CONSISTENT.'/12X,'RPCOFL=', - 3 'CR IF REPORTED BASIN IS THE SAME AS THE FIRST RECORD.' - 4 /18X,'=IR IF REPORTED BASIN IS DIFFERENT FROM THE FIRST ', - 5 'RECORD.'/4X,I3,' OVERLAPPING STORMS HAVE BEEN FOUND.') - -C CHECK THE ALIAS FILE FOR REPORTS UNDER OTHER NAMES - - DO NOVR=1,NOVRLP - NALIAS=0 - NALREC=0 - REWIND IUNTAL - WRITE(6,257) STMNAM(NOVR),STMID(NOVR) - 257 FORMAT(/'...CHECKING THE ALIAS FILE TRYING TO FIND STORM NAME ', - 1 'ID AND RSMC THAT MATCH',3(1X,A)) - - 260 READ(IUNTAL,261,END=300) NALMX,STMNMX,(RSMCAL(NAL),STIDAL(NAL), - 1 NAL=1,MIN(NALMX,NOVRMX)) - 261 FORMAT(I1,1X,A9,10(1X,A4,1X,A3)) - NALREC=NALREC+1 - IF(NOVR .EQ. 1) WRITE(6,267) NALREC,RSMCAL(1),STIDAL(1), - 1 NALMX-1,STMNMX,(RSMCAL(NAL),STIDAL(NAL),NAL=2,MIN(NALMX,NOVRMX)) - 267 FORMAT('...ALIAS RECORD',I3,'=',2(A,1X),' HAS ',I3,' OBSERVERS ', - 1 'AND NAME=',A,' OBSERVERS ARE:'/(14X,2(A,1X))) - -C WRITE(6,293) STMID(NOVR),STIDAL(NAL) -C 293 FORMAT('...CHECKING STORM IDS VERSUS ALIAS FILE. STMID(NOVR),', -C 1 'STIDAL(NAL)=',2(A,1X)) - - IFNDAL=0 - IF(STMNMX .NE. 'NAMELESS' .AND. STMNAM(NOVR) .EQ. STMNMX .AND. - 1 STMID(NOVR)(3:3) .EQ. STIDAL(1)(3:3)) THEN - IFNDAL=1 - WRITE(6,294) STMNMX,STIDAL(1)(3:3) - 294 FORMAT('...EXACT NAME AND BASIN MATCH FOR NAMED STORM=',A,' IN ', - 1 'BASIN ',A,' IN THE ALIAS FILE.') - - ELSE - DO NALZZ=2,MIN(NALMX,NOVRMX) - IF(STMID(NOVR) .EQ. STIDAL(NALZZ) .AND. - 1 RSMC(NOVR) .EQ. RSMCAL(NALZZ)) THEN - IFNDAL=1 - WRITE(6,295) STMNMX,STIDAL(NALZZ),RSMC(NALZZ) - 295 FORMAT('...STORM ID AND RSMC MATCH FOR STORM=',A,' IN THE ', - 1 'ALIAS FILE. ID,RSMC=',2(A,1X)) - ENDIF - ENDDO - ENDIF - - IF(IFNDAL .EQ. 1) THEN - NALIAS=NALMX-1 - -C CHECK THAT THE OBSERVING RSMCS IN THE ALIAS FILE ARE AT LEAST -C THOSE OBSERVING FOR THIS CASE - - NOFIND=0 - DO NOVRZ=1,NOVRLP - DO NALZ=2,MIN(NALMX,NOVRMX) - IF(RSMC(NOVRZ) .EQ. RSMCAL(NALZ)) THEN - NOFIND=0 - GO TO 2294 - ELSE - NOFIND=NOFIND+1 - ENDIF - ENDDO - 2294 CONTINUE - IF(NOFIND .GT. 0) GO TO 2298 - ENDDO - - 2298 IF(NOFIND .EQ. 
0) THEN - RSMCZ=RSMCAL(1) - STMIDZ=STIDAL(1) - -C RESET NALIAS TO FORCE A NEW COMBINED RSMC IF THE OBSERVING -C RSMCS AREN'T ON THE ALIAS FILE - - ELSE - WRITE(6,297) - 297 FORMAT('...RESETTING NALIAS=0 TO FORCE NEW ALIAS RECORD ', - 1 'BECAUSE A NEW RSMC HAS OBSERVED THIS STORM.') - NALIAS=0 - ENDIF - GO TO 301 - ENDIF - GO TO 260 - 300 CONTINUE - ENDDO - 301 CONTINUE - -C CONSTRUCT AND WRITE A NEW COMBINED RSMC IF NECESSARY - - IF(NALIAS .EQ. 0) THEN - IF(NALREC .EQ. 0) WRITE(6,303) - 303 FORMAT(/'...THE ALIAS FILE IS EMPTY. WE WILL ADD A NEW ALIAS.') - - IF(IFNDAL .EQ. 0) THEN - RSMCZ='!'//RSMCAP(NRSPRI) - WRITE(6,343) NRSPRI,RSMCAP(NRSPRI),RSMCZ - 343 FORMAT('...CONSTRUCTING NEW COMBINED RSMC FROM PRIORITY RSMC. ', - 1 'NRSPRI,','RSMCAP(NRSPRI),RSMCZ=',I4,2(1X,'...',A,'...')) - NSUB=0 - DO NOVZ=1,MIN0(NOVRLP,3) - IF(IPRIOR(NOVZ)/10 .NE. 1) THEN - NSUB=NSUB+1 - RSMCZ(2+NSUB:2+NSUB)=RSMCAP(IPRIOR(NOVZ)-10*(IPRIOR(NOVZ)/10)) - WRITE(6,349) RSMCZ(2+NSUB:2+NSUB),RSMCZ - 349 FORMAT('...ADDING RSMCAP=',A,', RSMCZ=',A) - ENDIF - ENDDO - - NSUB=1 - DO NOVZ=1,MIN(NOVRLP,NOVRMX-1) - NSUB=NSUB+1 - RSMCAL(NSUB)=RSMC(NOVZ) - STIDAL(NSUB)=STMID(NOVZ) - IF(IPRIOR(NOVZ)/10 .EQ. 1) THEN - RSMCAL(1)=RSMCZ - STIDAL(1)=STMIDZ - ENDIF - ENDDO - NOVRAD=NOVRLP+1 - -C CHECK THE CHOICE OF STORM ID VERSUS THE CATALOG. MAKE ANOTHER -C CHOICE IF THE FIRST CHOICE IS TAKEN. - - WRITE(6,361) STIDAL(1),(STMID(NOVZ),RSMC(NOVZ),NOVZ=1,NOVRLP) - 361 FORMAT('...CHECKING THE CATALOG TO SEE THE IF STORM IS IN ', - 1 'THERE. FIRST CHOICE IS: ',A/4X, - 2 'POSSIBLE IDS AND RSMCS ARE:'/(14X,2(A,2X))) - - read(stidal(1)(1:2),3333) minid - 3333 format(i2.2) - write(6,3334) minid - 3334 FORMAT('...ID OF FIRST CHOICE STORM ID=',I3) - - do novz=1,novrlp - call stcati(iuntca,stmid(novz),rsmc(novz),stmidx,ifnd) - if(ifnd .eq. 1) then - stidal(1)=stmidx - write(6,3335) stidal(1) - 3335 format('...Eureka, this storm is in the catalog with id=',a) - go to 3341 - - else - -c Pick out the maximum storm id from the priority basin - - if(stmid(novz)(3:3) .eq. stidal(1)(3:3)) then - read(stmid(novz)(1:2),3333) minidz - minid=max0(minid,minidz) - endif - - endif - enddo - 3341 continue - - if(ifnd .eq. 0) then - write(stidal(1)(1:2),3333) minid - write(6,3351) stidal(1) - 3351 format('...This storm is not in the catalog. Assign a unique ', - 1 'id that is the smallest for the overlapping storms=',a) - endif - stmidz=stidal(1) - - ELSE - WRITE(6,3357) RSMCAL(1),STIDAL(1),NALMX,(RSMCAL(NN), - 1 STIDAL(NN),NN=2,NALMX) - 3357 FORMAT('...COPYING RSMC =(',A,') AND STORM ID =(',A,') FROM ', - 1 'ALIAS FILE AND ADDING NEW RSMCS.'/4X,'NEW RSMCS AND ', - 2 'STORM IDS WILL NOW BE ADDED. CURRENT NUMBER IS',I3, - 3 ' OTHER RSMCS, STORM IDS ARE:'/(10X,2(A,1X))) - -C ADD NEW RSMCS AND ALIASES AS APPROPRIATE - - NADDRS=0 - - DO NOVR=1,NOVRLP - - DO NRSZA=1,NRSMCX - IF(RSMCID(NRSZA) .EQ. RSMC(NOVR)) THEN - NRSAPA=NRSZA - WRITE(6,3359) NOVR,RSMC(NOVR),NRSAPA - 3359 FORMAT('...FOR OVERLAP RECORD',I3,' RSMC AND INDEX ARE ',A,I4) - GO TO 3361 - ENDIF - ENDDO - 3361 CONTINUE - - IADRMS=1 - LNRSMC=INDEX(RSMCAL(1),' ')-1 - DO LENG=2,LNRSMC - WRITE(6,3377) LENG,RSMCAL(1)(LENG:LENG),RSMCAP(NRSAPA) - 3377 FORMAT('...TRYING TO MATCH RSMC ON ALIAS RECORD WITH OVERLAP ', - 1 'RECORD, LENG,RSMCAL,RSMCAP=',I3,2(1X,A)) - IF(RSMCAL(1)(LENG:LENG) .EQ. RSMCAP(NRSAPA)) THEN - IADRMS=0 - ENDIF - ENDDO - - IF(IADRMS .GT. 
0) THEN - NADDRS=NADDRS+1 - RSMCAL(1)(LNRSMC+NADDRS:LNRSMC+NADDRS)=RSMCAP(NRSAPA) - STIDAL(NALMX+NADDRS)=STMID(NOVR) - RSMCAL(NALMX+NADDRS)=RSMC(NOVR) - WRITE(6,3391) NADDRS,NALMX+NADDRS,RSMCAL(1) - 3391 FORMAT('...ADDING RSMC, NADDRS,NALMX+NADDRS,RSMCAL(1)=', - 1 2I4,1X,A) - ENDIF - ENDDO - NOVRAD=NALMX+NADDRS - STMIDZ=STIDAL(1) - RSMCZ=RSMCAL(1) - ENDIF - -C WRITE A NEW RECORD TO THE ALIAS FILE IF THERE ISN'T AN EARLIER -C ONE IN THE NEW ALIAS FILE ALREADY - - IFND=0 - DO NADDZ=1,NALADD - IF(STNMAD(NADDZ) .EQ. STMNAM(NOVR) .OR. - 1 (STIDAD(NADDZ) .EQ. STIDAL(1) .AND. - 2 RSMCAD(NADDZ) .EQ. RSMCAL(1)) .AND. - 3 DAYZ .GE. DAYZAD(NADDZ)) THEN - IFND=1 - GO TO 3661 - ENDIF - ENDDO - 3661 CONTINUE - - IF(IFND .EQ. 0) THEN - WRITE(6,3401) NOVRAD,NADDRS,RSMCAL(1),STIDAL(1),(RSMCAL(NN), - 1 STIDAL(NN),NN=2,NOVRAD) - 3401 FORMAT('...READY TO ADD MODIFIED ALIAS RECORD: NOVRAD,NADDRS,', - 1 'PRIMARY RSMC,STORM ID=',2I4,2(1X,A),' SECONDARY ', - 2 'RSMC, ID:'/(10X,2(A,1X))) - NALADD=NALADD+1 - STNMAD(NALADD)=STMNAM(1) - STIDAD(NALADD)=STIDAL(1) - RSMCAD(NALADD)=RSMCAL(1) - DAYZAD(NALADD)=DAYZ - NAKA=MIN(NOVRAD,NOVRMX) - CALL AKASAV(NALADD,NAKA,DAYZ,STNMAD(NALADD),RSMCAL,STIDAL) - ENDIF - - ENDIF - -C CALCULATE AVERAGE LAT/LON, RMAX -C THEN SUBSTITUTE THE STORM ID, RSMC, LAT/LON, RMAX - - WRITE(6,362) (NO,STMLAT(NO),STMLON(NO),RMAX(NO),PCEN(NO), - 1 PENV(NO),NO=1,NOVRLP) - 362 FORMAT(/'...READY FOR AVERAGING OVER COTEMPORANEOUS STORMS. ', - 1 9X,'LAT',5X,'LON',4X,'RMAX',4X,'PCEN',4X,'PENV ARE:' - 2 /(54X,I3,5F8.1)) - - CALL WTAVRG(STMLAT,AVWT,NOVRLP,STMLTZ) - CALL WTAVRG(STMLON,AVWT,NOVRLP,STMLNZ) - CALL WTAVGP(RMAX,AVWT,NOVRLP,RMAXZ) - CALL WTAVGP(PCEN,AVWT,NOVRLP,PCENZ) - CALL WTAVGP(PENV,AVWT,NOVRLP,PENVZ) - IF(STMLTZ .GE. 0) THEN - LATNS='N' - ELSE - LATNS='S' - STMLTZ=ABS(STMLTZ) - ENDIF - IF(STMLNZ .GT. 180.) THEN - LONEW='W' - ELSE - LONEW='E' - ENDIF - WRITE(6,363) LATNS,LONEW,STMLTZ,STMLNZ,RMAXZ,PCENZ,PENVZ - 363 FORMAT('...AVERAGE STORM VALUES ARE:',2X,'(LATNS,LONEW=',2A2,')' - 1 /57X,5F8.1) - - IF(NVSBRS .NE. 0) THEN - - DO IVR=1,NVSBRS - IVSB=IVSBRS(IVR) - IVTVAR(IVSB)=NINT(VITVAR(IVSB)/VITFAC(IVSB)) - ENDDO - - ELSE - WRITE(6,3364) - 3364 FORMAT(' ###THESE AVERAGE VALUES WILL NOT BE SUBSTITUTED.') - ENDIF - - WRITE(6,365) STMIDZ,RSMCZ - 365 FORMAT(' ...SUBSTITUTING COMBINED STORM ID=',A,' AND RSMC=',A, - 1 ' INTO OVERLAP RECORDS.',/,4X,'AFTER SUBSTITUTION, ', - 2 'INDEX, INPUT RECORD#, RECORD ARE : (~~ INDICATES ', - 3 'RECORD FROM ORIGINAL SHORT-TERM HISTORY FILE)') - ICURR=0 - DO NOVR=1,NOVRLP -C WRITE(6,367) NOVR,STMIDZ,RSMCZ,OVRREC(NOVR) -C 367 FORMAT('...BEFORE SUBSTITUTION,NOVR,STMIDZ,RSMCZ,OVRREC=', -C 1 I3,2(1X,A)/4X,A,'...') - -C COUNT THE NUMBER OF CURRENT OVERLAPPING RECORDS - - IF(IOVRLP(NOVR) .LE. NTEST) THEN - ICURR=ICURR+1 - STHCH=' ' - ELSE - STHCH='~~' - ENDIF - - BUFINX=OVRREC(NOVR) - STMIDX=STMIDZ - RSMCX=RSMCZ - LATNSX=LATNS - LONEWX=LONEW - OVRREC(NOVR)=BUFINX - DO IVR=1,NVSBRS - IVSB=IVSBRS(IVR) - WRITE(OVRREC(NOVR)(ISTVAR(IVSB):IENVAR(IVSB)),FMTVIT(IVSB)) - 1 IVTVAR(IVSB) - OVRREC(NOVR)(ISTVAR(IVSB)-1:ISTVAR(IVSB)-1)='A' - ENDDO - WRITE(6,369) NOVR,IOVRLP(NOVR),STHCH,OVRREC(NOVR) - 369 FORMAT(' ...',2I3,'...',A,'...',A,'...') - ENDDO - -C FINAL ASSIGNMENT OF ERROR CODE: -C =21 IF ALL OVERLAPPING RECORDS ARE CURRENT -C =22 IF ONE OF THE OVERLAPPING RECORDS WAS FROM THE ORIGINAL -C SHORT TERM HISTORY FILE. IN THIS CASE ITS TOO LATE TO USE -C THE CURRENT RECORD ANYWAY. - - IF(ICURR .EQ. 
NOVRLP) THEN - IETYP=IETYP*10+1 - ELSE - IETYP=IETYP*10+2 - ENDIF - -C ONLY RECORDS FROM THE CURRENT TEST ARRAY CAN BE SPLIT INTO OKAY -C AND BAD RECORDS. - - DO NOVR=1,NOVRLP - IF(IOVRLP(NOVR) .LE. NTEST) THEN - IFRSMC(NUMTST(IOVRLP(NOVR)))=IETYP - NADD=NADD+1 - NUMBAD(NADD+NBAD)=NUMTST(IOVRLP(NOVR)) - BADREC(NADD+NBAD)=TSTREC(IOVRLP(NOVR)) - IF(IETYP .NE. 0 .AND. IPRIOR(NOVR)/10 .EQ. 1) THEN - NSUBR=NSUBR+1 - NOKAY=NOKAY+1 - NUMOKA(NOKAY)=NUMTST(IOVRLP(NOVR)) - OKAREC(NOKAY)=OVRREC(NOVR) - ENDIF - ENDIF - ENDDO - - GO TO 400 - ENDIF - -C OTHER ERROR PROCESSING - - 390 CONTINUE - - IFRSMC(NUMTST(NRECSV))=IETYP - IF(IETYP .GT. 0) THEN - NADD=NADD+1 - NUMBAD(NADD+NBAD)=NUMTST(NRECSV) - BADREC(NADD+NBAD)=TSTREC(NRECSV) - ELSE - NOKAY=NOKAY+1 - NUMOKA(NOKAY)=NUMTST(NRECSV) - OKAREC(NOKAY)=TSTREC(NRECSV) - ENDIF - - 400 CONTINUE - ENDDO - -C DUMP ALIAS RECORDS TO NEW ALIAS FILE - - CALL AKADMP(IUNTAN) - - WRITE(6,401) - 401 FORMAT(//'...BEGINNING RSMCCK PART II: UNIFY STORM ID ACROSS ALL', - 1 ' CURRENT AND HISTORICAL OCCURRENCES.') - -C COPY ALIAS FILE (AKAVIT) TO NEW ALIAS FILE. DON'T COPY RECORDS -C THAT ALREADY EXIST IN NEW ALIAS FILE. - - REWIND IUNTAL - CALL AKACPY(IUNTAL,IUNTAN) - -C CHECK ALL RECORDS IN THE ALIAS SHORT-TERM HISTORY FILE VERSUS -C RECORDS THAT ARE OK SO FAR. FIRST, COPY ALL OKAY RECORDS -C INTO WORKING SPACE. - - NCHECK=NOKAY+1 - REWIND IUNTHA - WRITE(6,503) - 503 FORMAT(/'...COPYING OKAY RECORDS TO OVRREC ARRAY: RECORD #, ', - 1 'RECORD=') - DO NOK=1,NOKAY - IOVRLP(NOK)=0 - OVRREC(NOK)=OKAREC(NOK) - WRITE(6,505) NOK,OVRREC(NOK) - 505 FORMAT('...',I3,'...',A,'...') - ENDDO - WRITE(6,511) NOKAY - 511 FORMAT('...',I3,' OKAY RECORDS HAVE BEEN COPIED.') - - WRITE(6,513) IUNTHA - 513 FORMAT(/'...READING FROM ALIAS SHORT-TERM HISTORY FILE (UNIT',I3, - 1 ') INTO OVRREC ARRAY: RECORD #, RECORD='/4X,A) - - 520 CONTINUE - - READ(IUNTHA,521,END=540) OVRREC(NCHECK) - 521 FORMAT(A) - -C AT THIS POINT WE DO NOT KNOW IF A 2-DIGIT YEAR BEGINS IN COLUMN 20 -C OF THE RECORD (OLD NON-Y2K COMPLIANT FORM) OR IF A 4-DIGIT YEAR -C BEGINS IN COLUMN 20 (NEW Y2K COMPLIANT FORM) - TEST ON LOCATION OF -C LATITUDE N/S INDICATOR TO FIND OUT ... - - IF(OVRREC(NCHECK)(35:35).EQ.'N' .OR. - 1 OVRREC(NCHECK)(35:35).EQ.'S') THEN - -C ... THIS RECORD STILL CONTAINS THE OLD 2-DIGIT FORM OF THE YEAR - -C ... THIS PROGRAM WILL CONVERT THE RECORD TO A 4-DIGIT YEAR USING THE -C "WINDOWING" TECHNIQUE SINCE SUBSEQUENT LOGIC EXPECTS THIS - - PRINT *, ' ' - PRINT *, '==> Read in RECORD from tcvitals file -- contains a', - $ ' 2-digit year "',OVRREC(NCHECK)(20:21),'"' - PRINT *, ' ' - PRINT *, 'From unit ',iuntha,'; OVRREC(NCHECK)-3: ', - $ OVRREC(NCHECK) - PRINT *, ' ' - DUMY2K(1:19) = OVRREC(NCHECK)(1:19) - IF(OVRREC(NCHECK)(20:21).GT.'20') THEN - DUMY2K(20:21) = '19' - ELSE - DUMY2K(20:21) = '20' - ENDIF - DUMY2K(22:100) = OVRREC(NCHECK)(20:100) - OVRREC(NCHECK) = DUMY2K - PRINT *, ' ' - PRINT *, '==> 2-digit year converted to 4-digit year "', - $ OVRREC(NCHECK)(20:23),'" via windowing technique' - PRINT *, ' ' - PRINT *, 'From unit ',iuntha,'; OVRREC(NCHECK)-3: ', - $ OVRREC(NCHECK) - PRINT *, ' ' - - ELSE IF(OVRREC(NCHECK)(37:37).EQ.'N' .OR. - 1 OVRREC(NCHECK)(37:37).EQ.'S') THEN - -C ... THIS RECORD CONTAINS THE NEW 4-DIGIT FORM OF THE YEAR -C ... 
NO CONVERSION NECESSARY SINCE THIS SUBSEQUENT LOGIC EXPECTS THIS - - PRINT *, ' ' - PRINT *, '==> Read in RECORD from tcvitals file -- contains a', - $ ' 4-digit year "',OVRREC(NCHECK)(20:23),'"' - PRINT *, ' ' - PRINT *, 'From unit ',iuntha,'; OVRREC(NCHECK)-3: ', - $ OVRREC(NCHECK) - PRINT *, ' ' - PRINT *, '==> No conversion necessary' - PRINT *, ' ' - - ELSE - - PRINT *, ' ' - PRINT *, '***** Cannot determine if this record contains ', - $ 'a 2-digit year or a 4-digit year - skip it and try reading ', - $ 'the next record' - PRINT *, ' ' - GO TO 520 - - END IF - - IOVRLP(NCHECK)=0 - WRITE(6,505) NCHECK,OVRREC(NCHECK) - NCHECK=NCHECK+1 - GO TO 520 - - 540 CONTINUE - NCHECK=NCHECK-1 - WRITE(6,541) NCHECK-NOKAY - 541 FORMAT('...',I3,' SHORT-TERM HISTORY RECORDS HAVE BEEN READ.') - - REWIND IUNTAL - NALADD=0 - DO NOK=1,NOKAY - -C DO ONLY RECORDS THAT HAVE NOT BEEN PROCESSED PREVIOUSLY - - IF(IOVRLP(NOK) .LT. 0) GO TO 700 - BUFINZ=OKAREC(NOK) - WRITE(6,543) NOK,STMNMZ,STMIDZ,RSMCZ - 543 FORMAT(//'...READY TO CHECK OKAY RECORD',I3,' WITH STMNAM,ID,', - 1 'RSMC=',3(1X,A)) - DO IV=1,2 - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVAR(IV),IERDEC,FMTVIT(IV), - 1 BUFINZ) - ENDDO - CALL ZTIME(IDATEZ,IUTCZ,IYR,IMO,IDA,IHR,IMIN) - CALL W3DIFDAT((/IYR,IMO,IDA,0,0,0,0,0/),(/1899,12,31,0,0,0,0,0/), - $ 1,RINC) - JDY = NINT(RINC(1)) - CALL FLDAY(JDY,IHR,IMIN,DAYZ) - - IBANG=0 - NSAME=1 - STMID(NSAME)=STMIDZ - STMNAM(NSAME)=STMNMZ - RSMC (NSAME)=RSMCZ - IOVRLP(NOK)=-NOK - INDSAM(NSAME)=NOK - IDATE(NSAME)=IDATEZ - IUTC(NSAME)=IUTCZ - IDASRT(NSAME)=NSAME - SRTDAY(NSAME)=DAYZ - IF(RSMC(NSAME)(1:1) .EQ. '!') IBANG=NSAME - -C LOOK IN THE ALIAS FILE TO SEE IF THIS STORM HAS BEEN ALIASED -C BEFORE. - - NALSAV=NOVRMX - CALL AKAFND(IUNTAN,STMNMZ,RSMCZ,STMIDZ,NALSAV,STNMAL,RSMCAL, - 1 STIDAL,IFNDAL) - - IF(IFNDAL .NE. 0) THEN - NALMX=NALSAV - WRITE(6,557) STMNMZ,STMIDZ,NALMX - 557 FORMAT('...STORM NAME,ID=',2(1X,A),' HAS BEEN ASSIGNED AN ALIAS ', - 1 'NAME PREVIOUSLY.',I3,' ALIASES EXIST.') - ELSE - NALMX=1 - WRITE(6,559) STMNMZ - 559 FORMAT('...STORM ',A,' CANNOT BE FOUND IN THE ALIAS FILE.') - ENDIF - -C ACCUMULATE ALL OBSERVATIONAL REPORTS FOR THIS STORM. - - DO NCK=NOK+1,NCHECK - IF(IOVRLP(NCK) .GE. 0) THEN - IFNDX=0 - BUFINX=OVRREC(NCK) - -C NO MATCH FOR BOTH STORMS THAT ARE NAMED. - - IF(STMNMZ .NE. 'NAMELESS' .AND. STMNMX .NE. 'NAMELESS') THEN - IF(STMNMX .EQ. STMNMZ) then - if(STMIDX(3:3) .EQ. STMIDZ(3:3)) then - IFNDX=1 - else - icmat=0 - do nc=1,ncrdmx - if(stmnmx .eq. cardnm(nc)) icmat=1 - enddo - if(icmat .eq. 0) ifndx=1 - endif - endif - -C POSSIBLE MATCH REMAINS: MATCH STORM ID FOR THE SAME RSMC. IF -C STORM WAS IN ALIAS FILE, TRY TO MATCH ANY OF ITS ALIASES. IF -C STORM WAS NOT IN ALIAS FILE, TRY TO MATCH STORM ID AND RSMC. -C WARNING: THIS IS NOT A COMPLETE TEST!!! - - ELSE - IF(IFNDAL .NE. 0) THEN - - DO NAL=1,NALMX - IF(RSMCX .EQ. RSMCAL(NAL) .AND. STMIDX .EQ. STIDAL(NAL)) THEN - IFNDX=1 - GO TO 561 - ENDIF - ENDDO - - ELSE - IF(RSMCX .EQ. RSMCZ .AND. STMIDX .EQ. STMIDZ) THEN - IFNDX=1 - GO TO 561 - ENDIF - - ENDIF - - 561 CONTINUE - ENDIF - -C CONTINUE PROCESSING IF SAME STORM HAS BEEN FOUND. - - IF(IFNDX .NE. 0) THEN - - DO IV=1,2 - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVRX(IV),IERDEC,FMTVIT(IV), - 1 BUFINX) - ENDDO - CALL ZTIME(IDATEX,IUTCX,IYR,IMO,IDA,IHR,IMIN) - CALL W3DIFDAT((/IYR,IMO,IDA,0,0,0,0,0/),(/1899,12,31,0,0,0,0,0/), - $ 1,RINC) - JDY = NINT(RINC(1)) - CALL FLDAY(JDY,IHR,IMIN,DAYX) - -C CHECK FOR RECORDS THAT HAVE THE SAME DATE/TIME - - DO NSZ=1,NSAME - IF(ABS(DAYX-SRTDAY(NSZ)) .LT. 
FIVMIN) THEN - WRITE(6,567) NSZ,INDSAM(NSZ),BUFINX - 567 FORMAT('###RECORD HAS SAME DATE/TIME AS RECORD #',I3,' WHICH ', - 1 'IS INDEX#',I3,'. IT WILL NOT BE SAVED.',/,4X,A) - IOVRLP(NCK)=-999 - GO TO 570 - ENDIF - ENDDO - - NSAME=NSAME+1 - IDATE(NSAME)=IDATEX - IUTC(NSAME)=IUTCX - IOVRLP(NCK)=-NCK - INDSAM(NSAME)=NCK - STMID(NSAME)=STMIDX - STMNAM(NSAME)=STMNMX - RSMC (NSAME)=RSMCX - IDASRT(NSAME)=NSAME - SRTDAY(NSAME)=DAYX - IF(RSMC(NSAME)(1:1) .EQ. '!') IBANG=NSAME - - ENDIF - ENDIF - 570 CONTINUE - ENDDO - - WRITE(6,571) NSAME-1,STMNMZ,STMIDZ,(INDSAM(NS),NS=2,NSAME) - 571 FORMAT(/'...',I3,' MATCHING STORMS WERE FOUND FOR ',A,' WITH ', - 1 'ID=',A,' BY NAME OR STORM ID MATCHING. INDICES OF ', - 2 'MATCHING STORMS ARE:'/(4X,30I4)) - -C FINAL CHECK: FIND THE CLOSEST STORMS TO EACH OF THE STORMS -C THAT WERE DETERMINED TO BE THE SAME USING THE ABOVE PROCEDURE. -C COMPARE POSITIONS EXTRAPOLATED TO THE COMMON TIMES. - - NSVSAM=NSAME - DO NS=1,NSVSAM - ISAME=0 - DISTMN=1.E10 - -C RECOVER DATE, UTC, LAT/LON, STORM MOTION FOR SUBJECT STORM - - BUFINZ=OVRREC(INDSAM(NS)) - - DO IV=1,9 - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVAR(IV),IERDEC,FMTVIT(IV), - 1 BUFINZ) - VITVAR(IV)=REAL(IVTVAR(IV))*VITFAC(IV) - ENDDO - IF(LATNS .EQ. 'S') STMLTZ=-STMLTZ - IF(LONEW .EQ. 'W') STMLNZ=360.-STMLNZ - DAYZ=SRTDAY(NS) - WRITE(6,1521) NS,NCHECK,STMNMZ,STMIDZ,IDATEZ,IUTCZ,STMLTZ, - 1 STMLNZ,STMDRZ,STMSPZ,DAYZ,RMAXZ - 1521 FORMAT(/'...BEGINNING PROXIMITY CHECK WITH INDEX=',I3,' AND ', - 1 'NUMBER OF STORMS TO COMPARE=',I3/4X,'STORM=',A,'WITH ID', - 2 '=',A,'. IDATEZ,IUTCZ,STMLTZ,STMLNZ,STMDRZ,STMSPZ,DAYZ,', - 3 'RMAXZ='/3X,I9,I5,6F12.3) - - DO 1580 NCK=1,NCHECK - -C PICK ONLY STORMS THAT HAVEN'T YET BEEN RECOGNIZED AS BEING THE -C SAME AND THAT ARE NOT THEMSELVES. - - IF(IOVRLP(NCK) .LT. 0 .OR. NCK .EQ. INDSAM(NS)) GO TO 1580 - -C RECOVER DATE, UTC, LAT/LON, STORM MOTION AND RMAX FOR COMPARISON -C STORM - - BUFINX=OVRREC(NCK) - DO IV=1,9 - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVRX(IV),IERDEC,FMTVIT(IV), - 1 BUFINX) - VITVRX(IV)=REAL(IVTVRX(IV))*VITFAC(IV) - ENDDO - IF(LATNSX .EQ. 'S') STMLTX=-STMLTX - IF(LONEWX .EQ. 'W') STMLNX=360.-STMLNX - CALL ZTIME(IDATEX,IUTCX,IYR,IMO,IDA,IHR,IMIN) - CALL W3DIFDAT((/IYR,IMO,IDA,0,0,0,0,0/),(/1899,12,31,0,0,0,0,0/), - $ 1,RINC) - JDY = NINT(RINC(1)) - CALL FLDAY(JDY,IHR,IMIN,DAYX) - -C PICK ONLY STORMS THAT ARE NOT COTEMPORANEOUS. - - IF(ABS(DAYX-SRTDAY(NS)) .LT. FIVMIN) THEN -C WRITE(6,1553) NCK,INDSAM(NS) -C1553 FORMAT('###RECORD ',I3,' HAS SAME DATE/TIME AS RECORD #',I3,'. ', -C 1 'IT SHOULD HAVE BEEN TREATED BY THE COTEMPORANEOUS CHECK.') - GO TO 1580 - ENDIF - - IF(STMNMZ .NE. 'NAMELESS' .AND. STMNMX .NE. 'NAMELESS') THEN -C WRITE(6,1557) NCK,INDSAM(NS) -C1557 FORMAT('###RECORDS ',I3,' AND',I3,' BOTH HAVE NAMES. THEY ', -C 1 'SHOULD HAVE BEEN TREATED BY THE PREVIOUS MATCHING CHECK.') - GO TO 1580 - ENDIF - -C CAN THEY BE DEFINITIVELY PROVEN NOT TO BE THE SAME STORM? -C IF THEY ARE BOTH BANG STORMS OR BOTH NOT BANG STORMS, THE RSMCS -C AND STORM IDS CAN BE COMPARED DIRECTLY. OTHERWISE, WE MUST LOOK -C IN THE ALIAS FILE TO SEE IF THE SAME RSMC HAS OBSERVED EACH. - - IF(RSMCZ .EQ. RSMCX .AND. STMIDZ .NE. 
STMIDX) THEN -C WRITE(6,2551) RSMCZ,STMIDZ,STMIDX -C2551 FORMAT('...DIRECT COMPARISON OF STORM IDS FOR THE SAME RSMC ', -C 1 'GIVES UNAMBIGUOUSLY DIFFERENT STORMS, RSMC,STORM IDS=', -C 2 3(A,1X)) - GO TO 1580 - ENDIF - -C LOOK IN THE ALIAS FILE - - IFNDOV=0 - IRECOV=0 - REWIND IUNTAN - 2552 READ(IUNTAN,261,END=2560) NALOV,STNMOV,(RSMCOV(NAL),STIDOV(NAL), - 1 NAL=1,NALOV) - IRECOV=IRECOV+1 - - DO NALX=1,NALOV - IF((RSMCX(1:1) .EQ. '!' .AND. STMIDX .EQ. STIDOV(NALX)) .OR. - 1 (RSMCX(1:1) .NE. '!' .AND. - 2 RSMCX .EQ. RSMCOV(NALX) .AND. STMIDX .EQ. STIDOV(NALX))) THEN - IFNDOV=1 - DO NALZ=2,NALOV - IF(RSMCZ .EQ. RSMCOV(NALZ) .AND. STMIDZ .NE. STIDOV(NALZ)) THEN -C WRITE(6,2553) IRECOV,RSMCX,STMIDX,NALZ,RSMCOV(NALZ),STIDOV(NALZ) -C 1 STMIDZ -C 2553 FORMAT('###ALIAS RECORD',I3,' MATCHES POTENTIAL OVERLAPPING ', -C 1 'STORM WITH RSMC,ID=',2(A,1X,)/4X,'BUT FOR ALIAS #',I3, -C 2 ' RSMC=',A,' IS THE SAME BUT STORM IDS=',2(A,1X),' ARE ', -C 3 'DIFFERENT.') - GO TO 1580 - ENDIF - ENDDO - ENDIF - ENDDO - GO TO 2552 - - 2560 CONTINUE - - IF(IFNDOV .EQ. 0 .AND. RSMCX(1:1) .EQ. '!') THEN - WRITE(6,2561) STMNMX,RSMCX,STMIDX - 2561 FORMAT('...STORM ',A,' WITH RSMC AND ID=',2(A,1X),' WAS NOT ', - 1 'FOUND IN THE ALIAS FILE. ABORT1') - CALL ABORT1(' RSMCCK',2561) - ENDIF - - ISAME=ISAME+1 - DISTZX=DISTSP(STMLTZ,STMLNZ,STMLTX,STMLNX)*1.E-3 - -C WRITE(6,1571) STMNMX,STMIDX,NCK,IDATEX,IUTCX,STMLTX,STMLNX, -C 1 STMDRX,STMSPX,DAYX,DISTZX,RMAXX -C1571 FORMAT('...BEGINNING COMPARISON WITH STORM=',A,'WITH ID=',A,'. ', -C 1 'INDEX,IDATEX,IUTCX,STMLTX,STMLNX,STMDRX,STMSPX,DAYX,', -C 2 'DISTZX,RMAXX='/4X,I3,I10,I5,7F12.3) - IF(DISTZX .LT. DISTMN) THEN - DISTMN=DISTZX - NCLOSE=NCK - DAYSAV=DAYX - IUTCSV=IUTCX - IDATSV=IDATEX - STLTSV=STMLTX - STLNSV=STMLNX - STDRSV=STMDRX - STSPSV=STMSPX - RMAXSV=RMAXX - ENDIF - 1580 CONTINUE - - IF(ISAME .GT. 0) THEN - WRITE(6,1581) NS,NCLOSE,DISTMN,OVRREC(INDSAM(NS)),OVRREC(NCLOSE) - 1581 FORMAT(/'...FOR NS=',I3,', CLOSEST STORM IS INDEX=',I3,' WITH ', - 1 'DISTANCE=',F8.1,' KM. RECORDS ARE:'/4X,'Z...',A/4X, - 2 'X...',A/) - - BUFINX=OVRREC(NCLOSE) - - IF(RMAXZ .LT. 0.0) THEN - DO NBA=1,NBASIN - IF(STMIDZ(3:3) .EQ. IDBASN(NBA)) THEN - IBASN=NBA - GO TO 1546 - ENDIF - ENDDO - 1546 CONTINUE - RMAXZ=TCCLIM(9,IBASN) - WRITE(6,1583) NREC,RMAXZ,NABASN(IBASN) - 1583 FORMAT('###RMAXZ MISSING FOR PROXIMITY CHECK ON RECORD',I3,'.'/4X, - 1 'REPLACEMENT VALUE WILL BE A CLIMATOLOGICAL GUESS OF ', - 2 F6.1,' KM FOR BASIN ',A,'.') - ENDIF - - IF(RMAXSV .LT. 0.0) THEN - DO NBA=1,NBASIN - IF(STMIDX(3:3) .EQ. IDBASN(NBA)) THEN - IBASN=NBA - GO TO 1556 - ENDIF - ENDDO - 1556 CONTINUE - RMAXSV=TCCLIM(9,IBASN) - WRITE(6,1584) NREC,RMAXSV,NABASN(IBASN) - 1584 FORMAT('###RMAXSV MISSING FOR PROXIMITY CHECK ON RECORD',I3,'. ', - 1 'REPLACEMENT VALUE WILL BE A CLIMATOLOGICAL GUESS '/4X, - 2 'OF ',F6.1,' KM FOR BASIN ',A,'.') - ENDIF - - DTXZ=DAYSAV-DAYZ - DSTFAC=DTXZ*FACSPD - CALL DS2UV(USTMZ,VSTMZ,STMDRZ,STMSPZ) - CALL DS2UV(USTMX,VSTMX,STDRSV,STSPSV) - EXTLTZ=STMLTZ+VSTMZ*DSTFAC - EXTLNZ=STMLNZ+USTMZ*DSTFAC/COSD(EXTLTZ) - EXTLTX=STLTSV-VSTMX*DSTFAC - EXTLNX=STLNSV-USTMX*DSTFAC/COSD(EXTLTX) - DSTX2Z=DISTSP(STMLTZ,STMLNZ,EXTLTX,EXTLNX)*1.E-3 - DSTZ2X=DISTSP(STLTSV,STLNSV,EXTLTZ,EXTLNZ)*1.E-3 - -C LAST CRITERION FOR FINDING THE SAME STORM IS DISTANCE - - DSTOLP=RMAXZ+RMAXSV - IF(DSTZ2X .GE. DSTOLP .OR. DSTX2Z .GE. 
DSTOLP) THEN -C WRITE(6,1585) -C1585 FORMAT(/'...STORMS ARE NOT CONSIDERED THE SAME SINCE NO ', -C 1 'OVERLAPPING IS PRESENT AT A COMMON EXTRAPOLATED TIME.') - - ELSE - WRITE(6,1587) DAYZ,DAYX,DTXZ,DISTMN,STMNMZ,STMIDZ,STMLTZ,EXTLTZ, - 1 STMLNZ,EXTLNZ,DSTZ2X,RMAXZ,STMNMX,STMIDX,STLTSV, - 2 EXTLTX,STLNSV,EXTLNX,DSTX2Z,RMAXSV - 1587 FORMAT(/'...EXTRAPOLATION TABLE TO COMMON TIMES: DAYX,DAYZ,DTXZ', - 1 ',DISTMN=',4F12.3/20X,'SUBJECT (Z) STORM & ID',6X, - 2 'T=0LAT',6X,'T=XLAT',6X,'T=0LON',6X,'T=XLON',2X, - 3 'DISTANCE TO X',3X,'RMAXZ'/2(25X,A,2X,A,3X,6F12.3/),20X, - 4 'COMPARISON (X) STORM & ID',3X, - 5 'T=0LAT',6X,'T=ZLAT',6X,'T=0LON',6X,'T=ZLON',2X, - 6 'DISTANCE TO Z',3X,'RMAXX') - WRITE(6,1589) - 1589 FORMAT(/'###STORMS ARE OVERLAPPED AT A COMMON EXTRAPOLATED TIME.', - 1 ' THEY ARE ASSUMED TO BE THE SAME.###') - - BUFINX=OVRREC(NCLOSE) - NSAME=NSAME+1 - IDATE(NSAME)=IDATSV - IUTC(NSAME)=IUTCSV - IOVRLP(NCLOSE)=-NCLOSE - INDSAM(NSAME)=NCLOSE - STMID(NSAME)=STMIDX - STMNAM(NSAME)=STMNMX - RSMC (NSAME)=RSMCX - IDASRT(NSAME)=NSAME - SRTDAY(NSAME)=DAYSAV - IF(RSMC(NSAME)(1:1) .EQ. '!') IBANG=NSAME - - ENDIF - ENDIF - ENDDO - -C PROCESS ALL RECORDS FOR THE SAME STORM - - IF(NSAME .GT. 1) THEN - BUFINZ=OKAREC(NOK) - WRITE(6,577) NSAME,STMNMZ,STMIDZ,(NS,IDATE(NS),IUTC(NS), - 1 RSMC(NS),STMID(NS),STMNAM(NS),NS=1,NSAME) - 577 FORMAT('...',I3,' RECORDS APPEAR TO BE THE SAME STORM WITH NAME,', - 1 ' ID=',2(1X,A),' AND MUST BE UNIFIED.'/10X,' DATE ', - 2 'UTC RSMC STMID NAME ARE:'/(4X,I3,I10,2X,I5,2X,2(3X, - 3 A),4X,A)) - -c Sort the records by time - - CALL SORTRL(SRTDAY(1:NSAME),IDASRT(1:NSAME),NSAME) - -C LOOK IN THE ALIAS FILE TO SEE WHICH STORM ALIASES CORRESPOND -C TO THE BANG STORM. - - IF(IBANG .NE. 0) THEN - STMIDX=STMID(IBANG) - STMNMX=STMNAM(IBANG) - RSMCX=RSMC (IBANG) - - REWIND IUNTAN - NRECAL=0 - 552 READ(IUNTAN,261,END=555) NALMX,STNMAL,(RSMCAL(NAL),STIDAL(NAL), - 1 NAL=1,NALMX) - NRECAL=NRECAL+1 - -C NO MATCH FOR BOTH STORMS THAT ARE NAMED. - - IF(STMNMX .NE. 'NAMELESS' .AND. - 1 STNMAL .NE. 'NAMELESS' .AND. - 2 STNMAL .NE. STMNMX) GO TO 552 - -C POSSIBLE MATCH REMAINS: MATCH STORM ID ONLY IN THIS CASE SINCE -C THEY ARE BOTH BANG STORMS. - - DO NAL=1,NALMX - IF(STMIDX .EQ. STIDAL(NAL)) THEN - IFNDAL=NRECAL - GO TO 555 - ENDIF - ENDDO - GO TO 552 - - 555 CONTINUE - - IF(IFNDAL .EQ. 0) THEN - WRITE(6,5571) IBANG,STMNMX,RSMCX,STMIDX - 5571 FORMAT('******BANG STORM WITH INDEX=',I3,', NAME,RSMC,ID=', - 1 3(A,1X),' CANNOT BE FOUND IN THE ALIAS FILE. ABORT1') - CALL ABORT1(' RSMCCK',5571) - - ELSE - WRITE(6,5573) IBANG,STMNMX,RSMCX,STMIDX,IFNDAL - 5573 FORMAT('...BANG STORM WITH INDEX=',I3,', NAME,RSMC,ID=',3(A,1X), - 1 ' WAS FOUND AS RECORD#',I4,' IN THE ALIAS FILE. ') - ENDIF - ENDIF - -C LOOK FOR ALL THE RSMCS THAT HAVE OBSERVED THIS STORM SO FAR - - NRSMC=NALMX-1 - NALMXZ=NALMX - -C LOAD RSMCS FROM THE ALIAS FILE, IF ANY - - DO NRS=2,NALMX - DO NRSZ=1,NRSMCX - IF(RSMCAL(NRS) .EQ. RSMCID(NRSZ)) THEN - NRSMCF=NRSZ - ENDIF - ENDDO - IRSMC(NRS-1)=NRSMCF - WRITE(6,6633) NRS-1,RSMCID(NRSMCF) - 6633 FORMAT('...STORING ALIAS #',I3,' WHICH IS ',A) - ENDDO - - DO NS=1,NSAME - - IF(RSMC(NS) (1:1) .EQ. '!') THEN - NPS=2 - NPE=4 - ELSE - NPS=1 - NPE=1 - ENDIF - - DO NP=NPS,NPE - -C COMBINED RSMC CASE - - IF(RSMC(NS) (1:1) .EQ. '!') THEN - DO NRSZ=1,NRSMCX - IF(RSMC(NS)(NP:NP) .EQ. RSMCAP(NRSZ)) THEN - NRSMCF=NRSZ - GO TO 591 - ENDIF - ENDDO - -C INDIVIDUAL RSMC CASE - - ELSE - DO NRSZ=1,NRSMCX - IF(RSMC(NS) .EQ. 
RSMCID(NRSZ)) THEN - NRSMCF=NRSZ - GO TO 591 - ENDIF - ENDDO - ENDIF - 591 CONTINUE - - - ISAV=0 - DO NRSMS=1,NRSMC - IF(IRSMC(NRSMS) .EQ. NRSMCF) ISAV=ISAV+1 - ENDDO - - IF(ISAV .EQ. 0) THEN - NRSMC=NRSMC+1 - IRSMC(NRSMC)=NRSMCF - -C STORE A NEW RSMC IF NECESSARY. - - IADDAL=0 - DO NAL=2,NALMXZ - IF(RSMCAL(NAL) .EQ. RSMCID(NRSMCF)) IADDAL=IADDAL+1 -C WRITE(6,6441) NAL,RSMCAL(NAL),RSMCID(NRSMCF),IADDAL -C6441 FORMAT('...DEBUGGING, NAL,RSMCAL(NAL),RSMCID(NRSMCF),IADDAL=', -C 1 I3,2(1X,A),I3) - ENDDO - - IF(IADDAL .EQ. 0) THEN - WRITE(6,641) RSMCID(NRSMCF),STMID(NS) - 641 FORMAT('...THE LIST OF OBSERVERS WILL INCLUDE RSMC=',A,' FOR ', - 1 'STORM ID=',A) - NALMXZ=NALMXZ+1 - STIDAL(NALMXZ)=STMID(NS) - RSMCAL(NALMXZ)=RSMCID(NRSMCF) - - ELSE - WRITE(6,643) RSMCID(NRSMCF),STMNMZ - 643 FORMAT('...RSMC=',A,' IS ALREADY IN THE LIST OF OBSERVERS FOR ',A) - ENDIF - - ENDIF - - ENDDO - ENDDO - WRITE(6,651) STMNMZ,STMIDZ,NRSMC,(RSMCID(IRSMC(NRS)),NRS=1,NRSMC) - 651 FORMAT(/'...SUMMARY OF ALL OBSERVING RSMCS FOR STORM WITH NAME,', - 1 'ID=',2(1X,A),'. NUMBER OF RSMCS=',I3/4X,10(A,2X)) - -C IF MORE THAN ONE RSMC HAS OBSERVED STORM, UNIFY THE STORM ID -C AND RSMC IF ANY NEW RSMCS HAVE BEEN ADDED. - - IF(NRSMC .GT. 1 .OR. IFNDAL .NE. 0) THEN - - IF(NALMX .EQ. NALMXZ) THEN - -C NO NEW RSMC NEED BE ADDED. COPY STORM ID AND RSMC FROM A BANG -C RECORD. - - IRITAL=0 - - IF(IFNDAL .NE. 0) THEN - WRITE(6,6653) STMNMZ,STMIDZ,STNMAL,STIDAL(1),RSMCAL(1) - 6653 FORMAT(/'...STORM WITH NAME, ID=',2(1X,A),' WAS FOUND IN ALIAS ', - 1 'FILE WITH NAME=',A,'. ID,RSMC=',2(A,1X)) - STMIDZ=STIDAL(1) - RSMCZ=RSMCAL(1) - STMNMZ=STNMAL - - ELSE IF(IBANG .NE. 0) THEN - WRITE(6,653) - 653 FORMAT('...STORM NOT FOUND IN ALIAS FILE AND NO NEW RSMC HAS ', - 1 'BEEN ADDED. STORE RSMC AND STORM ID FROM A BANG RECORD.') - STMIDZ=STMID(IBANG) - RSMCZ=RSMC(IBANG) - - ELSE - WRITE(6,655) STMNMZ,STMIDZ - 655 FORMAT(/'******STORM WITH NAME, ID=',2(1X,A),' IS NOT LISTED AS ', - 1 'A BANG STORM, CANNOT BE FOUND IN THE ALIAS FILE,'/7X, - 2 'HAS MORE THAN ONE RSMC BUT NONE ARE TO BE ADDED. ABORT1') - CALL ABORT1(' RSMCCK',655) - ENDIF - - ELSE - -C ADD A NEW RSMC. COPY RSMC FROM THE BANG STORM RECORD. THEN ADD -C NEW RSMCS. IF THERE IS NO BANG RECORD, MAKE UP A NEW RSMC -C AND STORM ID BASED ON THE EARLIEST RECORD. - - IRITAL=1 - - NWRSMC=NALMXZ-NALMX - WRITE(6,6657) NWRSMC - 6657 FORMAT('...',I3,' NEW RSMCS WILL BE ADDED.') - -c Mark a relocation flag for the record in which a new -c rsmc has observed storm - - do ns=2,nsame - if(rsmc(idasrt(ns)) .ne. rsmc(idasrt(1))) then - write(6,6679) ns,idasrt(1),rsmc(idasrt(1)),idasrt(ns), - 1 rsmc(idasrt(ns)),nsame - 6679 format('...For ns=',i3,' a new observing rsmc has been detected.', - 1 ' Index,rsmc (first,new)=',2(i3,1x,a)/4x,'Total number ', - 2 'of observed records=',i3,' We insert a relocation flag ', - 3 'in the new record.') - bufinx=ovrrec(indsam(idasrt(ns))) - relocx='R' - ovrrec(indsam(idasrt(ns)))=bufinx - write(6,5509) indsam(idasrt(ns)),bufinx - 5509 format('...Record index and corrected record are:',i3/4x,a) - endif - enddo - - IF(IBANG .NE. 0) THEN - STMIDZ=STMID(IBANG) - RSMCZ=RSMC(IBANG) - LNRSMC=INDEX(RSMCZ,' ')-1 - WRITE(6,657) LNRSMC - 657 FORMAT('...BANG STORM EXISTS: STORE RSMC AND STORM ID FROM A ', - 1 'BANG RECORD, LENGTH IS:',I2) - - NWSLOT=0 - DO NAD=1,NWRSMC - NWSLOT=NWSLOT+1 - - IF(LNRSMC+NWSLOT .LE. 4) THEN - DO NRSZ=1,NRSMCX - IF(RSMCAL(NALMX+NAD) .EQ. 
RSMCID(NRSZ)) THEN -c write(6,6541) nad,nalmx,nwslot,lnrsmc+nwslot,nrsz, -c 1 rsmcal(nalmx+nad),rsmcid(nrsz) -c6541 format('...debugging, nad,nalmx,nwslot,lnrsmc+nwslot,nrsz,', -c 1 'rsmcal(nalmx+nad),rsmcid(nrsz)'/4x,5i4,2(1x,a)) - NRSMCF=NRSZ - GO TO 6561 - ENDIF - ENDDO - 6561 CONTINUE - RSMCZ(LNRSMC+NWSLOT:LNRSMC+NWSLOT)=RSMCAP(NRSMCF) - WRITE(6,6563) RSMCAP(NRSMCF),RSMCZ - 6563 FORMAT('...ADDING RSMC=',A,' TO AN ALREADY DEFINED BANG STORM ', - 1 'RSMC. UPDATED RSMC=',A) - - ELSE - WRITE(6,6567) NWSLOT,LNRSMC,NWRSMC - 6567 FORMAT('###INSUFFICIENT SPACE TO ADD NEW RSMC, NWSLOT,LNRSMC,', - 1 'NWRSMC=',3I3) - ENDIF - ENDDO - - ELSE - -C IN THIS CASE, NO OBSERVERS ARE BANG RECORDS AND THE STORM IS -C NOT IN THE ALIAS FILE. AN ALIAS RECORD MUST BE CREATED AND -C WRITTEN TO THE ALIAS FILE - - WRITE(6,659) IDASRT(1),STMID(IDASRT(1)),STMNAM(IDASRT(1)) - 659 FORMAT(/'...NO BANG STORMS EXIST. EARLIEST RECORD IS:',I3, - 1 '. STORM ID IS: ',A,' STORM NAME IS: ',A) - -C SUBSTITUTE THE ID OF THE FIRST OBSERVING RSMC AND CONSTRUCT -C A UNIFIED RSMC. SUBSTITUTE STORM NAME IF FIRST OBSERVATION -C DOES NOT HAVE NAMELESS AS A STORM NAME. - - RSMCZ=RSMC(IDASRT(1)) - STMIDZ=STMID(IDASRT(1)) - STMNMZ=STMNAM(IDASRT(1)) - -C FIRST TWO RSMC SLOTS - - IF(RSMCZ(1:1) .EQ. '!') THEN - WRITE(6,663) RSMC(IDASRT(1))(1:2) - 663 FORMAT('...THIS RECORD IS A MULTIPLY OBSERVED STORM. COPY THE ', - 1 'RSMCAP AND BANG FROM THIS RECORD=',A) - RSMCZ(1:2)=RSMC(IDASRT(1))(1:2) - DO NRSZ=1,NRSMCX - IF(RSMC(IDASRT(1))(2:2) .EQ. RSMCAP(NRSZ)) THEN - NRSST=NRSZ - GO TO 661 - ENDIF - ENDDO - 661 CONTINUE - - ELSE - WRITE(6,667) - 667 FORMAT('...THIS RECORD IS A SINGLY OBSERVED STORM. COPY THE ', - 1 'RSMC FROM THIS RECORD.') - RSMCZ(1:1)='!' - DO NRSZ=1,NRSMCX - IF(RSMC(IDASRT(1)) .EQ. RSMCID(NRSZ)) THEN - NRSST=NRSZ - GO TO 671 - ENDIF - ENDDO - 671 CONTINUE - RSMCZ(2:2)=RSMCAP(NRSST) - ENDIF - -C REMAINING RSMC SLOTS - - NID=2 - RSMCZ(3:4)=' ' - DO NRS=1,NRSMC - IF(RSMCID(IRSMC(NRS)) .NE. RSMCID(NRSST)) THEN - NID=NID+1 - IF(NID .GT. 4) GO TO 680 - RSMCZ(NID:NID)=RSMCAP(IRSMC(NRS)) - WRITE(6,679) RSMCAP(IRSMC(NRS)),IRSMC(NRS),NID,RSMCZ - 679 FORMAT('...ADDING RSMCAP ',A,' FOR RSMC ',I2,' IN SLOT ',I3, - 1 ' RSMCZ=',A) - ENDIF - 680 CONTINUE - ENDDO - - ENDIF - - ENDIF - -C HAS THE STORM BEEN NAMED BY SOMEONE OVER ITS HISTORY? IF SO, -C SUBSTITUTE THE NAME FOR THE ALIAS FILE. - - IF(STMNMZ .EQ. 'NAMELESS') THEN - DO NS=1,NSAME - IF(STMNAM(NS) .NE. 'NAMELESS') THEN - STMNMZ=STMNAM(NS) - WRITE(6,6689) STMNAM(NS),NS - 6689 FORMAT('###STORM NAMELESS WILL BE RENAMED ',A,' IN THE ALIAS ', - 1 'FILE. INDEX OF NAMED STORM=',I3) - IRITAL=1 - GO TO 6691 - ENDIF - ENDDO - 6691 CONTINUE - ENDIF - -C IF NECESSARY, WRITE ALIAS RECORD AND SUBSTITUTE UNIFIED RSMC AND -C STORM ID. - - IF(IRITAL .EQ. 1) THEN - WRITE(6,681) STMNMZ,STMIDZ,RSMCZ - 681 FORMAT(/'...WRITING A UNIFIED ALIAS RECORD FOR STORM NAME=',A, - 1 '. STORM ID AND UNIFIED RSMC ARE:',2(1X,A)) - NALADD=NALADD+1 - STIDAL(1)=STMIDZ - RSMCAL(1)=RSMCZ - DAYZ=-999.0 - CALL AKASAV(NALADD,NALMXZ,DAYZ,STMNMZ,RSMCAL,STIDAL) - ENDIF - - DO NS=1,NSAME - BUFINX=OVRREC(INDSAM(NS)) -C WRITE(6,683) NS,INDSAM(NS),BUFINX -C 683 FORMAT('...SUBSTITUTING UNIFIED RSMC AND STMID. NS,INDSAM,RECORD', -C 1 ' ARE:',2I3/' ...',A) - STMIDX=STMIDZ - RSMCX=RSMCZ - OVRREC(INDSAM(NS))=BUFINX -C WRITE(6,683) NS,INDSAM(NS),BUFINX - ENDDO - - ELSE - WRITE(6,693) - 693 FORMAT(/'...ONLY 1 RSMC HAS OBSERVED STORM. THERE IS NO NEED TO', - 1 ' UNIFY THE RSMC AND STORM ID IF STORM IDS ARE THE SAME.' 
- 2 /4X,'WE PROCEED TO CHECK STORM ID CONSISTENCY.') - - ISAME=0 - DO NS=2,NSAME - IF(STMID(NS) .NE. STMIDZ) THEN - IF(ABS(SRTDAY(NS)-SRTDAY(1)) .LE. DTOVR) THEN - ISAME=ISAME+1 - IETYP=6 - WRITE(6,1683) DTOVR,INDSAM(NS),INDSAM(1),STMID(NS),STMIDZ, - 1 STMNAM(NS),STMNMZ,SRTDAY(NS),SRTDAY(1), - 2 OVRREC(INDSAM(NS)),OVRREC(INDSAM(1)) - 1683 FORMAT(/'###TWO STORMS OBSERVED BY THE SAME RSMC WITH TIMES ', - 1 'DIFFERING BY LESS THAN ',F5.1,' DAYS AND DIFFERENT ', - 2 'STORM ID.'/4X,'THESE ARE PROBABLY THE SAME STORM. IN ', - 3 'ORDER (NS,1), INDEX, STORM ID, STORM NAME, DAY AND ', - 4 'RECORD ARE:'/10X,2I5,4(2X,A),2F12.3/2(4X,A/)) - ELSE - WRITE(6,1687) DTOVR,INDSAM(NS),INDSAM(1),STMID(NS),STMIDZ, - 1 STMNAM(NS),STMNMZ,SRTDAY(NS),SRTDAY(1), - 2 OVRREC(INDSAM(NS)),OVRREC(INDSAM(1)) - 1687 FORMAT(/'###TWO STORMS OBSERVED BY THE SAME RSMC WITH TIMES ', - 1 'DIFFERING BY MORE THAN ',F5.1,' DAYS AND DIFFERENT ', - 2 'STORM ID.'/4X,'THESE ARE PROBABLY NOT THE SAME STORM.', - 3 ' IN ORDER (NS,1), INDEX, STORM ID, STORM NAME, DAY ', - 4 'AND RECORD ARE:'/10X,2I5,4(2X,A),2F12.3/2(4X,A/)) - ENDIF - ENDIF - ENDDO - -C STORMS HAVE ALREADY BEEN SORTED IN CHRONOLOGICAL ORDER SO -C SUBSTITUTE THE STORM ID OF THE EARLIEST STORM. - - IF(ISAME .NE. 0) THEN - - WRITE(6,1695) IDASRT(1),STMID(IDASRT(1)),STMNAM(IDASRT(1)) - 1695 FORMAT(/'...EARLIEST RECORD IS:',I3,'. STORM ID IS: ',A,' STORM ', - 1 'NAME IS: ',A/4X,'THIS STORM ID AND RSMC WILL BE COPIED ', - 2 'TO THE FOLLOWING STORMS:') - DO NS=1,NSAME - BUFINX=OVRREC(INDSAM(NS)) - STMIDX=STMID(IDASRT(1)) - RSMCX =RSMC (IDASRT(1)) - OVRREC(INDSAM(NS))=BUFINX - IF(INDSAM(NS) .LE. NOKAY) IFRSMC(NUMOKA(INDSAM(NS)))=-IETYP - WRITE(6,1697) NS,INDSAM(NS),OVRREC(INDSAM(NS)) - 1697 FORMAT('...',I3,'...',I3,'...',A) - ENDDO - ENDIF - - ENDIF - - ELSE - WRITE(6,697) NOK,OKAREC(NOK) - 697 FORMAT('...OKAY RECORD ',I3,' IS UNIQUE AMONG OKAY AND SHORT-', - 1 'TERM HISTORY RECORDS. NO FURTHER PROCESSING WILL BE ', - 2 'DONE. RECORD IS:'/4X,'...',A,'...') - ENDIF - - 700 CONTINUE - ENDDO - CALL AKADMP(IUNTAL) - -C SAVE AS BAD RECORDS THOSE ORIGINAL RECORDS THAT HAVE BEEN -C UNIFIED, BUT NOT MULTIPLY OBSERVED, SO THAT THEY CAN BE -C COPIED TO THE ORIGINAL SHORT-TERM HISTORY FILE LATER BY RITSTH. - - DO NOK=1,NOKAY - - IF(OKAREC(NOK)(1:1) .NE. '!' .AND. - 1 OVRREC(NOK)(1:1) .EQ. '!') THEN - IETYP=30 - IFRSMC(NUMOKA(NOK))=IETYP - NADD=NADD+1 - NUNIFY=NUNIFY+1 - NUMBAD(NADD+NBAD)=NUMOKA(NOK) - BADREC(NADD+NBAD)=OKAREC(NOK) - ENDIF - - OKAREC(NOK)=OVRREC(NOK) - ENDDO - - WRITE(6,711) IUNTOK - 711 FORMAT(/'...WE HAVE UNIFIED ALL RECORDS AND ARE WRITING THEM TO ', - 1 'THE SCRATCH FILE.'/4X,'THEY WILL BE WRITTEN TO THE ', - 2 'ALIAS SHORT-TERM HISTORY FILE IF UPDATING IS REQUIRED.'/ - 3 4X,'OLD ALIAS SHORT-TERM HISTORY RECORDS WRITTEN TO ', - 4 'IUNTOK=',I3,' ARE:') - NRCOVR=0 - DO NHA=NOKAY+1,NCHECK - IF(IOVRLP(NHA) .NE. -999) THEN - NRCOVR=NRCOVR+1 - WRITE(IUNTOK,521) OVRREC(NHA) - WRITE(6,719) NRCOVR,OVRREC(NHA) - 719 FORMAT('...',I3,'...',A,'...') - OVRREC(NRCOVR)=OVRREC(NHA) - ENDIF - ENDDO - WRITE(6,721) NRCOVR - 721 FORMAT(/'...IMPORTANT NOTE: THE UPDATED OLD ALIAS SHORT-TERM ', - 1 'HISTORY RECORDS ARE RETURNED TO THE MAIN PROGRAM IN ', - 2 'OVRREC.'/4X,'THEY WILL BE COPIED INTO THE SCRATCH FILE ', - 3 '(INSTEAD OF USING CPYREC) WHEN FILES=F.'/4X,'THE NUMBER', - 4 ' OF RECORDS RETURNED IS:',I4) - -C COPY NEW ALIAS FILE TO AKAVIT. DON'T COPY RECORDS -C THAT ALREADY EXIST IN AKAVIT. 
- - REWIND IUNTAN - CALL AKACPY(IUNTAN,IUNTAL) - -C DO NOT CLEAR OUT THE NEW ALIAS FILE; AKAVIT MAY BE CHANGED BY -C RCNCIL LATER - - WRITE(6,1001) NOKAY,-NSUBR,-NUNIFY,NADD,NTEST, - 1 (ERCRS(NER),NER=1,NERCRS) - 1001 FORMAT(//'...RESULTS OF THE MULTIPLE RSMC CHECK ARE: NOKAY=',I4, - 1 ' NSUBR=',I4,' NUNIFY=',I4,' AND NADD=',I4,' FOR A ', - 2 'TOTAL OF ',I4,' RECORDS.'//4X,'ERROR CODES ARE:'/(6X,A)) - WRITE(6,1003) - 1003 FORMAT(/'...OKAY RECORDS ARE:',100X,'ERC'/) - DO NOK=1,NOKAY - WRITE(6,1009) NOK,NUMOKA(NOK),OKAREC(NOK),-IFRSMC(NUMOKA(NOK)) - 1009 FORMAT(3X,I4,'...',I4,'...',A,'...',I3) - ENDDO - IF(NADD .GT. 0) WRITE(6,1011) (NBAD+NBA,NUMBAD(NBAD+NBA), - 1 BADREC(NBAD+NBA), - 2 IFRSMC(NUMBAD(NBAD+NBA)), - 3 NBA=1,NADD) - 1011 FORMAT(/' ADDED BAD RECORDS ARE:',95X,'ERC'/(3X,I4,'...',I4, - 1 '...',A,'...',I3)) - NBAD=NBAD+NADD - - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: BASNCK CHECKS FOR PROPERLY IDENTIFIED BASINS -C PRGMMR: S. LORD ORG: NP22 DATE: 1992-02-24 -C -C ABSTRACT: INPUT RECORDS ARE CHECKED FOR PROPERLY IDENTIFIED BASINS. -C THE INPUT LATIDUDE AND LONGITUDE ARE CHECKED AGAINST -C TABULATED MIN AND MAX LATITUDES AND LONGITUDES FOR THE -C SPECIFIED BASIN. INCONSISTENCIES ARE FLAGGED. -C -C PROGRAM HISTORY LOG: -C 1992-02-19 S. LORD -C -C USAGE: CALL BASNCK(STMIDX,RLTSTM,RLNSTM,NBA,IPRT,IER) -C INPUT ARGUMENT LIST: -C STMIDX - 3 CHARACTER STORM ID. THIRD CHARACTER CARRIES BASIN -C IDENTIFIER -C IPRT - PRINT LEVEL. =1 FOR PRINTOUT; =0 FOR NO PRINTOUT -C -C OUTPUT ARGUMENT LIST: -C NBA - BASIN NUMBER CORRESPONDING TO THE INPUT LAT/LON -C IER - ERROR RETURN CODE: -C 3: STORM IS NOT IN A BASIN DEFINED BY THE TABULATED -C MINIMUM AND MAXIMUM LAT/LON -C 11: BASIN AND BASIN BOUNDARIES DO NOT MATCH. THIS DOES -C NOT NECESSARILY MEAN THERE IS AN ERROR SINCE THE -C STORM COULD HAVE ORIGINATED IN THAT BASIN AND MOVED -C TO ANOTHER -C -C OUTPUT FILES: -C UNIT 06 - STANDARD OUTPUT PRINT -C -C REMARKS: NONE. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE BASNCK(STMIDX,RLTSTM,RLNSTM,NBA,IPRT,IER) - - SAVE - - CHARACTER*(*) STMIDX - - PARAMETER (NBASIN=11) - - CHARACTER IDBASN*1 - - DIMENSION IDBASN(NBASIN),BSLTMN(NBASIN),BSLTMX(NBASIN), - 1 BSLNMN(NBASIN),BSLNMX(NBASIN) - - DATA IDBASN/'L','E','C','W','O','T','U','P','S','B','A'/ - -C BASIN BOUNDARIES: MIN AND MAX LATITUDES; MIN AND MAX LONGITUDES -C NOTE: SOME BOUNDARIES MAY OVERLAP, BUT SCANNING IS IN ORDER OF -C DECREASING PRIORITY SO BASINS SHOULD BE CAPTURED PROPERLY - - DATA BSLTMN/3*-20.,2*0.0,20.,3*-50.,2*0.0/, - 1 BSLTMX/4*60.,25.,40.,3*0.0,2*30./, - 2 BSLNMN/260.,220.,180.,2*100.,110.,90.,160.,40.,75.,40./, - 3 BSLNMX/350.,260.,220.,180.,125.,140.,160.,290.,90.,100.,75./ - - - IER=0 - -C RECOVER BASIN NUMBER FROM STORM ID -C WE ASSUME ALL BASIN IDS ARE VALID HERE - - DO NB=1,NBASIN - IF(STMIDX(3:3) .EQ. IDBASN(NB)) THEN - NBA=NB - GO TO 11 - ENDIF - ENDDO - 11 CONTINUE - - IF(RLTSTM .LT. BSLTMN(NBA) .OR. RLTSTM .GT. BSLTMX(NBA) .OR. - 1 RLNSTM .LT. BSLNMN(NBA) .OR. RLNSTM .GT. BSLNMX(NBA)) THEN - IF(IPRT .EQ. 1) WRITE(6,21) STMIDX,NBA,RLTSTM,RLNSTM,BSLTMN(NBA), - 1 BSLTMX(NBA),BSLNMN(NBA),BSLNMX(NBA) - 21 FORMAT(/'******BASIN IDENTIFIER AND LAT/LON ARE INCONSISTENT. A ', - 1 'POSSIBLE ERROR EXISTS OR THE STORM ORIGINATED IN A ', - 2 'DIFFERENT BASIN.'/4X,'STMIDX,NBA,RLTSTM,RLNSTM,BSLTMN(', - 3 'NBA),BSLTMX(NBA),BSLNMN(NBA),BSLNMX(NBA)='/4X,A,I3,6F8.1) - IER=11 - -C IN WHICH BASIN IS THE STORM REALLY LOCATED? - - DO NB=1,NBASIN - IF(RLTSTM .GE. BSLTMN(NB) .AND. 
RLTSTM .LE. BSLTMX(NB) .AND. - 1 RLNSTM .GE. BSLNMN(NB) .AND. RLNSTM .LE. BSLNMX(NB)) THEN - NBA=NB - RETURN - ENDIF - ENDDO - IER=3 - WRITE(6,51) STMIDX,NBA,RLTSTM,RLNSTM,BSLTMN(NBA), - 1 BSLTMX(NBA),BSLNMN(NBA),BSLNMX(NBA) - 51 FORMAT(/'******STORM=',A,' IS NOT IN A DEFINED BASIN. NBA,', - 1 'RLTSTM,RLNSTM,BSLTMN(NBA),BSLTMX(NBA),BSLNMN(NBA),', - 2 'BSLNMX(NBA)='/I3,6F8.1) - ENDIF - - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: AKASUB HANDLES STORAGE AND WRITING ALIAS RECORDS -C PRGMMR: S. LORD ORG: NP22 DATE: 1992-03-05 -C -C ABSTRACT: STORES ALIAS RECORDS UNTIL THEY ARE READY TO BE DUMPED TO -C DISK. DUMPING TO DISK INVOLVES FINDING THE ONE RECORD FOR -C EACH STORM THAT HAS THE EARLIEST DATE. COPYING FROM ONE -C UNIT TO ANOTHER ALSO INVOLVES FINDING THE EARLIEST DATE. -C FUNCTIONS ARE PERFORMED BY 3 SEPARATE ENTRIES AS SHOWN -C BELOW. AKASUB IS JUST A DUMMY HEADING. -C -C PROGRAM HISTORY LOG: -C 1992-03-05 S. LORD -C -C USAGE: CALL AKASUB(IUNITI,IUNITO,NAKREC,NAKA,DAYZ,AKANAM,AKRSMC, -C AKSTID) -C CALL AKASAV(NAKREC,NAKA,DAYZ,AKANAM,AKRSMC,AKSTID): STORES -C RECORDS -C CALL AKADMP(IUNITO): DUMPS RECORDS TO DISK -C CALL AKACPY(IUNITI,IUNITO): COPIES RECORDS FROM IUNITI TO -C IUNITO -C INPUT ARGUMENT LIST: -C IUNITI - INPUT UNIT NUMBER. FILE POSITIONING MUST BE HANDLED -C - OUTSIDE THIS ROUTINE. -C IUNITO - OUTPUT UNIT NUMBER. FILE POSITIONING MUST BE HANDLED -C - OUTSIDE THIS ROUTINE. -C NAKREC - RECORD NUMBER, FIRST RECORD IS 1 AND SO ON. -C NAKA - NUMBER OF ALIASES IN EACH RECORD. FIRST ALIAS IS -C - USUALLY A COMBINED OR UNIFIED ALIAS BEGINNING WITH A !. -C DAYZ - FRACTIONAL DAY FOR EACH RECORD -C AKANAM - STORM NAME (CHARACTER*9) -C AKRSMC - ARRAY CONTAINING ALL RSMCS (CHARACTER*4) -C AKSTID - ARRAY CONTAINING ALL STORM IDS (CHARACTER*3) -C -C -C OUTPUT FILES: -C UNIT 06 - STANDARD OUTPUT PRINT -C -C REMARKS: NONE. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE AKASUB(IUNITI,IUNITO,NAKREC,NAKA,DAYZ,AKANAM,AKRSMC, - 1 AKSTID,ICSTNM,ICRSMC,ICSTID,IFAKA) - - PARAMETER (MAXSTM=70) - PARAMETER (NOVRMX=MAXSTM) - PARAMETER (MAXAKA=10) - - SAVE - - DIMENSION NUMSAV(MAXSTM),SAVNAM(MAXSTM),SAVRSM(MAXSTM,MAXAKA), - 1 SAVID(MAXSTM,MAXAKA),SAVDAY(MAXSTM),INDSAM(MAXSTM) - - DIMENSION AKRSMC(NOVRMX),AKSTID(NOVRMX),RSMCCP(MAXAKA), - 1 STIDCP(MAXAKA) - - CHARACTER SAVNAM*9,SAVRSM*4,SAVID*3,STMNMX*9,RSMCCP*4,STIDCP*3 - CHARACTER*(*) AKANAM,AKRSMC,AKSTID,ICSTNM,ICRSMC,ICSTID - - LOGICAL FOUND - -C----------------------------------------------------------------------- -C THIS ENTRY STORES ALIAS ENTRIES - - ENTRY AKASAV(NAKREC,NAKA,DAYZ,AKANAM,AKRSMC,AKSTID) - - WRITE(6,1) NAKREC - 1 FORMAT(/'...ENTERING AKASAV TO STORE RECORD #',I3,'. RECORD IS:') - - NAKSAV=NAKREC - NUMSAV(NAKSAV)=NAKA - SAVNAM(NAKSAV)=AKANAM - SAVDAY(NAKSAV)=DAYZ - - SAVRSM(NAKSAV,1:NAKA)=AKRSMC(1:NAKA) - SAVID (NAKSAV,1:NAKA)=AKSTID(1:NAKA) - WRITE(6,11) NAKA,AKANAM,(AKRSMC(NAL),AKSTID(NAL),NAL=1,NAKA) - 11 FORMAT('...',I1,1X,A9,10(1X,A4,1X,A3)) - - RETURN - -C----------------------------------------------------------------------- -C THIS ENTRY DUMPS ALIAS ENTRIES. ONLY THE EARLIEST ENTRY FOR -C EACH STORM IS SAVED. - - ENTRY AKADMP(IUNITO) - - WRITE(6,21) IUNITO - 21 FORMAT(/'...ENTERING AKADMP TO WRITE EARLIEST UNIQUE ALIAS ', - 1 'RECORDS TO UNIT',I3,'. 
STORED RECORDS ARE:'/10X,'NAL', - 2 4X,'NAME',12X,'JDAY',5X,'RSMC',2X,'STMID') - DO NAK=1,NAKSAV - WRITE(6,23) NAK,NUMSAV(NAK),SAVNAM(NAK),SAVDAY(NAK), - 1 (SAVRSM(NAK,NS),SAVID(NAK,NS),NS=1,NUMSAV(NAK)) - 23 FORMAT(3X,I3,2X,I3,4X,A,3X,F12.3,10(3X,A)) - ENDDO - - NREC=0 - DO NAK=1,NAKSAV - IF(NUMSAV(NAK) .GT. 0) THEN - IFND=1 - INDSAM(IFND)=NAK - WRITE(6,27) NAK,IFND,SAVNAM(NAK),SAVDAY(NAK),(SAVRSM(NAK,NSAV), - 1 SAVID(NAK,NSAV),NSAV=1,NUMSAV(NAK)) - 27 FORMAT(/'...LOOKING FOR MATCHING STORM NAMES FOR INDEX=',I3, - 1 ', IFND=',I3,' STORM NAME= ',A,' WITH DAY=',F12.3/4X, - 2 'ALIASES ARE: ',10(A,1X,A,'; ')) - WRITE(6,29) - 29 FORMAT('...IMPORTANT NOTE: ALIAS RECORDS WITH DATE=-999.0 WILL ', - 1 'ALWAYS BE COPIED.') - - DO NSAME=NAK+1,NAKSAV - IF(NUMSAV(NSAME) .GT. 0) THEN - FOUND=.FALSE. - -C SAME STORM NAME IF NOT NAMELESS - - IF(SAVNAM(NAK) .NE. 'NAMELESS' .AND. - 1 SAVNAM(NSAME) .NE. 'NAMELESS' .AND. - 2 SAVNAM(NAK) .EQ. SAVNAM(NSAME)) THEN - FOUND=.TRUE. - -C DIRECT COMPARISON OF STORM IDS FOR THE SAME RSMC - - ELSE - DO NAL2=1,NUMSAV(NAK) - DO NAL1=1,NUMSAV(NSAME) - IF(SAVRSM(NSAME,NAL1) .EQ. SAVRSM(NAK,NAL2) .AND. - 1 SAVID (NSAME,NAL1) .EQ. SAVID (NAK,NAL2)) FOUND=.TRUE. - ENDDO - ENDDO - ENDIF - - IF(FOUND) THEN - NUMSAV(NSAME)=-IABS(NUMSAV(NSAME)) - IFND=IFND+1 - INDSAM(IFND)=NSAME - WRITE(6,59) NSAME,IFND,SAVDAY(NSAME) - 59 FORMAT(/'...STORM NAME FOR INDEX=',I3,' MATCHES. IFND=',I3,' AND', - 1 ' DAY=',F12.3) - ENDIF - ENDIF - ENDDO - -C SINGLE OCCURRENCE - - IF(IFND .EQ. 1) THEN - NW=NAK - DAYMNZ=SAVDAY(NAK) - STMNMX=SAVNAM(NAK) - WRITE(6,61) NW,SAVNAM(NAK),SAVID(NAK,1) - 61 FORMAT('...INDEX',I3,' WITH NAME=',A,' AND ID=',A,' HAS ONLY A ', - 1 'SINGLE OCCURRENCE.') - -C IF THERE ARE MULTIPLE OCCURRENCES, WRITE ONLY THE EARLIEST RECORD, -C BUT SUBSTITUTE IN THE STORM NAME IF IT IS NOT NAMELESS. - - ELSE - WRITE(6,63) SAVNAM(NAK),SAVID(NAK,1) - 63 FORMAT('...STORM NAME=',A,' AND ID=',A,' HAS MULTIPLE ', - 1 'OCCURRENCES. WE LOOK FOR THE FIRST OCCURRENCE.') - DAYMNZ=1.E10 - STMNMX='NAMELESS' - DO IF=1,IFND - IF(STMNMX .EQ. 'NAMELESS' .AND. - 1 SAVNAM(INDSAM(IF)) .NE. 'NAMELESS') - 1 STMNMX=SAVNAM(INDSAM(IF)) - IF(SAVDAY(INDSAM(IF)) .LT. DAYMNZ) THEN - DAYMNZ=SAVDAY(INDSAM(IF)) - NW=INDSAM(IF) - ENDIF - ENDDO - ENDIF - -C WRITE THE RECORD - - NREC=NREC+1 - WRITE(IUNITO,81) IABS(NUMSAV(NW)),STMNMX,(SAVRSM(NW,NAL), - 1 SAVID(NW,NAL),NAL=1,IABS(NUMSAV(NW))) - 81 FORMAT(I1,1X,A9,10(1X,A4,1X,A3)) - WRITE(6,83) NREC,DAYMNZ,NW,IUNITO,STMNMX, - 1 IABS(NUMSAV(NW))-1,(SAVRSM(NW,NAL),SAVID(NW,NAL), - 2 NAL=1,IABS(NUMSAV(NW))) - 83 FORMAT('...ADDING NEW ALIAS RECORD ',I3,' WITH DATE=',F12.3, - 1 ' AND INDEX',I3,' TO UNIT ',I3,' FOR STORM NAME=',A,'.'/4X, - 2 'NUMBER OF OBSERVERS IS:',I2,' RSMC, STORM IDS ARE:'/10X, - 3 10(1X,A4,1X,A3)) - - ENDIF - ENDDO - WRITE(6,91) NREC,IUNITO - 91 FORMAT(/'...',I3,' RECORDS HAVE BEEN WRITTEN TO UNIT',I3) - - RETURN - -C----------------------------------------------------------------------- - - ENTRY AKACPY(IUNITI,IUNITO) - - NCPYAL=0 - WRITE(6,101) IUNITI,IUNITO - 101 FORMAT(/'...ENTERING AKACPY TO COPY ALIAS RECORDS FROM IUNITI=', - 1 I3,' TO IUNITO=',I3,':') - - 110 READ(IUNITI,81,END=180) NALMX,STMNMX,(RSMCCP(NAL),STIDCP(NAL), - 1 NAL=1,NALMX) - - DO NALZ=1,NAKSAV - FOUND=.FALSE. - -C SAME STORM NAME IF NOT NAMELESS - - IF(STMNMX .NE. 'NAMELESS' .AND. - 1 SAVNAM(NALZ) .NE. 'NAMELESS' .AND. - 2 STMNMX .EQ. SAVNAM(NALZ)) THEN - FOUND=.TRUE. 
- GO TO 171 - -C DIRECT COMPARISON OF STORM IDS FOR THE SAME RSMC - - ELSE - DO NAL2=1,NALMX - DO NAL1=1,NUMSAV(NALZ) - IF(SAVRSM(NALZ,NAL1) .EQ. RSMCCP(NAL2) .AND. - 1 SAVID (NALZ,NAL1) .EQ. STIDCP(NAL2)) FOUND=.TRUE. - ENDDO - ENDDO - ENDIF - - ENDDO - 171 CONTINUE - - IF(.NOT. FOUND) THEN - NCPYAL=NCPYAL+1 - WRITE(IUNITO,81) NALMX,STMNMX,(RSMCCP(NAL),STIDCP(NAL), - 1 NAL=1,NALMX) - WRITE(6,175) NALMX,STMNMX,(RSMCCP(NAL),STIDCP(NAL), - 1 NAL=1,NALMX) - 175 FORMAT('...',I1,1X,A9,10(1X,A4,1X,A3)) - - ELSE - WRITE(6,177) STMNMX - 177 FORMAT('...STORM ',A,' IS ALREADY IN OUTPUT ALIAS FILE. IT WILL ', - 1 'NOT BE COPIED.') - ENDIF - - GO TO 110 - - 180 CONTINUE - WRITE(6,181) NCPYAL,IUNITI,IUNITO - 181 FORMAT('...',I3,' RECORDS COPIED FROM UNIT',I3,' TO UNIT ',I3,'.') - - RETURN - -C----------------------------------------------------------------------- - - ENTRY AKAFND(IUNITI,ICSTNM,ICRSMC,ICSTID,NAKA,AKANAM,AKRSMC, - 1 AKSTID,IFAKA) - - ifaka=0 - irec=0 - rewind iuniti - 210 read(iuniti,81,end=240) nalmx,stmnmx,(rsmccp(nal),stidcp(nal), - 1 nal=1,min(nalmx,maxaka)) - irec=irec+1 - do nal=1,nalmx - if(icrsmc .eq. rsmccp(nal) .and. - 1 icstid .eq. stidcp(nal)) then - ifaka=irec - go to 240 - endif - enddo - go to 210 - 240 continue - - if(ifaka .gt. 0) then - - if(nalmx .gt. naka) then - write(6,241) nalmx,naka - 241 format('******Insufficient storage to return aliases. nalmx,', - 1 'naka=',2i5,' Abort.') - call abort1(' AKAFND',241) - endif - - naka=nalmx - akanam=stmnmx - akrsmc(1:nalmx)=rsmccp(1:nalmx) - akstid(1:nalmx)=stidcp(1:nalmx) -c write(6,251) naka,ifaka,icstnm,icrsmc,icstid,akanam, -c 1 (akrsmc(nal),akstid(nal),nal=1,naka) -c 251 format('...akafnd results: # of aliases=',i4,' matching alias ', -c 1 'record #=',i4,' input storm name,rsmc,id=',3(a,1x)/4x, -c 2 'matched name,rsmc,id=',a/(4x,10(1x,a4,1x,a3))) - - else -c write(6,271) icstnm,icrsmc,icstid -c 271 format('###Storm not found in akavit file, storm name,rsmc,', -c 1 'id are:',3(a,1x)) - endif - return - -C----------------------------------------------------------------------- - - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: TCCLIM TROPICAL CYCLONE CLIMATOLOGICAL VALUES -C PRGMMR: S. LORD ORG: NP22 DATE: 1992-04-07 -C -C ABSTRACT: RETURNS CLIMATOLOGICAL VALUES FOR SOME TROPICAL CYCLONE -C PROPERTIES. PROPERTIES ARE: CENTRAL PRESSURE OF STORM; -C ENVIRONMENTAL PRESSURE ON THAT ISOBAR RADIUS OF THE OUTERMOST -C CLOSED ISOBAR A SECOND ENTRY CONTAINS PRESSURE-WIND TABLES FOR -C THE ATLANTIC, EAST AND CENTRAL PACIFIC AND WEST PACIFIC BASINS. -C -C PROGRAM HISTORY LOG: -C 1992-04-07 S. LORD -C 1992-09-04 S. LORD ADDED PRESSURE WIND RELATIONSHIP -C -C USAGE: VALUE=TCCLIM(IVAR,IBASN) OR VALUE=TCPWTB(PRES,IBASN) -C INPUT ARGUMENT LIST: -C IVAR - VARIABLE NUMBER (7: CENTRAL PRESSURE) -C - (8: ENVIRONMENTAL PRESSURE) -C - (9: RADIUS OF OUTERMOST CLOSED ISOBAR) -C IBASN - BASIN NUMBER -C PRES - PRESSURE IN MB -C -C -C REMARKS: IVAR VALUES OF 7,8,9 ONLY ARE ALLOWED. 
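The TCCLIM/TCPWTB documentation above describes a simple pressure-wind relationship: for a given basin, the TCPWTB entry locates the tabulated pressure interval that brackets the input central pressure and linearly interpolates the maximum wind between the two bracketing table values. A rough Python sketch of that lookup, using the first-basin column of the PRTABL/VMTABL DATA statements in the function body that follows (values and units are taken as-is from those tables; the name tcpwtb_sketch is illustrative only, not part of the source):

    # Piecewise-linear pressure-to-wind lookup, as in entry TCPWTB.
    PR = [1020., 987., 979., 970., 960., 948., 935., 921., 906., 890., 850.]
    VM = [12.5, 33.5, 39.7, 46.4, 52.6, 59.3, 65.5, 72.2, 80.0, 87.6, 110.]

    def tcpwtb_sketch(pres):
        iprz = len(PR) - 1                  # default: deepest interval (IPRZ=NPRMAX+1)
        for ipr in range(1, len(PR) - 1):   # mirrors DO IPR=1,NPRMAX
            if PR[ipr - 1] >= pres > PR[ipr]:
                iprz = ipr
                break
        frac = (pres - PR[iprz - 1]) / (PR[iprz] - PR[iprz - 1])
        return VM[iprz - 1] + frac * (VM[iprz] - VM[iprz - 1])

    print(tcpwtb_sketch(965.0))   # ~49.5, halfway between the 970 and 960 entries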
-C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - FUNCTION TCCLIM(IVAR,IBASN) - - PARAMETER (NPRMAX=9) - - PARAMETER (NBASIN=11) - PARAMETER (ISECVR= 5,ITERVR=10) - PARAMETER (NSECVR=ITERVR-ISECVR) - - DIMENSION SECVCL(NBASIN,NSECVR-2),PRTABL(NBASIN,0:NPRMAX+1), - 1 VMTABL(NBASIN,0:NPRMAX+1) - - DATA SECVCL/3*940.0,3*930.0,2*970.0,3*960.0, - 1 3*1010.0,5*1008.0,3*1010.0, - 2 6*400.0,5*300.0/ - - DATA PRTABL/2*1020.,9*1020., 2*987.,9*976., - 2 2*979.,9*966., 2*970.,9*954., - 2 2*960.,9*941., 2*948.,9*927., - 3 2*935.,9*914., 2*921.,9*898., - 4 2*906.,9*879., 2*890.,9*858., - 5 2*850.,9*850./ - - DATA VMTABL/11*12.5,11*33.5,11*39.7,11*46.4,11*52.6,11*59.3, - 1 11*65.5,11*72.2,11*80.0,11*87.6,11*110./ - - ITABL=IVAR-(ISECVR+2)+1 - TCCLIM=SECVCL(IBASN,ITABL) - - RETURN - -C----------------------------------------------------------------------- - - ENTRY TCPWTB(PRESR,IBASN) - - DO IPR=1,NPRMAX - IF(PRESR .LE. PRTABL(IBASN,IPR-1) .AND. - 1 PRESR .GT. PRTABL(IBASN,IPR)) THEN - IPRZ=IPR - GO TO 11 - ENDIF - ENDDO - IPRZ=NPRMAX+1 - 11 CONTINUE - TCPWTB=VMTABL(IBASN,IPRZ-1)+ - 1 (VMTABL(IBASN,IPRZ)-VMTABL(IBASN,IPRZ-1))* - 2 (PRESR-PRTABL(IBASN,IPRZ-1))/ - 3 (PRTABL(IBASN,IPRZ)-PRTABL(IBASN,IPRZ-1)) - - RETURN - -C----------------------------------------------------------------------- - - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: RCNCIL MANAGES STORM CATALOG -C PRGMMR: S. LORD ORG: NP22 DATE: 1993-03-05 -C -C ABSTRACT: STORM RECORDS ARE CHECKED FOR PRESENCE IN THE STORM -C CATALOG UPDATED AND ADDED IF NECESSARY. -C -C PROGRAM HISTORY LOG: -C 1992-03-25 S. LORD -C 1992-08-25 S. LORD ADDED IER RETURN CODE -C -C USAGE: CALL RCNCIL(IUNTCA,IUNTCN,IUNTAL,NTEST,NOKAY,NBAD,MAXREC, -C MAXCKS,IEFAIL,IER,IECAT,NUMTST,NUMOKA,NUMBAD, -C TSTREC,BADREC,OKAREC) -C INPUT ARGUMENT LIST: -C IUNTCA - UNIT NUMBER FOR THE STORM CATALOG. -C -C IUNTCN - UNIT NUMBER FOR THE TEMPORARY CATALOG -C -C IUNTAL - UNIT NUMBER FOR ALIAS FILE. -C NTEST - NUMBER OF CURRENT RECORDS TO BE TESTED. -C MAXREC - MAXIMUM NUMBER OF RECORDS (STORAGE FOR ARRAYS) -C MAXCKS - MAXIMUM NUMBER OF ERROR CHECKS (STORAGE FOR ARRAYS) -C IEFAIL - ARRAY CONTAINING ERROR CODES FOR ERROR CHECKS -C NUMTST - INTEGER ARRAY CONTAINING INDEX NUMBER OF EACH RECORD -C - TO BE TESTED. -C IOVRLP - SCRATCH ARRAY. -C TSTREC - CHARACTER ARRAY CONTAINING RECORDS TO BE TESTED. -C -C OUTPUT ARGUMENT LIST: -C NOKAY - NUMBER OF RECORDS THAT PASSED THE RSMC CHECK. -C NBAD - NUMBER OF RECORDS THAT FAILED THE RSMC CHECK. -C IER - ERROR RETURN CODE. 0 EXCEPT IF LOGICAL INCONSISTENCY -C FOUND. -C IECAT - INTEGER ARRAY CONTAINING ERROR CODE FOR EACH INPUT -C - RECORD. SEE COMMENTS IN PGM FOR KEY TO ERROR CODES. -C NUMOKA - INTEGER ARRAY CONTAINING INDEX NUMBER OF EACH GOOD -C - RECORD. -C NUMBAD - INTEGER ARRAY CONTAINING INDEX NUMBER OF EACH BAD -C - RECORD. -C BADREC - CHARACTER ARRAY CONTAINING BAD RECORDS THAT FAILED -C - THE RSMC CHECK. -C OKAREC - CHARACTER ARRAY CONTAINING ALL RECORDS THAT PASSED -C - THE RSMC CHECK. 
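As the comments inside RCNCIL below spell out, each test record's catalog code (1: new storm, 2: duplicate storm id, 3: already in catalog) is packed together with the matching catalog record number into a single integer, iecat = code*ipack + ncat with ipack = 10*maxrec, so that both pieces are recoverable by integer division. A minimal Python sketch of that packing, assuming maxrec = 1000 as the source comments suggest:

    MAXREC = 1000              # assumed; the comments say a 4-digit value works
    IPACK = 10 * MAXREC        # same as ipack = 10*maxrec in RCNCIL

    def pack_code(code, ncat):
        return code * IPACK + ncat

    def unpack_code(iecat):
        code = iecat // IPACK            # catalog action code (1, 2 or 3)
        ncat = iecat - code * IPACK      # matching catalog record number
        return code, ncat

    assert unpack_code(pack_code(3, 57)) == (3, 57)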
-C -C INPUT FILES: -C UNIT 25 - ALIAS FILE CONTAINING EQUIVALENT STORM IDS -C - FOR STORMS THAT HAVE BEEN REPORTED BY MULTIPLE RSMC'S -C - DCB: LRECL=255, BLKSIZE=23400, RECFM=VB -C UNIT 26 - NEW ALIAS FILE CONTAINING EQUIVALENT STORM IDS -C - FOR STORMS THAT HAVE BEEN REPORTED BY MULTIPLE RSMC'S -C UNIT 27 - STORM CATALOG FILE -C - DCB: LRECL=255, BLKSIZE=23400, RECFM=VB -C UNIT 28 - SCRATCH STORM CATALOG FILE -C -C OUTPUT FILES: -C UNIT 06 - STANDARD OUTPUT PRINT -C UNIT 27 - SAME AS ABOVE -C UNIT 28 - SAME AS ABOVE -C -C REMARKS: NONE. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE RCNCIL(IUNTCA,IUNTCN,IUNTAL,NTEST,NOKAY,NBAD,MAXREC, - 1 MAXCKS,IEFAIL,IER,IECAT,NUMTST,NUMOKA,NUMBAD, - 2 TSTREC,BADREC,OKAREC) - - PARAMETER (NERCRC=3) - PARAMETER (MAXSTM=70) - PARAMETER (NOVRMX=MAXSTM) - PARAMETER (NADDMX=10) - - CHARACTER*(*) TSTREC(0:NTEST),BADREC(MAXREC),OKAREC(NOKAY), - 1 ERCRCN(NERCRC)*60 - character stnmal*9,stidal*3,rsmcal*4,stnmca*9,stidca*3,rsmcca*4, - 1 stidad*3,rsmcad*4 - - PARAMETER (MAXCHR=95) - PARAMETER (MAXVIT=15) - PARAMETER (NBASIN=11) - PARAMETER (NRSMCX=4) - - CHARACTER BUFIN*1,RSMCZ*4,STMIDZ*3,STMNMZ*9,FSTFLZ*1,STMDPZ*1, - 1 LATNS*1,LONEW*1,FMTVIT*6,BUFINZ*100,RELOCZ*1,IDBASN*1, - 2 RSMCID*4,RSMCAP*1 - - DIMENSION IVTVAR(MAXVIT),ISTVAR(MAXVIT),IENVAR(MAXVIT) - - DIMENSION BUFIN(MAXCHR),IDBASN(NBASIN), - 1 FMTVIT(MAXVIT),RSMCID(NRSMCX),RSMCAP(NRSMCX) - - EQUIVALENCE (BUFIN(1),RSMCZ),(BUFIN(5),RELOCZ),(BUFIN(6),STMIDZ), - 1 (BUFIN(10),STMNMZ),(BUFIN(19),FSTFLZ), - 2 (BUFIN(37),LATNS),(BUFIN(43),LONEW), - 3 (BUFIN(95),STMDPZ),(BUFIN(1),BUFINZ) - - EQUIVALENCE (IVTVAR(1),IDATEZ),(IVTVAR(2),IUTCZ) - - DIMENSION IVTVRX(MAXVIT) - - DIMENSION RINC(5) - - CHARACTER BUFCK(MAXCHR)*1,RSMCX*4,RELOCX*1,STMIDX*3,BUFINX*100, - 1 STMNMX*9,LATNSX*1,LONEWX*1 - - DIMENSION IEFAIL(MAXREC,0:MAXCKS),IECAT(MAXREC),NUMOKA(NOKAY), - 1 NUMBAD(MAXREC),NUMTST(NTEST),MAXNO(NBASIN) - - dimension rsmcal(novrmx),stidal(novrmx), - 1 rsmcca(novrmx),stidca(novrmx), - 2 rsmcad(naddmx),stidad(naddmx) - - EQUIVALENCE (BUFCK(1),RSMCX),(BUFCK(5),RELOCX),(BUFCK(6),STMIDX), - 1 (BUFCK(1),BUFINX),(BUFCK(10),STMNMX), - 2 (BUFCK(35),LATNSX),(BUFCK(41),LONEWX) - - EQUIVALENCE (IVTVRX(1),IDATEX),(IVTVRX(2),IUTCX) - - DATA FMTVIT/'(I8.8)','(I4.4)','(I3.3)','(I4.4)',2*'(I3.3)', - 1 3*'(I4.4)','(I2.2)','(I3.3)',4*'(I4.4)'/, - 2 ISTVAR/20,29,34,39,45,49,53,58,63,68,71,75,80,85,90/, - 3 IENVAR/27,32,36,42,47,51,56,61,66,69,73,78,83,88,93/ - - DATA IDBASN/'L','E','C','W','O','T','U','P','S','B','A'/ - - DATA RSMCID/'NHC ','JTWC','ADRM','JMA '/, - 1 RSMCAP/'N','W','A','J'/ - - data maxno/nbasin*0/,minday/-1/,maxday/1/ - - DATA ERCRCN - 1 /'10: NEW STORM, ADD TO CATALOG ', - 2 '20: DUP. STORM ID IN CATALOG. CREATE NEW ID, APPEND CATALOG ', - 3 '30: STORM FOUND IN CATALOG, UPDATE CATALOG ENTRY '/ - - write(6,1) nokay - 1 format(//'...Entering rcncil to reconcile catalog, alias file ', - 1 'and new records. 
Number of okay records=',i4/4x,'Codes', - 2 ' are:'/10x,'1: No catalog entry'/13x,'Action: Append ', - 3 'catalog (first time appearance), record unchanged'/10x, - 4 '2: Duplicate storm id to primary catalog id'/13x, - 5 'Action: Find new, unique id which is one more than the', - 6 'largest id for that basin, modify record, append to ', - 7 'catalog'/10x,'3: Storm found in catalog,'/13x,'Action:', - 8 'update catalog entry') - rewind iuntca - rewind iuntcn - ncat=0 - ipack=10*maxrec - nadd=0 - ier=0 - - write(6,3) - 3 format(/'...Input records are:') - - do iec=1,ntest - iecat(iec)=ipack - write(6,5) iec,numtst(iec),tstrec(iec) - 5 format('...',i4,'...',i5,'...',a) - - enddo - - call sclist(iuntca) - call aklist(iuntal) - -c First pass through catalog to determine what should be done - - 20 continue - READ(IUNTCA,21,END=90) NALCA,STNMCA,IYMDMN,IUTCMN,IYMDMX,IUTCMX, - 1 (RSMCCA(NAL),STIDCA(NAL), - 2 NAL=1,MIN(NALCA,NOVRMX)) - 21 FORMAT(I1,1X,A9,2(1X,I8,1X,I4.4),10(1X,A4,1X,A3)) - ncat=ncat+1 - -c Determine maximum storm id in each basin from the catalog - - read(stidca(1)(1:2),23) idno - 23 format(i2) - do nb=1,nbasin - if(stidca(1)(3:3) .eq. idbasn(nb)) then - maxno(nb)=max0(maxno(nb),idno) - go to 31 - endif - enddo - 31 continue - -c Determine the catalog code for each record -c Codes and actions are: - -c Code 1: No catalog entry -c Action: Append catalog (first time appearance), record unchanged - -c Code 2: Duplicate storm id to primary catalog id, storm not -c found in catalog -c Action: Find new, unique id which is one more than the largest -c id for that basin, modify record, append to catalog - -c Code 3: Storm found in catalog -c Action: Update catalog date and other entries if necessary - -c Notes: codes from 1-3 are in order of increasing priority so that -c a code of 2 can be overridden by a code of 3 -c A final check on the consistency between the catalog and the alias -c (akavit) file is made. Any inconsistency is resolved in favor of t -c catalog but is flagged by a positive error code even though the -c record is retained. - -c Codes are packed so that the appropriate record number in the -c catalog is recoverable. Packing depends on maxrec, which -c should be a 4 digit number (1000 should work fine). - - do 80 nrec=1,ntest - -c Look at okay records and bad records with overland error codes. -c An error code for the rsmcck of 22 forces a look at the -c alias file since an entry has been made already. - - if(nrec .le. nokay .or. - 1 (nrec .gt. nokay .and. (iefail(numtst(nrec),4) .eq. 5 .or. - 2 iefail(numtst(nrec),4) .eq. 6 .or. - 3 iefail(numtst(nrec),6) .eq. 22))) then - - bufinz=tstrec(nrec) - - if(rsmcz(1:1) .ne. '!' .and. iefail(numtst(nrec),6) .ne. 22) - 1 then - nalsav=1 - stnmal=stmnmz - rsmcal(1)=rsmcz - stidal(1)=stmidz - - else -c write(6,35) nrec,stmnmz,rsmcz,stmidz -c 35 format('...Calling akafnd for record',i4,' with storm name,', -c 1 'rsmc,id=',3(a,1x),' to find all aliases.') - nalsav=novrmx - call akafnd(iuntal,stmnmz,rsmcz,stmidz,nalsav,stnmal,rsmcal, - 1 stidal,ifnd) - - if(ifnd .eq. 0) then - write(6,37) stmnmz,stmidz,rsmcz - 37 format('******Bang or overlapped storm not found in akavit file ', - 1 'when finding aliases. stmnmz,stmidz,rsmcz=',3(1x,a), - 2 ' abort') -c call abort1(' RCNCIL',37) - endif - - endif - - do nal=1,nalsav - -c Code 3: - -c if the record is nameless the entire storm id and rsmc -c must match - - IF(STMNMZ .NE. 'NAMELESS') THEN - - if(stnmca .eq. stnmal .and. - 1 stidca(1)(3:3) .eq. 
stidal(nal)(3:3)) then - iecat(nrec)=3*ipack+ncat - write(6,43) nrec,stnmal,stidal(nal),rsmcal(nal),iecat(nrec) - 43 format('...For nrec=',i5,' storm named=',a,' with id,rsmc=', - 1 2(a,1x),' is in catalog, iecat=',i6) - go to 80 - endif - ENDIF - - do nca=1,nalca - if(rsmcal(nal) .eq. rsmcca(nca) .and. - 1 stidal(nal) .eq. stidca(nca)) then - iecat(nrec)=3*ipack+ncat - write(6,47) nrec,nca,stnmal,stidal(nal),rsmcal(nal),iecat(nrec) - 47 format('...For nrec,nca=',2i5,' storm named=',a,' with id,rsmc=', - 1 2(a,1x),' is in catalog, iecat=',i6) - go to 80 - endif - enddo - enddo - - -c Code 2: now there is no exact match to the catalog - make sure the -c won't be a duplicate storm id - -c Possibilities are: -c 1) If both record and catalog are bang, RSMCCK may have changed th -c rsmc (e.g. added a new observing rsmc). We assume the storm is -c in the catalog (code 3). -c 2) If the catalog is a bang, and the record is not, the record is -c new storm (code 2) or the records has been processed by rsmcc -c but not yet by rcncil. Check the AKAVIT file and adjust the -c code accordingly. -c 3) Neither record or catalog entry is a bang (code 2). - - if(stmidz .eq. stidca(1)) then - - if(rsmcz(1:1) .eq. '!' .and. - 1 rsmcca(1)(1:1) .eq. '!') then - iecatz=3 - write(6,71) nrec,stmidz,ncat,rsmcz,rsmcca(1) - 71 format(/'...For nrec=',i5,' only storm id=',a,' matches catalog ', - 1 'entry',i5,'. Record and catalog rsmcs are both bang:', - 2 2(1x,a)/4x,'###This case should never happen!') - - else if(rsmcz(1:1) .ne. '!' .and. - 1 rsmcca(1)(1:1) .eq. '!') then - - write(6,73) nrec,stmidz,rsmcz,rsmcca(1),stmnmz,rsmcz,stmidz - - 73 format('...For nrec=',i5,' only storm id=',a,' matches catalog ', - 1 'entry.'/4x,'...Record rsmc (',a,') is not bang but ', - 2 'catalog rsmc is (',a,').'/4x,'...Calling akafnd with ', - 3 'storm name, rsmc, id=',3(a,1x),' to find all aliases.') - - nalsav=novrmx - call akafnd(iuntal,stmnmz,rsmcz,stmidz,nalsav,stnmal,rsmcal, - 1 stidal,ifnd) - if(ifnd .eq. 1) then - write(6,75) - 75 format(3x,'...Record found in alias file. Code 3 assigned.') - iecatz=3 - - else - write(6,77) - 77 format(3x,'...Record not found in alias file. Code 2 retained.') - iecatz=2 - endif - - else - iecatz=2 - write(6,79) nrec,stmidz,ncat,rsmcz,rsmcca(1) - 79 format(/'...For nrec=',i5,' only storm id=',a,' matches catalog ', - 1 'entry',i5,'. Rsmcs are:',2(1x,a)/4x,' ###Probable new ', - 2 'storm with a duplicate storm id') - endif - - iecat(nrec)=max0(iecat(nrec)/ipack,iecatz)*ipack+ncat - endif - - endif - 80 continue - -c Write to the scratch catalog - - WRITE(IUNTCN,21) NALCA,STNMCA,IYMDMN,IUTCMN,IYMDMX,IUTCMX, - 1 (RSMCCA(NAL),STIDCA(NAL), - 2 NAL=1,MIN(NALCA,NOVRMX)) - go to 20 - 90 continue - - if(ncat .eq. 0) then - write(6,91) - 91 format(/'...There are no catalog entries. All input records will', - 1 ' be assigned code 1.') - iecat(1:ntest)=ipack - - endif - - write(6,131) - 131 format('...Summary of catalog codes for first scan:') - do nrec=1,ntest - if(nrec .le. nokay .or. - 1 (nrec .gt. nokay .and. (iefail(numtst(nrec),4) .eq. 5 .or. - 2 iefail(numtst(nrec),4) .eq. 6 .or. - 3 iefail(numtst(nrec),6) .eq. 22))) then - write(6,133) nrec,iecat(nrec),tstrec(nrec) - 133 format(4x,2i6,1x,'...',a,'...') - if(iabs(iefail(numtst(nrec),5)) .le. 
9) then - iefail(numtst(nrec),5)=-(iabs(iefail(numtst(nrec),5))+ - 1 iabs(iecat(nrec))/ipack*10) - endif - endif - enddo - write(6,143) (nb,idbasn(nb),maxno(nb),nb=1,nbasin) - 143 format('...Summary of maximum storm ids for each basin:'/(4x,i3, - 1 1x,a,i4)) - -c Second pass: copy back from the scratch catalog and update -c each entry as needed - - rewind iuntca - rewind iuntcn - ncat=0 - - 201 continue - READ(IUNTCN,21,END=300) NALCA,STNMCA,IYMDMN,IUTCMN,IYMDMX,IUTCMX, - 1 (RSMCCA(NAL),STIDCA(NAL), - 2 NAL=1,MIN(NALCA,NOVRMX)) - ncat=ncat+1 - -c *********************** -c **** Code 3 errors **** -c *********************** - - do nrec=1,ntest - - if(nrec .le. nokay .or. - 1 (nrec .gt. nokay .and. (iefail(numtst(nrec),4) .eq. 5 .or. - 2 iefail(numtst(nrec),4) .eq. 6 .or. - 3 iefail(numtst(nrec),6) .eq. 22))) then - - bufinz=tstrec(nrec) - ietyp=iecat(nrec)/ipack - ircat=iecat(nrec)-ietyp*ipack - - if(ircat .eq. ncat .and. ietyp .eq. 3) then - - write(6,213) nrec,bufinz,NALCA,STNMCA,IYMDMN,IUTCMN,IYMDMX,IUTCMX, - 1 (RSMCCA(NAL),STIDCA(NAL), - 2 NAL=1,MIN(NALCA,NOVRMX)) - 213 format(/'...Preparing to reconcile code 3 errors for nrec=',i3, - 1 ' record, catalog entry are:'/4x,a,'...'/4x,i1,1x,a9,2(1x, - 2 i8,1x,i4.4),10(1x,a4,1x,a3)) - - IF(STMNMZ .NE. 'NAMELESS' .AND. STNMCA .EQ. 'NAMELESS') THEN - write(6,217) stnmca,ncat,stmnmz,nrec - 217 format('...',a,' storm with catalog entry=',i4,' will have name=', - 1 a,' assigned, nrec=',i4) - STNMCA=STMNMZ - ENDIF - - do iv=1,2 - call decvar(istvar(iv),ienvar(iv),ivtvar(iv),ierdec,fmtvit(iv), - 1 bufinz) - enddo - - call mnmxda(iymdmn,iutcmn,idatez,iutcz,dayz,minday) - call mnmxda(iymdmx,iutcmx,idatez,iutcz,dayz,maxday) - daysav=dayz - ilate=nrec - -c Do all records identified as the same storm - - do nchk=nrec+1,ntest - - if(nchk .le. nokay .or. - 1 (nchk .gt. nokay .and. (iefail(numtst(nchk),4) .eq. 5 .or. - 2 iefail(numtst(nchk),4) .eq. 6 .or. - 3 iefail(numtst(nchk),6) .eq. 22))) then - - bufinx=tstrec(nchk) - ietypx=iecat(nchk)/ipack - ircatx=iecat(nchk)-ietyp*ipack - - if(ircatx .eq. ncat .and. ietypx .eq. 3) then - - IF(STMNMX .NE. 'NAMELESS' .AND. STNMCA .EQ. 'NAMELESS') THEN - write(6,227) stnmca,ncat,stmnmx,nchk - 227 format('...',a,' storm with catalog entry=',i4,' will have name=', - 1 a,' assigned, nchk=',i4) - STNMCA=STMNMX - ENDIF - - do iv=1,2 - call decvar(istvar(iv),ienvar(iv),ivtvrx(iv),ierdec,fmtvit(iv), - 1 bufinx) - enddo - -c write(6,231) nchk,iymdmn,iutcmn,idatex,iutcx,bufinx -c 231 format('...calling mnmxda with nchk,iymdmn,iutcmn,idatex,iutcx,' -c 1 'bufinx=',i4,i9,i6,i7,i6/4x,a) - call mnmxda(iymdmn,iutcmn,idatex,iutcx,dayz,minday) - call mnmxda(iymdmx,iutcmx,idatex,iutcx,dayz,maxday) - if(dayz .gt. daysav) then - daysav=dayz - ilate=nchk - endif - - iecat(nchk)=-iabs(iecat(nchk)) - endif - endif - enddo - -c Look in akavit for the storm. If it is there, extract -c latest pertinent information that will be transferred to the -c storm catalog - - write(6,243) ilate,stmnmz,rsmcz,stmidz - 243 format('...Look in akavit for appropriate information. Latest ', - 1 'record has index=',i5,' storm name,rsmc,id=',3(a,1x)) - - nalsav=novrmx - call akafnd(iuntal,stmnmz,rsmcz,stmidz,nalsav,stnmca,rsmcal, - 1 stidal,ifnd) - - if(ifnd .eq. 0) then - if(rsmcz(1:1) .eq. '!') then - write(6,271) stmnmz,stmidz,rsmcz - 271 format('******Storm not found in akavit file. stmnmz,stmidz,', - 1 'rsmcz=',3(1x,a),' abort') - call abort1(' RCNCIL',271) - - else - write(6,273) ilate - 273 format('...Storm is not multiply observed. 
We copy the latest ', - 1 'record (#',i5,') to get the latest information.') - bufinx=tstrec(ilate) - nalca=1 - rsmcca(1)=rsmcx - stidca(1)=stmidx - if(stmnmx .ne. 'NAMELESS') stnmca=stmnmx - endif - - else - write(6,277) - 277 format('...Storm is multiply observed. We copy the alias record ', - 1 'to get the latest information.') - -c Do not copy the storm id if there is already a catalog entry - - nalca=nalsav - rsmcca(1)=rsmcal(1) - rsmcca(2:nalca)=rsmcal(2:nalca) - stidca(2:nalca)=stidal(2:nalca) - endif - - iecat(nrec)=-iabs(iecat(nrec)) - - endif - endif - enddo - -c write to the updated catalog - - WRITE(IUNTCA,21) NALCA,STNMCA,IYMDMN,IUTCMN,IYMDMX,IUTCMX, - 1 (RSMCCA(NAL),STIDCA(NAL), - 2 NAL=1,MIN(NALCA,NOVRMX)) - WRITE(6,293) NCAT,NALCA,STNMCA,IYMDMN,IUTCMN,IYMDMX,IUTCMX, - 1 (RSMCCA(NAL),STIDCA(NAL), - 2 NAL=1,MIN(NALCA,NOVRMX)) - 293 format(/'...CATALOG RECORD ',I3,' WRITTEN. RECORD IS:',I1,1X,A9, - 1 2(1X,I8,1X,I4.4),10(1X,A4,1X,A3)) - go to 201 - - 300 continue - -c **************************** -c **** Code 1 or 2 errors **** -c **************************** - -c Add new storms to the catalog or storms that have duplicate -c ids - - nadcat=0 -c** naladd=0 - do nrec=1,ntest - - if(nrec .le. nokay .or. - 1 (nrec .gt. nokay .and. (iefail(numtst(nrec),4) .eq. 5 .or. - 2 iefail(numtst(nrec),4) .eq. 6 .or. - 3 iefail(numtst(nrec),6) .eq. 22))) then - - bufinz=tstrec(nrec) - ietyp=iecat(nrec)/ipack - - if(ietyp .eq. 1 .or. ietyp .eq. 2) then - write(6,303) nrec,ietyp,bufinz - 303 format(//'...Ready to add new storm to catalog. nrec,ietyp,', - 1 'record are:',2i4/4x,a) - -c Default entry for catalog is a copy of the candidate record or the -c entry from the alias (akavit) file. These entries may be -c updated by records with a later date, entries from the -c alias file, and the need to create a new, unique storm id. - - if(rsmcz(1:1) .ne. '!') then - nalca=1 - stnmca=stmnmz - rsmcca(1)=rsmcz - stidca(1)=stmidz - - else - write(6,305) nrec,stmnmz,rsmcz,stmidz - 305 format('...Calling akafnd for record',i4,' with storm name,', - 1 'rsmc,id=',3(a,1x),' to produce default catalog entries.') - nalsav=novrmx - call akafnd(iuntal,stmnmz,rsmcz,stmidz,nalsav,stnmca,rsmcca, - 1 stidca,ifnd) - nalca=nalsav - - if(ifnd .eq. 0) then - write(6,307) stmnmz,stmidz,rsmcz - 307 format('******Storm not found in akavit file. stmnmz,stmidz,', - 1 'rsmcz=',3(1x,a),' abort') - call abort1(' RCNCIL',307) - endif - endif - - read(stmidz(1:2),23) idno - do nb=1,nbasin - if(stmidz(3:3) .eq. idbasn(nb)) then - nbasav=nb - go to 311 - endif - enddo - 311 continue - - istidn=0 - if(idno .le. maxno(nbasav)) then - istidn=1 - write(6,313) idno,maxno(nbasav) - 313 format('###Storm id number=',i3,' is not larger than catalog ', - 1 'maximum. A new number and storm id must be created=',i4) - endif - - do naddc=1,nadcat - if(stmidz .eq. stidad(naddc)) then - istidn=1 - write(6,315) stmidz - 315 format('...Current storm id has already been added to catalog. A', - 1 ' unique one must be created.') - endif - enddo - -c Create added storm id and rsmc in advance to guarantee uniqueness -c or transfer new storm id to the catalog record. -c istidn=0 : no uniqueness problem has been detected -c istidn=1 : uniqueness problem detected and new id will -c be created -c The new id will be transferred to all records. It must be a bang -c record with only one observing rsmc. It must also be entered int -c the alias file. - - istidn=0 ! Qingfu added to skip the changes of storm ID number - - if(istidn .eq. 
1) then - - if(rsmcz(1:1) .eq. '!') then - write(6,331) stmidz,rsmcz,bufinz - 331 format('###Storm with id, rsmc=',2(a,1x),'is a duplicate to a ', - 1 'catalog entry as well as being a bang storm. Record is:'/ - 2 4x,a) - write(6,333) - 333 format('******This problem is not yet coded. Abort') - call abort1(' rcncil',333) - - else - idnomx=-1 - do naddc=1,nadcat - read(stidad(naddc)(1:2),23) idno - if(stidad(naddc)(3:3) .eq. idbasn(nbasav)) - 1 idnomx=max0(idnomx,idno) - enddo - stidad(nadcat+1)(3:3)=idbasn(nbasav) - - if(idnomx .ge. 0) then - write(stidad(nadcat+1)(1:2),3401) idnomx+1 - 3401 format(i2.2) - write(6,341) idbasn(nbasav),stidad(nadcat+1) - 341 format('...Previous storms have been added for basin ',a,' storm', - 1 ' id set to one more than the maximum already added to ', - 2 'the catalog=',a) - else - write(stidad(nadcat+1)(1:2),3401) maxno(nbasav)+1 - write(6,343) idbasn(nbasav),stidad(nadcat+1) - 343 format('...No previous storms added for basin ',a,'. Storm id ', - 1 'set to one more than the maximum already in the catalog=', - 2 a) - endif - -c Create a bang record with one observing rsmc - -c** naladd=naladd+1 - do nrsz=1,nrsmcx - if(rsmcid(nrsz) .eq. rsmcz) then - nrsmc=nrsz - go to 351 - endif - enddo - 351 continue - nalca=2 - rsmcad(nadcat+1)='!'//rsmcap(nrsmc) - stidca(1)=stidad(nadcat+1) - rsmcca(1)=rsmcad(nadcat+1) - stidca(2)=stmidz - rsmcca(2)=rsmcz -c** write(6,355) naladd,(stidca(nca),rsmcca(nca),nca=1,nalca) - write(6,355) nadcat+1,(stidca(nca),rsmcca(nca),nca=1,nalca) - 355 format('...New bang storm (#',i2,') created with unique id. Id, ', - 1 'rsmc are:'/(4x,2(a,3x))) -c** call akasav(naladd,nalca,dayz,stmnmz,rsmcca,stidca) - - endif - - endif - - do iv=1,2 - call decvar(istvar(iv),ienvar(iv),ivtvar(iv),ierdec,fmtvit(iv), - 1 bufinz) - enddo - idatmn=idatez - iutcmn=iutcz - idatmx=idatez - iutcmx=iutcz - call ztime(idatez,iutcz,iyr,imo,ida,ihr,imin) - CALL W3DIFDAT((/IYR,IMO,IDA,0,0,0,0,0/),(/1899,12,31,0,0,0,0,0/), - $ 1,RINC) - JDY = NINT(RINC(1)) - call flday(jdy,ihr,imin,daysav) - ilate=nrec - -C####################################################################### - -c Do all records identified as the same storm - - do nchk=nrec+1,ntest - -C----------------------------------------------------------------------- - if(nchk .le. nokay .or. - 1 (nchk .gt. nokay .and. (iefail(numtst(nchk),4) .eq. 5 .or. - 2 iefail(numtst(nchk),4) .eq. 6 .or. - 3 iefail(numtst(nchk),6) .eq. 22))) then - - imatch=0 - - bufinx=tstrec(nchk) - ietypx=iecat(nchk)/ipack - -C....................................................................... - if(ietypx .eq. 1 .or. ietypx .eq. 2) then - - ifnd=0 - -c Storms are obviously the same - - if(stmidz .eq. stmidx .and. rsmcz .eq. rsmcx) then - write(6,371) nchk,nrec,nrec,bufinz,nchk,bufinx - 371 format('...Record',i5,' has the same storm id and rsmc as the ', - 1 'candidate record (#',i5,'). Records are:'/4x,i4,1x,a/4x, - 2 i4,1x,a) - ifnd=-1 - -c Last resort: look in akavit for the storm - - else - write(6,373) nchk,stmnmx,rsmcx,stmidx - 373 format('...calling akafnd for record',i4,' with storm name,rsmc,', - 1 'id=',3(a,1x)) - nalsav=novrmx - call akafnd(iuntal,stmnmx,rsmcx,stmidx,nalsav,stnmal, - 1 rsmcal,stidal,ifnd) - - if(ifnd .eq. 0) then - - if(rsmcx(1:1) .eq. '!') then - write(6,381) stmnmx,stmidx,rsmcx - 381 format('******Storm not found in akavit file. stmnmx,stmidx,', - 1 'rsmcx=',3(1x,a),' abort') - call abort1(' RCNCIL',381) - else -c write(6,383) -c 383 format('...Storm does not have a bang rsmc. 
It is therefore not ', -c 1 'required to find a match.') - endif - - else - write(6,405) ifnd - 405 format('...Storm found in akavit file at record #',i3) - do nal=1,nalsav - if(rsmcz .eq. rsmcal(nal) .and. - 1 stmidz .eq. stidal(nal)) then - imatch=1 - go to 411 - endif - enddo - 411 continue - endif - - endif - - if(imatch .eq. 1 .or. ifnd .eq. -1) then - write(6,413) ifnd,imatch - 413 format('...Storm matches exactly or by catalog association, ', - 1 'ifnd,imatch=',2i3) - do iv=1,2 - call decvar(istvar(iv),ienvar(iv),ivtvrx(iv),ierdec, - 1 fmtvit(iv),bufinx) - enddo - -c write(6,231) nchk,idatmn,iutcmn,idatex,iutcx,bufinx - call mnmxda(idatmn,iutcmn,idatex,iutcx,dayz,minday) - call mnmxda(idatmx,iutcmx,idatex,iutcx,dayz,maxday) - if(dayz .gt. daysav) then - daysav=dayz - ilate=nchk - endif - - if(istidn .eq. 1) then - tstrec(nchk)=bufinx - nadd=nadd+1 - badrec(nbad+nadd)=bufinx - numbad(nbad+nadd)=numtst(nchk) - iefail(numbad(nbad+nadd),5)= - 1 -iabs(iefail(numtst(nchk),5)) - stmidx=stidad(nadcat+1) - rsmcx =rsmcad(nadcat+1) - write(6,473) stmidx,bufinx,nadd,badrec(nbad+nadd) - 473 format('...Record same as candidate record to be added to ', - 1 'catalog. New storm id=',a,' is assigned. Modified ', - 2 'record is:'/4x,a/4x,'Bad record #',i3,' added is:'/4x,a) - endif - - iecat(nchk)=-iabs(iecat(nchk)) - if(nchk .le. nokay) then - okarec(nchk)=bufinx - else - badrec(nchk-nokay)=bufinx - endif - - endif -C....................................................................... - -c Exact match: substitute storm name if it is not nameless - - if(ifnd .eq. -1) then - - if(stmnmx.ne.'NAMELESS' .and. stmnmz.eq.'NAMELESS') then - stnmca=stmnmx - write(6,475) stnmca - 475 format('...NAMELESS candidate record is renamed to ',a,'from a ', - 1 'matching record.') - endif - -c Match through the alias file: copy alias information for the -c catalog entry - - else if(imatch .eq. 1) then - if(stmnmz.eq.'NAMELESS' .and. stnmal.ne.'NAMELESS') then - stnmca=stnmal - write(6,477) stnmca - 477 format('...NAMELESS candidate record is renamed to ',a,'from a ', - 1 'matching alias record.') - endif - - nalca=nalsav - rsmcca(1:nalca)=rsmcal(1:nalca) - stidca(1:nalca)=stidal(1:nalca) - - else - write(6,491) ifnd,imatch - 491 format('...Storm does not match exactly or by catalog ', - 1 'association, ifnd,imatch=',2i3) - endif - - endif - endif -C----------------------------------------------------------------------- - enddo -C####################################################################### - - if(iecat(nrec) .gt. 0) then - nadcat=nadcat+1 - - if(nadcat .gt. naddmx) then - write(6,505) nadcat,naddmx - 505 format('******Trying to add too many storms to the catalog,', - 1 ' nadcat,naddmx=',2i3) - call abort1(' RCNCIL',505) - endif - - if(istidn .eq. 1) then - nadd=nadd+1 - badrec(nbad+nadd)=bufinz - numbad(nbad+nadd)=numtst(nrec) - iefail(numbad(nbad+nadd),5)=-iabs(iefail(numtst(nrec),5)) - write(6,511) nadd,nrec,nbad+nadd,numtst(nrec) - 511 format(/'...Adding a new bad record due to duplicate storm id, ', - 1 'nadd,nrec,nbad+nadd,numtst=',4i4) - - stmidz=stidad(nadcat) - rsmcz =rsmcad(nadcat) - write(6,513) stidca(1),nalca,bufinz - 513 format('...Id for storm added to catalog =',a,' is new and ', - 1 'unique. 
nalca=',i3,' Record is:'/4x,a) - - else - stidad(nadcat)=stidca(1) - write(6,515) stidad(nadcat) - 515 format('...Id for storm added to catalog =',a,' has been ', - 1 'recorded to prevent duplication.') - endif - - WRITE(IUNTCA,21) NALCA,STNMCA,IDATMN,IUTCMN,IDATMX,IUTCMX, - 1 (RSMCCA(NAL),STIDCA(NAL), - 2 NAL=1,MIN(NALCA,NOVRMX)) - WRITE(6,293) NCAT+NADCAT,NALCA,STNMCA,IDATMN,IUTCMN,IDATMX, - 1 IUTCMX,(RSMCCA(NAL),STIDCA(NAL), - 2 NAL=1,MIN(NALCA,NOVRMX)) - endif - - if(nrec .le. nokay) then - okarec(nrec)=bufinz - else - badrec(nrec-nokay)=bufinz - endif - - iecat(nrec)=-iabs(iecat(nrec)) - endif - endif - - enddo -c** write(6,601) nadcat,naladd -c 601 format('...',i3,' new storms added to catalog. ',i3,' bang ', -c 1 'storms added to temporary alias file.'/4x,'Dump alias ' -c 2 'records to temporary alias file if necessary (naladd>0).' - write(6,601) nadcat - 601 format('...',i3,' new storms added to catalog.') - -c Finally, storm catalog and alias file (akavit) reconciliation. -c We force the alias file to be a direct subset of the storm -c catalog. - -c write(6,703) -c 703 format(/'...Storm catalog and alias file reconciliation. '/4x, -c 1 'Copy temporary alias file records to the new alias file', -c 2 ' if necessary.') - - iuntaw=iuntal - rewind iuntca - rewind iuntaw - - 720 read(iuntca,21,end=830) nalca,stmnmz,iymdmn,iutcmn,iymdca,iutcca, - 1 (rsmcca(nca),stidca(nca), - 2 nca=1,min(nalca,novrmx)) - if(rsmcca(1)(1:1) .eq. '!') write(iuntaw,711) nalca,stmnmz, - 1 (rsmcca(nca),stidca(nca), - 2 nca=1,min(nalca,novrmx)) - 711 format(i1,1x,a9,10(1x,a4,1x,a3)) - -c** ifndca=0 - -c if(stmnmz .eq. stnmal .and. -c 1 stidca(1) .eq. stidal(1)) then -c ifndz=0 -c write(6,801) stmnmz,stidca(1) -c 801 format('...Alias file and catalog have the same storm and basin ', -c 1 'id=',a,1x,a) - -c do nc=1,nalca -c if(rsmcal(nc) .eq. rsmcca(nc) .and. -c 1 stidal(nc) .eq. stidca(nc)) then -c ifndz=ifndz+1 -c endif -c enddo - -c if(ifndz .eq. nalca) then -c ifndca=1 -c go to 831 -c endif -c** endif - - go to 720 - 830 continue -cc831 continue - -c** if(ifndca .eq. 0) then -c write(6,833) nalca,stmnmz,(rsmcca(nca),stidca(nca), -c 1 nca=1,min(nalca,novrmx)) -c write(6,835) nalmx,stnmal,(rsmcal(nal),stidal(nal), -c 3 nal=1,min(nalmx,novrmx)) -c 833 format('******Storm in alias file but different or not in ', -c 1 'catalog. Catalog entry is:'/4x,i1,1x,a9,10(1x,a4,1x,a3) -c 835 format('Alias entry is:'/4x,i1,1x,a9,10(1x,a4,1x,a3)) -c call abort1(' RCNCIL',835) - -c else -c write(6,841) nalmx,stnmal,(rsmcal(nal),stidal(nal), -c 1 nal=1,min(nalmx,novrmx)) -c 841 format('...Alias file entry is identical to catalog. Entry is:'/ -c 1 4x,i1,1x,a9,10(1x,a4,1x,a3)) -c endif -c** go to 710 - -c Error summary - - write(6,901) nokay,ntest,nadd,(ercrcn(ner),ner=1,nercrc) - 901 format(//'...Results of the catalog reconciliation check are: ', - 1 'nokay=',i4,', ntest=',i4,', nadd=',i3//4x,'Error codes ', - 2 'are:'/(6x,a)) - write(6,903) - 903 format(/'...Okay records are:',100x,'erc'/) - do nok=1,nokay - write(6,909) nok,numoka(nok),okarec(nok),iefail(numoka(nok),5) - 909 format(3x,i4,'...',i4,'...',a,'...',i3) - enddo - - write(6,913) - 913 format(/'...Updated overland or overlapped (bad) records are:', - 1 68x,'erc') - do nba=1,nbad - if(iefail(numbad(nba),4) .eq. 5 .or. - 1 iefail(numbad(nba),4) .eq. 6 .or. - 2 iefail(numbad(nba),6) .eq. 
22) then - write(6,919) nba,numbad(nba),badrec(nba),iefail(numbad(nba),5) - 919 format(3x,i4,'...',i4,'...',a,'...',i3) - endif - enddo - - write(6,923) - 923 format(/'...Added records due to duplicate storm id are:',73x, - 1 'erc'/) - do nad=1,nadd - write(6,929) nad,numbad(nbad+nad),badrec(nbad+nad), - 1 iabs(iefail(numbad(nbad+nad),5)) - 929 format(3x,i4,'...',i4,'...',a,'...',i3) - enddo - nbad=nbad+nadd - - return - end - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: MNMXDA SUBSTITUTES MIN OR MAX DATE -C PRGMMR: S. LORD ORG: NP22 DATE: 1993-06-01 -C -C ABSTRACT: SUBSTITUTES MIN OR MAX DATE -C -C PROGRAM HISTORY LOG: -C 1993-06-01 S. LORD -C -C USAGE: CALL MNMXDA(IYMDNX,IUTCNX,IYMDZ,IUTCZ,DAYZ,MINMAX) -C INPUT ARGUMENT LIST: -C IYMDNX - MINIMUM YEAR,MONTH,DAY. -C -C IUTCNX - MINIMUM HOUR (UTC). -C IYMDZ - INPUT YEAR,MONTH,DAY. -C -C IUTCZ - INPUT HOUR (UTC). -C -C -C REMARKS: NONE. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - subroutine mnmxda(iymdnx,iutcnx,iymdz,iutcz,dayz,minmax) - - DIMENSION RINC(5) - -c in minmax<0, minimum is returned -c in minmax>0, minimum is returned - - call ztime(iymdnx,iutcnx,iyr,imo,ida,ihr,imin) - CALL W3DIFDAT((/IYR,IMO,IDA,0,0,0,0,0/),(/1899,12,31,0,0,0,0,0/), - $ 1,RINC) - JDY = NINT(RINC(1)) - call flday(jdy,ihr,imin,daynx) - - call ztime(iymdz,iutcz,iyr,imo,ida,ihr,imin) - CALL W3DIFDAT((/IYR,IMO,IDA,0,0,0,0,0/),(/1899,12,31,0,0,0,0,0/), - $ 1,RINC) - JDY = NINT(RINC(1)) - call flday(jdy,ihr,imin,dayz) - - if(minmax .gt. 0) then - if(dayz .gt. daynx) then - write(6,11) iymdnx,iutcnx,iymdz,iutcz - 11 format('...Substituting maximum date. iymdnx,iutcnx,iymdz,iutcz=', - 1 2(i9,i6.4)) - iymdnx=iymdz - iutcnx=iutcz - else -c write(6,13) iymdnx,iutcnx,iymdz,iutcz -c 13 format('...No substitution of maximum date. iymdnx,iutcnx,iymdz,', -c 1 'iutcz=',2(i9,i6.4)) - endif - - else if(minmax .lt. 0) then - if(dayz .lt. daynx) then - write(6,21) iymdnx,iutcnx,iymdz,iutcz - 21 format('...Substituting minimum date. iymdnx,iutcnx,iymdz,iutcz=', - 1 2(i9,i6.4)) - iymdnx=iymdz - iutcnx=iutcz - else -c write(6,23) iymdnx,iutcnx,iymdz,iutcz -c 23 format('...No substitution of minimum date. iymdnx,iutcnx,iymdz,', -c 1 'iutcz=',2(i9,i6.4)) - endif - - else - write(6,31) minmax - 31 format('******minmax value=',i5,' is improper. abort.') - CALL ABORT1(' MNMXDA',31) - endif - - return - end - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: SCLIST LISTS STORM CATALOG -C PRGMMR: S. LORD ORG: NP22 DATE: 1993-06-01 -C -C ABSTRACT: LISTS STORM CATALOG -C -C PROGRAM HISTORY LOG: -C 1993-06-01 S. LORD -C -C USAGE: CALL SCLIST(IUNTCA) -C INPUT ARGUMENT LIST: -C IUNTCA - UNIT NUMBER FOR CATALOG. -C -C REMARKS: NONE. 
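MNMXDA above converts each (YYYYMMDD, UTC) pair to a fractional day count via ZTIME/W3DIFDAT/FLDAY and substitutes the stored date whenever the new time is earlier (minmax < 0) or later (minmax > 0). A loose Python equivalent of that substitution, using datetime in place of the W3 library calls (the function and argument names here are illustrative, not from the source):

    from datetime import datetime

    def mnmxda_sketch(stored, new, minmax):
        """Keep the earlier pair when minmax < 0, the later pair when minmax > 0.
        Each pair is (yyyymmdd, hhmm), as in the Fortran argument list."""
        def as_dt(ymd, utc):
            return datetime.strptime(f"{ymd:08d}{utc:04d}", "%Y%m%d%H%M")
        if minmax > 0:
            return new if as_dt(*new) > as_dt(*stored) else stored
        if minmax < 0:
            return new if as_dt(*new) < as_dt(*stored) else stored
        raise ValueError("minmax must be nonzero")   # MNMXDA aborts in this case

    print(mnmxda_sketch((20230901, 1200), (20230902, 0), minmax=1))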
-C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - subroutine sclist(iuntca) - parameter (novrmx=70) - - character stnmca*9,stidca*3,rsmcca*4 - dimension stidca(novrmx),rsmcca(novrmx) - - rewind iuntca - nrec=0 - - write(6,1) iuntca - 1 format(/'...Storm catalog list for unit ',i3) - 10 continue - READ(IUNTCA,21,END=90) NALCA,STNMCA,IYMDMN,IUTCMN,IYMDMX,IUTCMX, - 1 (RSMCCA(NAL),STIDCA(NAL), - 2 NAL=1,MIN(NALCA,NOVRMX)) - nrec=nrec+1 - 21 FORMAT(I1,1X,A9,2(1X,I8,1X,I4.4),10(1X,A4,1X,A3)) - write(6,23) nrec,NALCA,STNMCA,IYMDMN,IUTCMN,IYMDMX,IUTCMX, - 1 (RSMCCA(NAL),STIDCA(NAL), - 2 NAL=1,MIN(NALCA,NOVRMX)) - 23 FORMAT(3x,i4,2x,I1,1X,A9,2(1X,I8,1X,I4.4),10(1X,A4,1X,A3)) - go to 10 - - 90 continue - write(6,91) - 91 format('...End of storm catalog list.'/) - rewind iuntca - return - end - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: AKLIST LISTS ALIAS FILE -C PRGMMR: S. LORD ORG: NP22 DATE: 1993-06-01 -C -C ABSTRACT: LISTS ALIAS FILE -C -C PROGRAM HISTORY LOG: -C 1993-06-01 S. LORD -C -C USAGE: CALL AKLIST(IUNTAL) -C INPUT ARGUMENT LIST: -C IUNTAL - UNIT NUMBER FOR ALIAS FILE. -C -C REMARKS: NONE. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - subroutine aklist(iuntal) - parameter (novrmx=70) - - character stnmal*9,stidal*3,rsmcal*4 - dimension stidal(novrmx),rsmcal(novrmx) - - rewind iuntal - nrec=0 - - write(6,1) iuntal - 1 format(/'...Storm alias list for unit ',i3) - 10 continue - READ(IUNTAL,21,END=90) NALAL,STNMAL,(RSMCAL(NAL),STIDAL(NAL), - - 1 NAL=1,MIN(NALAL,NOVRMX)) - nrec=nrec+1 - 21 FORMAT(I1,1X,A9,10(1X,A4,1X,A3)) - write(6,23) nrec,NALAL,STNMAL,(RSMCAL(NAL),STIDAL(NAL), - 1 NAL=1,MIN(NALAL,NOVRMX)) - 23 FORMAT(3x,i4,2x,I1,1X,A9,10(1X,A4,1X,A3)) - go to 10 - - 90 continue - write(6,91) - 91 format('...End of storm alias list.'/) - rewind iuntal - return - end - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: STCATI GETS STORM ID FROM CATALOG -C PRGMMR: S. LORD ORG: NP22 DATE: 1993-06-01 -C -C ABSTRACT: LOOKS FOR GIVEN STORM ID AND RSMC IN CATALOG -C -C PROGRAM HISTORY LOG: -C 1993-06-01 S. LORD -C -C USAGE: CALL STCATI(IUNTCA,STMIDZ,RSMCZ,STMIDX,IFND) -C INPUT ARGUMENT LIST: -C IUNTCA - UNIT NUMBER FOR STORM CATALOG. -C -C STMIDZ - REQUESTED STORM ID. -C RSMCZ - REQUESTED RSMC. -C -C OUTPUT ARGUMENT LIST: -C STMIDX - CATALOGED STORM ID. -C IFND - 1 IF FOUND. -C - THE RSMC CHECK. -C REMARKS: NONE. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - subroutine stcati(iuntca,stmidz,rsmcz,stmidx,ifnd) - - parameter (novrmx=70) - - dimension rsmcca(novrmx),stidca(novrmx) - - character stmidz*(*),stmidx*(*),rsmcz*(*) - character stnmca*9,stidca*3,rsmcca*4 - - ifnd=0 - rewind iuntca - write(6,1) stmidz,rsmcz - 1 format('...Entering stcati looking for storm id,rsmc=',2(a,2x)) - 10 continue - READ(IUNTCA,21,END=90) NALCA,STNMCA,IYMDMN,IUTCMN,IYMDMX,IUTCMX, - 1 (RSMCCA(NCA),STIDCA(NCA), - 2 NCA=1,MIN(NALCA,NOVRMX)) - 21 FORMAT(I1,1X,A9,2(1X,I8,1X,I4.4),10(1X,A4,1X,A3)) - do nca=1,min(nalca,novrmx) - if(stmidz .eq. stidca(nca) .and. rsmcz .eq. rsmcca(nca)) then - ifnd=1 - stmidx=stidca(1) - rewind iuntca - return - endif - enddo - go to 10 - - 90 continue - - rewind iuntca - return - end - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: STCATN GETS STORM NAME AND LAST DATE FROM CATLG -C PRGMMR: S. LORD ORG: NP22 DATE: 1993-08-25 -C -C ABSTRACT: LOOKS FOR GIVEN STORM ID AND RSMC IN CATALOG -C -C PROGRAM HISTORY LOG: -C 1993-08-25 S. 
LORD -C -C USAGE: CALL STCATN(IUNTCA,STMNMZ,IDATEZ,IUTCZ,IFND) -C INPUT ARGUMENT LIST: -C IUNTCA - UNIT NUMBER FOR STORM CATALOG. -C STMNMZ - REQUESTED STORM NAME. -C -C OUTPUT ARGUMENT LIST: -C IDATEZ - LATEST DATE FOUND FOR NAMED STORM. -C IUTCZ - LATEST HHMM FOUND FOR NAMED STORM. -C IFND - 1 IF FOUND. -C -C REMARKS: NONE. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE STCATN(IUNTCA,STMNMZ,IDATEZ,IUTCZ,IFND) - - character STMNMZ*(*) - character stnmca*9 - - ifnd=0 - IDATEZ=-999999 - IUTCZ=-999 - rewind iuntca - write(6,1) STMNMZ - 1 format('...Entering stcatn looking for storm name=',a) - 10 continue - READ(IUNTCA,21,END=90) NALCA,STNMCA,IYMDMN,IUTCMN,IYMDMX,IUTCMX - 21 FORMAT(I1,1X,A9,2(1X,I8,1X,I4.4)) - if(STNMCA .eq. STMNMZ) then - ifnd=1 - IDATEZ=IYMDMX - IUTCZ=IUTCMX - endif - go to 10 - - 90 continue - - rewind iuntca - return - end - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: ADFSTF ADDS FIRST OCCURRENCE FLAGS TO RECORDS -C PRGMMR: S. J. LORD ORG: NP22 DATE: 1991-06-07 -C -C ABSTRACT: ADDS FIRST OCCURRENCE FLAGS TO RECORDS AS APPROPRIATE, -C EVEN IF A FLAG HAS BEEN CLASSIFIED AS A BAD RECORD. -C -C PROGRAM HISTORY LOG: -C 1991-06-07 S. J. LORD -C 1991-06-07 S. J. LORD DISABLED FIRST FLAGS FOR RELOCATED STORMS -C -C USAGE: CALL ADFSTF(IUNTHA,NOKAY,NBAD,MAXREC,MAXCKS,IECOST,NUMBAD, -c IEFAIL,DUMREC,OKAREC,BADREC) -C INPUT ARGUMENT LIST: -C IUNTHA - UNIT NUMBER FOR THE ALIAS SHORT-TERM HISTORY FILE -C NOKAY - LENGTH OF ARRAY OKAREC -C NBAD - LENGTH OF ARRAY BADREC AND NUMBAD -C MAXREC - LENGTH OF FIRST DIMENSION OF ARRAY IEFAIL -C MAXCKS - LENGTH OF SECOND DIMENSION OF ARRAY IEFAIL -C IECOST - ERROR CODE FOR OVERLAND (COASTAL) TROPICAL CYCLONE -C - POSITIONS -C NUMBAD - ARRAY CONTAINING INDEX NUMBER OF EACH BAD RECORD -C IEFAIL - 2-D ARRAY OF ERROR CODES FOR ALL RECORDS -C DUMREC - DUMMY CHARACTER VARIABLE FOR READING SHORT-TERM -C - HISTORY RECORDS -C OKAREC - CHARACTER ARRAY OF OK RECORDS, RECORDS THAT HAVE -C - PASSES ALL Q/C CHECKS SO FAR -C BADREC - CHARACTER ARRAY OF BAD RECORDS, RECORDS THAT HAVE -C - FAILED AT LEAST ONE Q/C CHECK SO FAR -C -C OUTPUT ARGUMENT LIST: -C DUMREC - DESCRIPTION AS ABOVE -C OKAREC - SAME AS INPUT, EXCEPT FIRST OCCURENCE FLAG MAY HAVE -C - BEEN ADDED -C BADREC - SAME AS INPUT, EXCEPT FIRST OCCURENCE FLAG MAY HAVE -C - BEEN ADDED IN THE CASE OF OVER-LAND (COASTAL) STORMS -C -C INPUT FILES: -C UNIT "IUNTHA" - SHORT-TERM HISTORY FILE -C -C OUTPUT FILES: -C UNIT 06 - STANDARD OUTPUT PRINT -C -C REMARKS: NONE. 
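ADFSTF below must handle short-term history records written in both the old non-Y2K and the new Y2K-compliant tcvitals layouts: it decides which form it has by where the latitude N/S indicator sits, and for the old form it expands the 2-digit year with a windowing rule (values above 20 become 19xx, otherwise 20xx). A small Python sketch of that rule, assuming the column positions shown in the code (1-based columns 20-21 for the year, 35 or 37 for the N/S flag):

    def window_year(record):
        """Expand a 2-digit year in columns 20-21 to 4 digits, mirroring the
        windowing applied to old-format records in ADFSTF below."""
        if record[34] in "NS":                    # N/S flag in column 35: old form
            century = "19" if record[19:21] > "20" else "20"
            return record[:19] + century + record[19:]
        return record                             # N/S in column 37: already 4-digit

    stub = "A" * 19 + "99" + "X" * 13 + "N"       # synthetic old-format fragment
    assert window_year(stub)[19:23] == "1999"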
-C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE ADFSTF(IUNTHA,NOKAY,NBAD,MAXREC,MAXCKS,IECOST,NUMBAD, - 1 IEFAIL,DUMREC,OKAREC,BADREC) - - SAVE - - LOGICAL FOUNDO,FOUNDB - - CHARACTER*(*) DUMREC,OKAREC(NOKAY),BADREC(NBAD) - CHARACTER*100 DUMY2K - - PARAMETER (MAXCHR=95) - PARAMETER (MAXVIT=15) - - CHARACTER BUFIN*1,RSMCZ*4,STMIDZ*3,STMNMZ*9,FSTFLZ*1,STMDPZ*1, - 1 LATNS*1,LONEW*1,FMTVIT*6,BUFINZ*100,RELOCZ*1 - - DIMENSION IVTVAR(MAXVIT),ISTVAR(MAXVIT),IENVAR(MAXVIT) - - DIMENSION BUFIN(MAXCHR),FMTVIT(MAXVIT) - - EQUIVALENCE (BUFIN(1),RSMCZ),(BUFIN(5),RELOCZ),(BUFIN(6),STMIDZ), - 1 (BUFIN(10),STMNMZ),(BUFIN(19),FSTFLZ), - 2 (BUFIN(37),LATNS),(BUFIN(43),LONEW), - 3 (BUFIN(95),STMDPZ),(BUFIN(1),BUFINZ) - - EQUIVALENCE (IVTVAR(1),IDATEZ),(IVTVAR(2),IUTCZ) - - DIMENSION IEFAIL(MAXREC,0:MAXCKS),NUMBAD(NBAD) - - DATA FMTVIT/'(I8.8)','(I4.4)','(I3.3)','(I4.4)',2*'(I3.3)', - 1 3*'(I4.4)','(I2.2)','(I3.3)',4*'(I4.4)'/, - 2 ISTVAR/20,29,34,39,45,49,53,58,63,68,71,75,80,85,90/, - 3 IENVAR/27,32,36,42,47,51,56,61,66,69,73,78,83,88,93/, - 4 IFSTFL/19/,ISTID/6/,IENID/8/ - - DATA NUM/1/ - - WRITE(6,1) NOKAY,NBAD,IECOST - 1 FORMAT(/'...ENTERING ADFSTF WITH NOKAY,NBAD,IECOST=',3I4/4X, - 1 'WARNING: FIRST OCCURRENCE FLAGS (FOF) MAY OR MAY NOT BE', - 2 ' PRESENT IN THE ORIGINAL SHORT-TERM ALIAS FILE DUE TO ', - 3 'THIS ROUTINE.'/4X,'RELIABLE FOFS ARE PRESENT ONLY IN ', - 4 'THE ALIAS SHORT-TERM HISTORY FILE.') - -C CHECK EACH ALIAS SHORT-TERM HISTORY RECORD FIRST VERSUS THE -C "OKAY" RECORDS AND SECOND VERSUS THE "BAD" RECORDS THAT -C HAVE ONLY AN OVER COAST ERROR - - DO NOK=1,NOKAY - BUFINZ=OKAREC(NOK) - FOUNDO=.FALSE. - REWIND IUNTHA - NREC=0 - - 10 CONTINUE - - READ(IUNTHA,11,END=90) DUMREC - 11 FORMAT(A) - -C AT THIS POINT WE DO NOT KNOW IF A 2-DIGIT YEAR BEGINS IN COLUMN 20 -C OF THE RECORD (OLD NON-Y2K COMPLIANT FORM) OR IF A 4-DIGIT YEAR -C BEGINS IN COLUMN 20 (NEW Y2K COMPLIANT FORM) - TEST ON LOCATION OF -C LATITUDE N/S INDICATOR TO FIND OUT ... - - IF(DUMREC(35:35).EQ.'N' .OR. DUMREC(35:35).EQ.'S') THEN - -C ... THIS RECORD STILL CONTAINS THE OLD 2-DIGIT FORM OF THE YEAR - -C ... THIS PROGRAM WILL CONVERT THE RECORD TO A 4-DIGIT YEAR USING THE -C "WINDOWING" TECHNIQUE SINCE SUBSEQUENT LOGIC EXPECTS THIS - - PRINT *, ' ' - PRINT *, '==> Read in RECORD from tcvitals file -- contains a', - $ ' 2-digit year "',DUMREC(20:21),'"' - PRINT *, ' ' - PRINT '(a,i0,a,a)', 'From unit ',iuntha,'; DUMREC-4: ',dumrec - PRINT *, ' ' - DUMY2K(1:19) = DUMREC(1:19) - IF(DUMREC(20:21).GT.'20') THEN - DUMY2K(20:21) = '19' - ELSE - DUMY2K(20:21) = '20' - ENDIF - DUMY2K(22:100) = DUMREC(20:100) - DUMREC = DUMY2K - PRINT *, ' ' - PRINT *, '==> 2-digit year converted to 4-digit year "', - $ DUMREC(20:23),'" via windowing technique' - PRINT *, ' ' - PRINT '(a,i0,a,a)', 'From unit ',iuntha,'; DUMREC-4: ',dumrec - PRINT *, ' ' - - ELSE IF(DUMREC(37:37).EQ.'N' .OR. DUMREC(37:37).EQ.'S') THEN - -C ... THIS RECORD CONTAINS THE NEW 4-DIGIT FORM OF THE YEAR -C ... 
NO CONVERSION NECESSARY SINCE THIS SUBSEQUENT LOGIC EXPECTS THIS - - PRINT *, ' ' - PRINT *, '==> Read in RECORD from tcvitals file -- contains a', - $ ' 4-digit year "',DUMREC(20:23),'"' - PRINT *, ' ' - PRINT '(a,i0,a,a)', 'From unit ',iuntha,'; DUMREC-4: ',dumrec - PRINT *, ' ' - PRINT *, '==> No conversion necessary' - PRINT *, ' ' - - ELSE - - PRINT *, ' ' - PRINT *, '***** Cannot determine if this record contains ', - $ 'a 2-digit year or a 4-digit year - skip it and try reading ', - $ 'the next record' - PRINT *, ' ' - GO TO 10 - - END IF - - NREC=NREC+1 - IF(STMIDZ .EQ. DUMREC(ISTID:IENID) .AND. - 1 DUMREC(IFSTFL:IFSTFL) .NE. '*') THEN - DO IV=1,2 - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVAR(IV),IERDEC,FMTVIT(IV), - 1 DUMREC) - ENDDO - IDTDUM=IDATEZ - IUTDUM=IUTCZ - DO IV=1,2 - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVAR(IV),IERDEC,FMTVIT(IV), - 1 OKAREC(NOK)) - ENDDO - -C IF THERE ARE DUPLICATE DATES, THEN WE ASSUME THE OKAY RECORD -C IS AN UPDATED RECORD AND WE TRANSFER THE FIRST OCCURRENCE -C FLAG TO THE UPDATED RECORD. THIS CREATES A PARTIAL -C DUPLICATE RECORD THAT WILL BE DEALT WITH IN RITSTH. - - IF(IDATEZ .EQ. IDTDUM .AND. IUTCZ .EQ. IUTDUM) THEN - OKAREC(NOK)(IFSTFL:IFSTFL)=DUMREC(IFSTFL:IFSTFL) - ELSE - FOUNDO=.TRUE. - ENDIF - ENDIF - -C WRITE(6,87) NOK,FOUNDO,DUMREC,OKAREC(NOK) -C 87 FORMAT('...CHECKING FOR FIRST OCCURRENCE, NOK,FOUNDO,DUMREC,', -C 1 'OKAREC=',I3,1X,L1/4X,A/4X,A) - GO TO 10 - - 90 CONTINUE - -C IF THERE ARE NO MATCHING STORMS IN THE SHORT-TERM HISTORY FILE, -C FIND THE EARLIEST STORM IN THE OKAY RECORDS - - IF(.NOT. FOUNDO) THEN - CALL FSTSTM(NOKAY,NOK,NFIRST,OKAREC) - OKAREC(NFIRST)(IFSTFL:IFSTFL)=':' - ENDIF - - ENDDO - - DO NBA=1,NBAD - - IF(IEFAIL(NUMBAD(NBA),4) .EQ. IECOST) THEN - - DO NCK=1,MAXCKS - IF(NCK .NE. 4 .AND. IEFAIL(NUMBAD(NBA),NCK) .GT. 0) GO TO 200 - ENDDO - - BUFINZ=BADREC(NBA) - REWIND IUNTHA - FOUNDB=.FALSE. - NREC=0 - - 160 CONTINUE - - READ(IUNTHA,11,END=190) DUMREC - NREC=NREC+1 - -C AT THIS POINT WE DO NOT KNOW IF A 2-DIGIT YEAR BEGINS IN COLUMN 20 -C OF THE RECORD (OLD NON-Y2K COMPLIANT FORM) OR IF A 4-DIGIT YEAR -C BEGINS IN COLUMN 20 (NEW Y2K COMPLIANT FORM) - TEST ON LOCATION OF -C LATITUDE N/S INDICATOR TO FIND OUT ... - - IF(DUMREC(35:35).EQ.'N' .OR. DUMREC(35:35).EQ.'S') THEN - -C ... THIS RECORD STILL CONTAINS THE OLD 2-DIGIT FORM OF THE YEAR - -C ... THIS PROGRAM WILL CONVERT THE RECORD TO A 4-DIGIT YEAR USING THE -C "WINDOWING" TECHNIQUE SINCE SUBSEQUENT LOGIC EXPECTS THIS - - PRINT *, ' ' - PRINT *, '==> Read in RECORD from tcvitals file -- contains a', - $ ' 2-digit year "',DUMREC(20:21),'"' - PRINT *, ' ' - PRINT '(a,i0,a,a)', 'From unit ',iuntha,'; DUMREC-5: ',dumrec - PRINT *, ' ' - DUMY2K(1:19) = DUMREC(1:19) - IF(DUMREC(20:21).GT.'20') THEN - DUMY2K(20:21) = '19' - ELSE - DUMY2K(20:21) = '20' - ENDIF - DUMY2K(22:100) = DUMREC(20:100) - DUMREC = DUMY2K - PRINT *, ' ' - PRINT *, '==> 2-digit year converted to 4-digit year "', - $ DUMREC(20:23),'" via windowing technique' - PRINT *, ' ' - PRINT '(a,i0,a,a)', 'From unit ',iuntha,'; DUMREC-5: ',dumrec - PRINT *, ' ' - - ELSE IF(DUMREC(37:37).EQ.'N' .OR. DUMREC(37:37).EQ.'S') THEN - -C ... THIS RECORD CONTAINS THE NEW 4-DIGIT FORM OF THE YEAR -C ... 
NO CONVERSION NECESSARY SINCE THIS SUBSEQUENT LOGIC EXPECTS THIS - - PRINT *, ' ' - PRINT *, '==> Read in RECORD from tcvitals file -- contains a', - $ ' 4-digit year "',DUMREC(20:23),'"' - PRINT *, ' ' - PRINT '(a,i0,a,a)', 'From unit ',iuntha,'; DUMREC-5: ',dumrec - PRINT *, ' ' - PRINT *, '==> No conversion necessary' - PRINT *, ' ' - - ELSE - - PRINT *, ' ' - PRINT *, '***** Cannot determine if this record contains ', - $ 'a 2-digit year or a 4-digit year - skip it and try reading ', - $ 'the next record' - PRINT *, ' ' - GO TO 160 - - END IF - - IF(STMIDZ .EQ. DUMREC(ISTID:IENID) .AND. - 1 DUMREC(IFSTFL:IFSTFL) .NE. '*') THEN - DO IV=1,2 - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVAR(IV),IERDEC,FMTVIT(IV), - 1 DUMREC) - ENDDO - IDTDUM=IDATEZ - IUTDUM=IUTCZ - DO IV=1,2 - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVAR(IV),IERDEC,FMTVIT(IV), - 1 BADREC(NBA)) - ENDDO - -C IF THERE ARE DUPLICATE DATES, THEN WE ASSUME THE BAD RECORD -C IS AN UPDATED RECORD AND WE TRANSFER THE FIRST OCCURRENCE -C FLAG TO THE UPDATED RECORD. THIS CREATES A PARTIAL -C DUPLICATE RECORD THAT WILL BE DEALT WITH IN RITSTH. - - IF(IDATEZ .EQ. IDTDUM .AND. IUTCZ .EQ. IUTDUM) THEN - BADREC(NBA)(IFSTFL:IFSTFL)=DUMREC(IFSTFL:IFSTFL) - ELSE - FOUNDB=.TRUE. - ENDIF - ENDIF - -C WRITE(6,187) NBA,DUMREC,BADREC(NBA) -C 187 FORMAT('...CHECKING FOR FIRST OCCURRENCE, NBA,DUMREC,BADREC=',I3/ -C 1 4X,A/4X,A) - GO TO 160 - - 190 CONTINUE - -C IF THERE ARE NO MATCHING STORMS IN THE SHORT-TERM HISTORY FILE, -C FIND THE EARLIEST STORM IN THE BAD RECORDS - - IF(.NOT. FOUNDB) THEN - CALL FSTSTM(NBAD,NBA,NFIRST,BADREC) - BADREC(NFIRST)(IFSTFL:IFSTFL)='*' - ENDIF - - ENDIF - 200 CONTINUE - ENDDO - -C IF THERE ARE NO RECORDS IN THE SHORT-TERM HISTORY FILE, -C WE MUST ASSIGN A FIRST OCCURRENCE FLAG TO EACH STORM - - IF(NREC .EQ. 0) THEN - DO NOK=1,NOKAY - CALL FSTSTM(NOKAY,NOK,NFIRST,OKAREC) - OKAREC(NFIRST)(IFSTFL:IFSTFL)=':' - ENDDO - ENDIF - -C ADD FIRST OCCURRENCE FLAGS FOR RELOCATED STORMS -C DISABLED 4-9-93 - -C DO NOK=1,NOKAY -C BUFINZ=OKAREC(NOK) -C IF(RELOCZ .EQ. 'R') OKAREC(NOK)(IFSTFL:IFSTFL)=':' -C ENDDO - -C VERY SPECIAL CASE: NO RECORDS IN THE SHORT-TERM HISTORY FILE -C AND A RECORD HAS AN OVER LAND ERROR - - IF(NREC .EQ. 0) THEN - DO NBA=1,NBAD - - IF(IEFAIL(NUMBAD(NBA),4) .EQ. IECOST) THEN - - DO NCK=1,MAXCKS - IF(NCK .NE. 4 .AND. IEFAIL(NUMBAD(NBA),NCK) .GT. 0) GO TO 400 - ENDDO - - BADREC(NBA)(IFSTFL:IFSTFL)='*' - - ENDIF - 400 CONTINUE - ENDDO - ENDIF - - WRITE(6,401) NOKAY,NBAD,NREC - 401 FORMAT(/'...LEAVING ADFSTF, NOKAY, NBAD=',2I4/4X,I3,' RECORDS ', - 1 'READ FROM ALIAS SHORT-TERM HISTORY FILE.') - - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: FSTSTM FINDS FIRST OCCURRENCE FOR A STORM -C PRGMMR: S. J. LORD ORG: NP22 DATE: 1991-07-18 -C -C ABSTRACT: FINDS FIRST OCCURRENCE OF A PARTICULAR STORM BY PICKING -C OUT THE MINIMUM TIME. -C -C PROGRAM HISTORY LOG: -C 1991-07-18 S. J. 
LORD -C -C USAGE: CALL FSTSTM(NRCMX,NRCSTM,NFIRST,DUMREC) -C INPUT ARGUMENT LIST: -C NRCMX - LENGTH OF ARRAY DUMREC -C NRCSTM - INDEX OF THE RECORD CONTAINING THE DESIRED STORM -C DUMREC - ARRAY OF INPUT RECORDS -C -C OUTPUT ARGUMENT LIST: -C NFIRST - INDEX OF THE FIRST RECORD FOR THE DESIRED STORM -C DUMREC - DESCRIPTION AS ABOVE -C -C REMARKS: NONE -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE FSTSTM(NRCMX,NRCSTM,NFIRST,DUMREC) - - CHARACTER*(*) DUMREC(NRCMX) - - DIMENSION RINC(5) - - SAVE - - PARAMETER (MAXCHR=95) - PARAMETER (MAXVIT=15) - - CHARACTER BUFIN*1,RSMCZ*4,STMIDZ*3,STMNMZ*9,FSTFLZ*1,STMDPZ*1, - 1 LATNS*1,LONEW*1,FMTVIT*6,BUFINZ*100,RELOCZ*1 - - DIMENSION IVTVAR(MAXVIT),ISTVAR(MAXVIT),IENVAR(MAXVIT) - - DIMENSION BUFIN(MAXCHR),FMTVIT(MAXVIT) - - EQUIVALENCE (BUFIN(1),RSMCZ),(BUFIN(5),RELOCZ),(BUFIN(6),STMIDZ), - 1 (BUFIN(10),STMNMZ),(BUFIN(19),FSTFLZ), - 2 (BUFIN(37),LATNS),(BUFIN(43),LONEW), - 3 (BUFIN(95),STMDPZ),(BUFIN(1),BUFINZ) - - EQUIVALENCE (IVTVAR(1),IDATEZ),(IVTVAR(2),IUTCZ) - - DATA FMTVIT/'(I8.8)','(I4.4)','(I3.3)','(I4.4)',2*'(I3.3)', - 1 3*'(I4.4)','(I2.2)','(I3.3)',4*'(I4.4)'/, - 2 ISTVAR/20,29,34,39,45,49,53,58,63,68,71,75,80,85,90/, - 3 IENVAR/27,32,36,42,47,51,56,61,66,69,73,78,83,88,93/, - 4 ISTID/6/,IENID/8/ - - DATA NUM/1/ - -C WRITE(6,1) NRCMX,NRCSTM -C 1 FORMAT(/'...ENTERING FSTSTM WITH NRCMX,NRCSTM=',2I4) - - DAYFST=1.0E10 - -C PICK OUT THE RECORD WITH THE MINIMUM DATE FOR THE CHOSEN STORM - - DO NCOM=1,NRCMX - BUFINZ=DUMREC(NCOM) - IF(STMIDZ .EQ. DUMREC(NRCSTM)(ISTID:IENID)) THEN - DO IV=1,2 - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVAR(IV),IERDEC,FMTVIT(IV), - 1 BUFINZ) - ENDDO - CALL ZTIME(IDATEZ,IUTCZ,IYR,IMO,IDA,IHR,IMIN) - CALL W3DIFDAT((/IYR,IMO,IDA,0,0,0,0,0/),(/1899,12,31,0,0,0,0,0/), - $ 1,RINC) - JDY = NINT(RINC(1)) - CALL FLDAY(JDY,IHR,IMIN,DAYZ) - IF(DAYZ .LE. DAYFST) THEN - NFIRST=NCOM - DAYFST=DAYZ - ENDIF - ENDIF - ENDDO - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: RITCUR WRITES Q/C RECORDS TO CURRENT DATA FILE -C PRGMMR: S. LORD ORG: NP22 DATE: 1990-11-01 -C -C ABSTRACT: WRITES CURRENT QUALITY CONTROLLED RECORDS TO THE CURRENT -C FILE (UNIT 60). -C -C PROGRAM HISTORY LOG: -C 1990-11-01 S. LORD -C 1991-07-22 S. LORD ADDED IDATEZ,IUTCZ TO ARGUMENT LIST -C 1992-07-01 S. LORD REVISION FOR TIME WINDOW -C -C USAGE: CALL RITCUR(IUNTRD,IUNTCU,NTEST,NOKAY,NBAD,IDATCU,JUTCCU,DAY0, -C MAXREC,IFLLCK,NUMTST,NUMOKA,NUMBAD,FILES,LNDFIL, -C ZZZREC,NNNREC,DUMREC,SCRREC,TSTREC,OKAREC,BADREC) -C INPUT ARGUMENT LIST: -C IUNTRD - UNIT NUMBER FOR READING RECORDS -C IUNTCU - UNIT NUMBER FOR CURRENT DATA FILE -C NTEST - NUMBER OF INPUT RECORDS (>0 FOR FILES=FALSE OPTION, -C - =0 FOR FILES=TRUE OPTION) -C IDATCU - DATE (YYYYMMDD) FOR ACCEPTANCE WINDOW -C JUTCCU - UTC (HHMMSS) FOR ACCEPTANCE WINDOW -C DAY0 - DATE OF ACCEPTANCE WINDOW -C MAXREC - DIMENSION OF INPUT ARRAYS -C FILES - LOGICAL VARIABLE, TRUE IF UPDATED SHORT-TERM HISTORY -C FILE HAS BEEN CREATED -C LNDFIL - LOGICAL VARIABLE, TRUE IF OVER-LAND FILTER SHOULD BE -C APPLIED TO CURRENT RECORDS. -C RECORDS TO THE CURRENT FILE -C DUMREC - CHARACTER VARIABLE -C TSTREC - CHARACTER ARRAY (LENGTH MAXREC) OF INPUT RECORDS. ONLY -C - THE FIRST NTEST ARE VALID IN THE CASE OF FILES=.FALSE. 
-C NUMTST - INDEX FOR ARRAY TSTREC -C ZZZREC - CHARACTER VARIABLE CONTAINING HEADER INFO -C NNNREC - CHARACTER VARIABLE CONTAINING COLUMN INFO -C -C OUTPUT ARGUMENT LIST: -C OKAREC - CONTAINS CANDIDATE QUALITY CONTROLLED RECORDS COPIED -C - TO THE CURRENT FILE -C NOKAY - NUMBER OF OKAY RECORDS -C NBAD - NUMBER OF RECORDS THAT FAILED THE OVERLAND CHECK -C IFLLCK - CONTAINS FAILURE CODE OF BAD RECORDS -C BADREC - ARRAY CONTAINING BAD RECORDS -C SCRREC - SCRATCH ARRAY CONTAINING STORM IDS AND NAMES -C NUMOKA - ARRAY CONTAINING INDICES OF OKAY RECORDS -C NUMBAD - ARRAY CONTAINING INDICES OF BAD RECORDS -C -C INPUT FILES: -C UNIT 20 - SCRATCH FILE CONTAINING QUALITY CONTROLLED RECORDS -C - IUNTRD POINTS TO THIS FILE WHEN FILES=.TRUE. -C UNIT 22 - ALIAS SHORT-TERM HISTORY FILE CONTAINING RECORDS -C - PROCESSED BY THIS PROGRAM FOR THE LAST SEVERAL DAYS. -C - IUNTRD POINTS TO THIS FILE WHEN FILES=.FALSE. -C -C OUTPUT FILES: -C UNIT 06 - STANDARD OUTPUT PRINT -C UNIT 60 - QUALITY CONTROLLED RECORDS (IUNTCU) -C -C REMARKS: IF LENGTH OF OUTPUT RECORDS (MAXCHR) EXCEEDS THE DESIGNATED -C RECORD LENGTH FOR THE FILE (MAXSPC), THIS SUBROUTINE WILL -C PRINT A NASTY MESSAGE AND CALL AN ABORT1 PROGRAM THAT GIVES -C A RETURN CODE OF 20 FOR THIS PROGRAM EXECUTION. UNDER -C THE FILES=TRUE OPTION, RECORDS ARE READ FROM THE SCRATCH -C FILE, DATE CHECKED, CHECKED FOR OVERLAND POSITIONS IF NEED -C BE, AND THEN WRITTEN TO THE CURRENT FILE. UNDER THE FILES= -C FALSE OPTION, ALL RECORDS PROCESSED BY THE PRESENT RUN OF -C THIS PROGRAM MAY BE WRITTEN IN ADDITION TO SOME RECORDS FROM -C THE ALIAS SHORT-TERM HISTORY FILE. IN BOTH OPTIONS, ONLY THE -C LATEST STORM RECORD IS WRITTEN. ALL RECORDS LIE IN A TIME -C WINDOW GIVEN BY DAY0. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE RITCUR(IUNTRD,IUNTCU,NTEST,NOKAY,NBAD,IDATCU,JUTCCU, - 1 DAY0,MAXREC,IFLLCK,NUMTST,NUMOKA,NUMBAD,FILES, - 2 LNDFIL,ZZZREC,NNNREC,DUMREC,SCRREC,TSTREC, - 3 OKAREC,BADREC) - - PARAMETER (MAXSPC=100) - - SAVE - - LOGICAL FIRST,FILES,LNDFIL,FOUND - - CHARACTER*(*) TSTREC(0:MAXREC),OKAREC(MAXREC),BADREC(MAXREC), - 1 ZZZREC,NNNREC,DUMREC,SCRREC(0:MAXREC) - CHARACTER*100 DUMY2K - - PARAMETER (MAXCHR=95) - PARAMETER (MAXVIT=15) - - CHARACTER FMTVIT*6 - - DIMENSION IVTVAR(MAXVIT),ISTVAR(MAXVIT),IENVAR(MAXVIT) - - DIMENSION FMTVIT(MAXVIT) - - EQUIVALENCE (IVTVAR(1),IDATEZ),(IVTVAR(2),IUTCZ) - - DIMENSION IFLLCK(MAXREC),NUMTST(MAXREC),NUMOKA(MAXREC), - 1 NUMBAD(MAXREC) - - DIMENSION RINC(5) - - DATA FMTVIT/'(I8.8)','(I4.4)','(I3.3)','(I4.4)',2*'(I3.3)', - 2 3*'(I4.4)','(I2.2)','(I3.3)',4*'(I4.4)'/, - 3 ISTVAR/20,29,34,39,45,49,53,58,63,68,71,75,80,85,90/, - 4 IENVAR/27,32,36,42,47,51,56,61,66,69,73,78,83,88,93/, - 5 ISTID/6/,IENID/8/ - - DATA FIRST/.TRUE./,NUM/1/,FIVMIN/3.4722E-3/ - - WRITE(6,1) IUNTRD,IUNTCU,FILES,LNDFIL,IDATCU,JUTCCU,DAY0 - 1 FORMAT(/'...ENTERING RITCUR WITH IUNTRD,IUNTCU,FILES,LNDFIL,', - 1 'IDATCU,JUTCCU,DAY0',2I3,2L2,I9,I7,F10.3) - - IF(FIRST) THEN - FIRST=.FALSE. - IF(MAXCHR .GT. MAXSPC) THEN - WRITE(6,5) MAXCHR,MAXSPC - 5 FORMAT(/'******INSUFFICIENT SPACE ALLOCATED FOR CURRENT HISTORY ', - 1 'FILE.'/7X,'MAXCHR, AVAILABLE SPACE ARE:',2I4) - CALL ABORT1(' RITCUR',1) - ENDIF - - ENDIF - -C RITCUR USES EITHER OF TWO POSSIBLE SOURCES FOR CURRENT RECORDS: -C 1) IF FILES=.TRUE., THE SCRATCH FILE (IUNTOK) CONTAINS -C ALL THE CURRENT RECORDS, INCLUDING THOSE PROCESSED BY A -C PREVIOUS RUN OF THIS PROGRAM. HOWEVER, A POSSIBILITY -C EXISTS THAT A CURRENT COASTAL RECORD MAY BE IN THE -C SCRATCH FILE. 
THEREFORE, THERE IS AN OPTIONAL FILTER -C (LNDFIL) BY USING A CALL TO SELACK TO WEED OUT THESE -C RECORDS. - -C 2) IF FILES=.FALSE., THE CURRENT RECORDS ARE THOSE -C PROCESSED BY THE PRESENT RUN OF THIS PROGRAM (OKAREC) -C AND CANDIDATES FROM THE ALIAS SHORT-TERM HISTORY FILE. - -C IN EITHER CASE, ONLY THE LATEST RECORD FOR EACH STORM IS -C WRITTEN. - - REWIND IUNTCU - REWIND IUNTRD - NUNIQ=0 - SCRREC(NUNIQ)='ZZZ' - print *, ' ' - print *, ' ' - - 10 CONTINUE - - READ(IUNTRD,11,END=100) DUMREC - 11 FORMAT(A) - -C AT THIS POINT WE DO NOT KNOW IF A 2-DIGIT YEAR BEGINS IN COLUMN 20 -C OF THE RECORD (OLD NON-Y2K COMPLIANT FORM) OR IF A 4-DIGIT YEAR -C BEGINS IN COLUMN 20 (NEW Y2K COMPLIANT FORM) - TEST ON LOCATION OF -C LATITUDE N/S INDICATOR TO FIND OUT ... - - IF(DUMREC(35:35).EQ.'N' .OR. DUMREC(35:35).EQ.'S') THEN - -C ... THIS RECORD STILL CONTAINS THE OLD 2-DIGIT FORM OF THE YEAR - -C ... THIS PROGRAM WILL CONVERT THE RECORD TO A 4-DIGIT YEAR USING THE -C "WINDOWING" TECHNIQUE SINCE SUBSEQUENT LOGIC EXPECTS THIS - - PRINT *, ' ' - PRINT *, '==> Read in RECORD from tcvitals file -- contains a', - $ ' 2-digit year "',DUMREC(20:21),'"' - PRINT *, ' ' - PRINT '(a,i0,a,a)', 'From unit ',iuntrd,'; DUMREC-6: ',dumrec - PRINT *, ' ' - DUMY2K(1:19) = DUMREC(1:19) - IF(DUMREC(20:21).GT.'20') THEN - DUMY2K(20:21) = '19' - ELSE - DUMY2K(20:21) = '20' - ENDIF - DUMY2K(22:100) = DUMREC(20:100) - DUMREC = DUMY2K - PRINT *, ' ' - PRINT *, '==> 2-digit year converted to 4-digit year "', - $ DUMREC(20:23),'" via windowing technique' - PRINT *, ' ' - PRINT '(a,i0,a,a)', 'From unit ',iuntrd,'; DUMREC-6: ',dumrec - PRINT *, ' ' - - ELSE IF(DUMREC(37:37).EQ.'N' .OR. DUMREC(37:37).EQ.'S') THEN - -C ... THIS RECORD CONTAINS THE NEW 4-DIGIT FORM OF THE YEAR -C ... NO CONVERSION NECESSARY SINCE THIS SUBSEQUENT LOGIC EXPECTS THIS - - PRINT *, ' ' - PRINT *, '==> Read in RECORD from tcvitals file -- contains a', - $ ' 4-digit year "',DUMREC(20:23),'"' - PRINT *, ' ' - PRINT '(a,i0,a,a)', 'From unit ',iuntrd,'; DUMREC-6: ',dumrec - PRINT *, ' ' - PRINT *, '==> No conversion necessary' - PRINT *, ' ' - - ELSE - - PRINT *, ' ' - PRINT *, '***** Cannot determine if this record contains ', - $ 'a 2-digit year or a 4-digit year - skip it and try reading ', - $ 'the next record' - PRINT *, ' ' - GO TO 10 - - END IF - - DO IV=1,2 - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVAR(IV),IERDEC,FMTVIT(IV), - 1 DUMREC) - ENDDO - CALL ZTIME(IDATEZ,IUTCZ,IYR,IMO,IDA,IHR,IMIN) - CALL W3DIFDAT((/IYR,IMO,IDA,0,0,0,0,0/),(/1899,12,31,0,0,0,0,0/), - $ 1,RINC) - JDY = NINT(RINC(1)) - CALL FLDAY(JDY,IHR,IMIN,DAYZ) - - IF(DAYZ .GE. DAY0-FIVMIN) THEN - NTEST=NTEST+1 - TSTREC(NTEST)=DUMREC - NUMTST(NTEST)=NTEST -C WRITE(6,33) NTEST,DUMREC -C 33 FORMAT('...READING FROM SCRATCH FILE'/4X,I4,'...',A,'...') - ENDIF - GO TO 10 - - 100 CONTINUE - - IF(NTEST .GT. 0) THEN - IF(LNDFIL .AND. FILES) THEN - WRITE(6,103) NTEST,NOKAY,NBAD - 103 FORMAT(/'...IN RITCUR, CALLING SELACK IN RITCUR TO CHECK FOR ', - 1 'OVERLAND POSITIONS.'/4X,'NTEST,NOKAY,NBAD=',3I4) - - CALL SELACK(NTEST,NOKAY,NBAD,IECOST,IFLLCK,NUMTST,NUMOKA,NUMBAD, - 1 LNDFIL,ZZZREC,NNNREC,TSTREC,BADREC,OKAREC) - - ELSE - DO NOK=1,NTEST - OKAREC(NOK)=TSTREC(NOK) - NUMOKA(NOK)=NOK - ENDDO - NOKAY=NTEST - ENDIF - -C PICK OUT THE UNIQUE STORMS - - DO NOK=1,NOKAY - FOUND=.FALSE. - DO NUNI=1,NUNIQ - IF(OKAREC(NOK)(ISTID:IENID) .EQ. SCRREC(NUNI)(1:IENID-ISTID+1)) - 1 FOUND=.TRUE. - ENDDO - IF(.NOT. 
FOUND) THEN - NUNIQ=NUNIQ+1 - SCRREC(NUNIQ)(1:IENID-ISTID+1)=OKAREC(NOK)(ISTID:IENID) - ENDIF - ENDDO - WRITE(6,151) NUNIQ - 151 FORMAT(/'...THE NUMBER OF UNIQUE STORMS IS',I4) - -C SCAN THROUGH RECORDS AND PICK OUT THE LATEST STORM RECORD FOR -C EACH UNIQUE STORM. - - WRITE(6,157) - 157 FORMAT(/'...THE FOLLOWING LATEST RECORDS FOR EACH STORM ARE ', - 1 'BEING WRITTEN TO THE CURRENT FILE:') - - DO NUNI=1,NUNIQ - DAYCHK=-1.E10 - INDXZ=-99 - DO NOK=1,NOKAY - IF(OKAREC(NOK)(ISTID:IENID) .EQ. SCRREC(NUNI)(1:IENID-ISTID+1)) - 1 THEN - DO IV=1,2 - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVAR(IV),IERDEC,FMTVIT(IV), - 1 OKAREC(NOK)) - ENDDO - CALL ZTIME(IDATEZ,IUTCZ,IYR,IMO,IDA,IHR,IMIN) - CALL W3DIFDAT((/IYR,IMO,IDA,0,0,0,0,0/),(/1899,12,31,0,0,0,0,0/), - $ 1,RINC) - JDY = NINT(RINC(1)) - CALL FLDAY(JDY,IHR,IMIN,DAYZ) - IF(DAYZ .GT. DAYCHK) THEN - INDXZ=NOK - DAYCHK=DAYZ - ENDIF - ENDIF - ENDDO - IF(INDXZ .GT. 0) THEN - WRITE(6,173) INDXZ,OKAREC(INDXZ)(1:MAXCHR) - WRITE(IUNTCU,177) OKAREC(INDXZ)(1:MAXCHR) - 173 FORMAT('...',I3,'...',A,'...') - 177 FORMAT(A) - - ELSE - WRITE(6,181) SCRREC(NUNI)(1:IENID-ISTID+1) - 181 FORMAT(/'###STORM ID=',A,' CANNOT BE FOUND. ABORT1') - CALL ABORT1(' RITCUR',181) - ENDIF - ENDDO - WRITE(6,221) NUNIQ,IUNTCU - 221 FORMAT(/'...',I4,' RECORDS HAVE BEEN COPIED TO THE CURRENT FILE ', - 1 '(UNIT',I3,').') - - ELSE - WRITE(6,231) - 231 FORMAT(/'...NO CURRENT RECORDS WILL BE WRITTEN.') - END FILE IUNTCU - ENDIF - - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: RITSTH WRITES SHORT-TERM HISTORY FILE -C PRGMMR: S. LORD ORG: NP22 DATE: 1990-11-01 -C -C ABSTRACT: WRITES ALL INPUT RECORDS AND QUALITY CONTROL MARKS -C ASSIGNED BY THIS PROGRAM TO A SCRATCH FILE THAT -C CONTAINS ALL RECENT HISTORICAL RECORDS FOR EACH STORM. -C -C PROGRAM HISTORY LOG: -C 1990-11-01 S. LORD -C -C USAGE: CALL RITSTH(IUNTHA,IUNTHO,IUNTOK,NOKAY,NBAD,DAYMIN,IECOST, -C MAXCKS,MAXREC,NUMBAD,IEFAIL,DUMREC,OKAREC,BADREC) -C INPUT ARGUMENT LIST: -C IUNTHA - UNIT NUMBER FOR THE ALIAS SHORT-TERM HISTORY FILE. -C IUNTHO - UNIT NUMBER FOR THE ORIGINAL SHORT-TERM HISTORY FILE. -C IUNTOK - UNIT NUMBER FOR THE SCRATCH FILE CONTAINING RECORDS -C - WRITTEN TO THE SHORT-TERM HISTORY FILE. -C NOKAY - NUMBER OF RECORDS THAT PASSED ALL Q/C CHECKS. -C NBAD - NUMBER OF RECORDS THAT HAVE AT LEAST ONE ERROR. -C DAYMIN - EARLIEST (MINIMUM) DATE FOR RECORDS THAT WILL BE -C - COPIED TO THE SHORT-TERM HISTORICAL FILE. -C - UNITS ARE DDD.FFF, WHERE DDD=JULIAN DAY, FFF=FRAC- -C - TIONAL DAY (E.G. .5=1200 UTC). -C IECOST - ERROR CODE FOR AN OVERLAND (COASTAL) RECORD. -C MAXCKS - NUMBER OF QUALITY CONTROL CHECKS. SECOND DIMENSION OF -C - ARRAY IEFAIL IS (0:MAXCKS). -C MAXREC - FIRST DIMENSION OF ARRAY IEFAIL. -C NUMBAD - INTEGER ARRAY CONTAINING INDEX NUMBER OF EACH BAD -C - RECORD. -C IEFAIL - INTEGER ARRAY CONTAINING QUALITY MARKS. INDEXING -C - IS ACCORDING TO ARRAY NUMBAD. -C DUMREC - CHARACTER VARIABLE LONG ENOUGH TO HOLD VITAL -C - STATISTICS RECORD. -C OKAREC - CHARACTER ARRAY CONTAINING ALL RECORDS THAT HAVE -C - PASSED ALL Q/C CHECKS -C BADREC - CHARACTER ARRAY CONTAINING ALL RECORDS THAT HAVE -C - FAILED AT LEAST ONE Q/C CHECK -C -C INPUT FILES: -C UNIT 22 - ALIAS SHORT=TERM HISTORY FILE -C -C OUTPUT FILES: -C UNIT 06 - STANDARD OUTPUT PRINT -C UNIT 20 - SCRATCH FILE -C UNIT 21 - ORIGINAL SHORT-TERM HISTORY FILE -C -C REMARKS: RECORDS ARE COPIED FROM THE CURRENT ALIAS SHORT-TERM HISTORY -C FILE TO THE SCRATCH FILE IUNTOK. 
THE CONTENTS OF IUNTOK -C WILL BE FINALLY BE COPIED TO THE SHORT-TERM HISTORY FILE -C BY ROUTINE FNLCPY. ORIGINAL RECORDS THAT CONTRIBUTED TO -C MAKING ALIAS RECORDS ARE COPIED TO THE ORIGINAL SHORT-TERM -C SHORT-TERM HISTORY FILE. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE RITSTH(IUNTHA,IUNTHO,IUNTOK,NOKAY,NBAD,DAYMIN,IECOST, - 1 MAXCKS,MAXREC,NUMBAD,IEFAIL,DUMREC,OKAREC,BADREC) - - SAVE - - CHARACTER*(*) DUMREC,OKAREC(NOKAY),BADREC(NBAD) - - DIMENSION IEFAIL(MAXREC,0:MAXCKS),NUMBAD(NBAD) - - ICALL=2 - - REWIND IUNTOK - -C COPY ALL RECORDS FROM THE CURRENT ORIGINAL SHORT-TERM HISTORY -C FILE TO A SCRATCH FILE (IUNTOK) FOR TEMPORARY STORAGE. - - WRITE(6,1) DAYMIN,ICALL - 1 FORMAT(/'...THE FOLLOWING RECORDS, HAVING DATES GREATER THAN OR ', - 1 'EQUAL TO DAY',F10.3,', WILL BE CHECKED FOR EXACT AND ', - 2 'PARTIAL DUPLICATES '/4X,'(ICALL=',I2,') AND WILL BE ', - 3 'COPIED FROM THE ORIGINAL SHORT-TERM HISTORICAL FILE TO ', - 4 'THE PRELIMINARY QUALITY CONTROLLED FILE'/4X,'(SCRATCH ', - 5 'FILE) FOR TEMPORARY STORAGE:') - - CALL CPYREC(ICALL,IUNTHO,IUNTOK,NOKAY,DAYMIN,DUMREC,OKAREC) - -C NOW ADD THE CURRENT RECORDS. - - WRITE(6,21) - 21 FORMAT(//'...THE FOLLOWING ACCEPTABLE ORIGINAL RECORDS WILL BE ', - 1 'ADDED TO THE NEW ORIGINAL SHORT-TERM HISTORY FILE:'/) - DO NOK=1,NOKAY - IF(OKAREC(NOK)(1:1) .NE. '!') THEN - WRITE(6,23) NOK,OKAREC(NOK) - 23 FORMAT('...',I4,'...',A) - WRITE(IUNTOK,27) OKAREC(NOK) - 27 FORMAT(A) - ENDIF - ENDDO - -C NOW WE APPEND THE SCRATCH FILE WITH RECORDS THAT CONTRIBUTED -C TO ALIAS RECORDS. - - WRITE(6,101) - 101 FORMAT(/'...THE FOLLOWING (BAD) RECORDS WITH RSMCCK OR RCNCIL ', - 1 'ERRORS WILL BE ADDED TO THE SHORT-TERM ORIGINAL'/4X, - 2 'HISTORY FILE:'/) - - DO NBA=1,NBAD - - IF(IEFAIL(NUMBAD(NBA),6) .EQ. 10 .OR. - 1 IEFAIL(NUMBAD(NBA),6) .GE. 21 .OR. - 1 IABS(IEFAIL(NUMBAD(NBA),5)) .EQ. 20) THEN - - DO NCK=1,MAXCKS - IF(NCK .NE. 6 .AND. NCK .NE. 5 .AND. - 1 IEFAIL(NUMBAD(NBA),NCK) .GT. 0) GO TO 150 - ENDDO - - WRITE(6,131) NBA,BADREC(NBA) - 131 FORMAT('...',I4,'...',A) - WRITE(IUNTOK,133) BADREC(NBA) - 133 FORMAT(A) - - ENDIF - 150 CONTINUE - ENDDO - -C COPY RECORDS THAT ARE MORE RECENT THAN DAYMIN FROM THE -C SCRATCH FILE (IUNTOK) TO THE ORIGINAL SHORT-TERM -C HISTORY FILE - - ICALL=1 - REWIND IUNTOK - REWIND IUNTHO - WRITE(6,151) - 151 FORMAT(/'...THE FOLLOWING RECORDS WILL BE COPIED FROM THE ', - 1 'SCRATCH FILE TO THE NEW ORIGINAL SHORT-TERM HISTORICAL ', - 2 'FILE:') - - CALL CPYREC(ICALL,IUNTOK,IUNTHO,NOKAY,DAYMIN,DUMREC,OKAREC) - - ICALL=3 - - REWIND IUNTOK - -C COPY RECORDS THAT ARE MORE RECENT THAN DAYMIN FROM THE -C CURRENT ALIAS SHORT-TERM HISTORY FILE TO A SCRATCH FILE -C (IUNTOK). THEN ADD THE CURRENT RECORDS. - - CALL CPYREC(ICALL,IUNTHA,IUNTOK,NOKAY,DAYMIN,DUMREC,OKAREC) - - WRITE(6,211) - 211 FORMAT(//'...THE FOLLOWING ACCEPTABLE RECORDS WILL BE ADDED TO ', - 1 'THE NEW ALIAS SHORT-TERM HISTORY FILE:'/) - DO NOK=1,NOKAY - WRITE(6,213) NOK,OKAREC(NOK) - 213 FORMAT('...',I4,'...',A) - WRITE(IUNTOK,217) OKAREC(NOK) - 217 FORMAT(A) - ENDDO - -C ADD RECORDS THAT HAVE OVERLAND POSITIONS TO THE SHORT-TERM -C HISTORY FILE, PROVIDED THEY HAVE NO OTHER ERRORS - - WRITE(6,41) - 41 FORMAT(/'...THE FOLLOWING (BAD) RECORDS WITH COASTAL OVERLAND ', - 1 'POSITIONS WILL BE ADDED TO THE NEW ALIAS SHORT-TERM '/4X, - 2 'HISTORY FILE FOR FUTURE TRACK CHECKS:'/) - - DO NBA=1,NBAD - - IF(IEFAIL(NUMBAD(NBA),4) .EQ. IECOST) THEN - - DO NCK=1,MAXCKS - IF(NCK .NE. 4 .AND. IEFAIL(NUMBAD(NBA),NCK) .GT. 
0) GO TO 300 - ENDDO - - WRITE(6,261) NBA,BADREC(NBA) - 261 FORMAT('...',I4,'...',A) - WRITE(IUNTOK,263) BADREC(NBA) - 263 FORMAT(A) - - ENDIF - 300 CONTINUE - ENDDO - -C THE SCRATCH FILE (IUNTOK) NOW CONTAINS ALL RECORDS THAT WILL -C BE IN THE NEW ALIAS SHORT-TERM HISTORY FILE. SUBROUTINE FNLCPY -C WILL COPY THIS SCRATCH FILE TO THE NEW ALIAS SHORT-TERM HISTORY -C FILE. - - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: RITHIS WRITES RECORDS AND Q/C MARKS TO FILE -C PRGMMR: S. LORD ORG: NP22 DATE: 1990-11-01 -C -C ABSTRACT: WRITES ALL INPUT RECORDS AND QUALITY CONTROL MARKS -C ASSIGNED BY THIS PROGRAM TO A LONG-TERM HISTORY FILE. -C -C PROGRAM HISTORY LOG: -C 1990-11-01 S. LORD -C -C USAGE: CALL RITHIS(IUNTHI,IEFAIL,NRTOT,IDATE,IUTC,NUMREC,NREC, -C MAXREC,MAXCKS,HROFF,WINCUR,RUNID,LNDFIL,FILES, -C RECORD,ZZZREC,XXXREC) -C INPUT ARGUMENT LIST: -C IUNTHI - UNIT NUMBER FOR THE OUTPUT FILE. NOTE: SIGN OF THE -C - QUALITY MARKS IS ATTACHED TO THIS NUMBER! -C IEFAIL - INTEGER ARRAY CONTAINING QUALITY MARKS. INDEXING -C - IS ACCORDING TO ARRAY NUMREC. SIGN OF THIS NUMBER IS -C - ATTACHED TO IUNTHI! -C NRTOT - TOTAL NUMBER OF RECORDS WRITTEN INTO THE FILE. NREC -C - IS THE NUMBER WRITTEN FOR EACH CALL OF THE ROUTINE. -C IDATE - YYYYMMDD FOR WHICH THE PROGRAM IS BEING RUN. -C IUTC - HHMM FOR WHICH THE PROGRAM IS BEING RUN. -C NUMREC - ARRAY OF RECORD NUMBERS CORRESPONDING TO THE QUALITY -C - MARKS STORED IN ARRAY IEFAIL. -C NREC - NUMBER OF RECORDS TO BE WRITTEN TO THE OUTPUT FILE. -C MAXREC - FIRST DIMENSION OF ARRAY IEFAIL. -C MAXCKS - NUMBER OF QUALITY CONTROL CHECKS. SECOND DIMENSION OF -C - ARRAY IEFAIL IS (0:MAXCKS). -C HROFF - OFFSET (FRACTIONAL HOURS) BETWEEN TIME PROGRAM IS -C - RUN AND THE VALID CYCLE TIME -C WINCUR - TIME WINDOW FOR ADDING RECORDS TO CURRENT FILE -C RUNID - CHARACTER VARIABLE IDENTIFYING RUN -C LNDFIL - LOGICAL VARIABLE, TRUE IF OVER LAND POSITIONS ARE -C - NOT WRITTEN TO CURRENT FILE -C FILES - LOGICAL VARIABLE: TRUE IF SHORT-TERM HISTORY FILES ARE -C - UPDATED. -C RECORD - CHARACTER ARRAY CONTAINING OUTPUT RECORDS. -C ZZZREC - COLUMN HEADER RECORD. -C XXXREC - COLUMN HEADER RECORD. -C -C OUTPUT FILES: -C UNIT 06 - STANDARD OUTPUT PRINT -C UNIT 61 - CONTAINS HISTORY OF ALL RECORDS THAT ARE OPERATED ON -C - BY THIS PROGRAM -C -C REMARKS: THE HEADER RECORD IS WRITTEN ON THE FIRST CALL OF THIS -C ROUTINE. IT CONSISTS OF IDATE,IUTC,NRTOT,NREC,ZZZREC -C AND XXXREC. FOR THE FIRST CALL, NREC CORRESPONDS TO THE -C NUMBER OF RECORDS THAT PASSED THE Q/C CHECKS. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE RITHIS(IUNTHI,IEFAIL,NRTOT,IDATE,IUTC,NUMREC,NREC, - 1 MAXREC,MAXCKS,HROFF,WINCUR,RUNID,LNDFIL,FILES, - 2 RECORD,ZZZREC,XXXREC) - - PARAMETER (MAXSPH=131) - - SAVE - - LOGICAL FIRST,LNDFIL,FILES - - CHARACTER*(*) RUNID,RECORD(NREC),ZZZREC,XXXREC - - PARAMETER (MAXCHR=95) - - DIMENSION IEFAIL(MAXREC,0:MAXCKS),NUMREC(NREC) - - DATA FIRST/.TRUE./ - - IF(FIRST) THEN - FIRST=.FALSE. - IF(MAXCHR+1+3*(MAXCKS+1) .GT. 
MAXSPH) THEN - WRITE(6,1) MAXCHR,MAXCKS,MAXCHR+1+3*(MAXCKS+1),MAXSPH - 1 FORMAT(/'******INSUFFICIENT SPACE ALLOCATED FOR LONG-TERM ', - 1 'HISTORY FILE.'/7X,'MAXCHR,MAXCK,(REQUIRED,AVAILABLE) ', - 2 ' SPACE ARE:',4I4) - CALL ABORT1(' RITHIS',1) - ENDIF - - NROKAY=NREC - WRITE(IABS(IUNTHI),3) IDATE,IUTC,NRTOT,NROKAY,HROFF,RUNID,LNDFIL, - 1 FILES,WINCUR,ZZZREC(1:MAXCHR),XXXREC - 3 FORMAT('IDATE=',I8,' IUTC=',I4,' NRTOT=',I4,' NROKAY=',I4, - 1 ' HROFF=',F6.2,' RUNID=',A12,' LNDFIL=',L1,' FILES=',L1, - 2 ' WINCUR=',F6.3/A,1X,A) - ENDIF - -C OUTPUT UNIT NUMBER IS NEGATIVE FOR OKAY RECORDS (ERROR CODES ARE -C ALWAYS NEGATIVE). OUTPUT UNIT NUMBER IS POSITIVE FOR BAD -C RECORDS, WHICH MAY HAVE A MIXTURE OF POSITIVE AND NEGATIVE -C ERROR CODES. - - IF(IUNTHI .LT. 0) THEN - DO NR=1,NREC - WRITE(IABS(IUNTHI),5) RECORD(NR)(1:MAXCHR),IEFAIL(NUMREC(NR),0), - 1 (-IABS(IEFAIL(NUMREC(NR),ICK)),ICK=1,MAXCKS) - 5 FORMAT(A,1X,I3,8I3) - ENDDO - - ELSE - DO NR=1,NREC - WRITE(IABS(IUNTHI),5) RECORD(NR)(1:MAXCHR),IEFAIL(NUMREC(NR),0), - 1 (IEFAIL(NUMREC(NR),ICK),ICK=1,MAXCKS) - ENDDO - ENDIF - - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: FNLCPY RESETS FILES FOR THE NEXT INPUT CYCLE -C PRGMMR: S. LORD ORG: NP22 DATE: 1990-11-01 -C -C ABSTRACT: RESETS THE FILES CONTAINING THE INPUT RECORDS FOR THE -C NEXT RUN OF THE PROGRAM. THE SHORT-TERM HISTORY FILE IS UPDATED -C AND ALL INPUT FILES ARE FLUSHED, RECORDS THAT BELONG TO A FUTURE -C CYCLE ARE REINSERTED INTO THE INPUT FILES. -C -C PROGRAM HISTORY LOG: -C 1990-11-01 S. LORD -C -C USAGE: CALL FNLCPY(IUNTVZ,MAXUNT,IUNTOK,IUNTHA,MAXREC,NTBP,NUMTBP, -C IUNTIN,TBPREC,DUMREC) -C INPUT ARGUMENT LIST: -C IUNTVZ - UNIT NUMBER FOR FIRST INPUT FILE -C MAXUNT - NUMBER OF INPUT FILES TO BE RESET -C IUNTOK - UNIT NUMBER FOR TEMPORARY HISTORY FILE, WHICH CONTAINS -C - QUALITY CONTROLLED RECORDS, INCLUDING THOSE JUST -C - PROCESSED. -C IUNTHA - UNIT NUMBER FOR THE ALIAS SHORT TERM HISTORY FILE. -C RECORDS ARE COPIED FROM IUNTOK TO IUNTHA. -C MAXREC - MAXIMUM NUMBER OF RECORDS, DIMENSION OF IUNTIN. -C NTBP - NUMBER OF RECORDS FOR THE NEXT CYCLE THAT WILL BE -C - PUT BACK INTO THE INPUT FILES (THROWN BACK INTO THE -C - POND). -C NUMTBP - INTEGER ARRAY CONTAINING INDICES OF RECORDS THAT WILL -C - THROWN BACK INTO THE POND. INDICES REFER TO POSITION -C - IN ARRAY IUNTIN. -C IUNTIN - INTEGER ARRAY CONTAINING UNIT NUMBERS FOR RECORDS -C - THAT WILL BE THROWN BACK INTO THE POND. -C TBPREC - CHARACTER ARRAY CONTAINING RECORDS THAT WILL BE -C - THROWN BACK INTO THE POND. -C DUMREC - CHARACTER VARIABLE FOR COPYING RECORDS TO THE -C - SHORT-TERM HISTORY FILE. -C -C -C OUTPUT FILES: -C UNIT 06 - STANDARD OUTPUT PRINT -C UNIT 10 - SCRATCH FILE -C UNIT 22 - SHORT-TERM HISTORY, RECORDS BACK 4 DAYS FROM PRESENT -C UNIT 30 - FILE(S) CONTAINING NEW RECORDS TO BE QUALITY -C - CONTROLLED. RECORDS APPROPRIATE TO A FUTURE CYCLE ARE -C - WRITTEN BACK TO THIS FILE -C -C REMARKS: NONE. 
-C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE FNLCPY(IUNTVZ,MAXUNT,IUNTOK,IUNTHA,MAXREC,NTBP,NUMTBP, - 1 IUNTIN,TBPREC,DUMREC) - - SAVE - - CHARACTER DUMREC*(*),TBPREC(NTBP)*(*) - CHARACTER*100 DUMY2K - - DIMENSION NUMTBP(NTBP),IUNTIN(MAXREC) - -C FINAL COPYING BACK TO SHORT TERM HISTORY FILE AND ZEROING ALL -C FILES THAT WILL CONTAIN NEW RECORDS FOR THE NEXT CYCLE - - NREC=0 - REWIND IUNTOK - REWIND IUNTHA - - 10 CONTINUE - - READ(IUNTOK,11,END=20) DUMREC - 11 FORMAT(A) - -C AT THIS POINT WE DO NOT KNOW IF A 2-DIGIT YEAR BEGINS IN COLUMN 20 -C OF THE RECORD (OLD NON-Y2K COMPLIANT FORM) OR IF A 4-DIGIT YEAR -C BEGINS IN COLUMN 20 (NEW Y2K COMPLIANT FORM) - TEST ON LOCATION OF -C LATITUDE N/S INDICATOR TO FIND OUT ... - - IF(DUMREC(35:35).EQ.'N' .OR. DUMREC(35:35).EQ.'S') THEN - -C ... THIS RECORD STILL CONTAINS THE OLD 2-DIGIT FORM OF THE YEAR - -C ... THIS PROGRAM WILL CONVERT THE RECORD TO A 4-DIGIT YEAR USING THE -C "WINDOWING" TECHNIQUE SINCE SUBSEQUENT LOGIC EXPECTS THIS - - PRINT *, ' ' - PRINT *, '==> Read in RECORD from tcvitals file -- contains a', - $ ' 2-digit year "',DUMREC(20:21),'"' - PRINT *, ' ' - PRINT '(a,i0,a,a)', 'From unit ',iuntok,'; DUMREC-7: ',dumrec - PRINT *, ' ' - DUMY2K(1:19) = DUMREC(1:19) - IF(DUMREC(20:21).GT.'20') THEN - DUMY2K(20:21) = '19' - ELSE - DUMY2K(20:21) = '20' - ENDIF - DUMY2K(22:100) = DUMREC(20:100) - DUMREC = DUMY2K - PRINT *, ' ' - PRINT *, '==> 2-digit year converted to 4-digit year "', - $ DUMREC(20:23),'" via windowing technique' - PRINT *, ' ' - PRINT '(a,i0,a,a)', 'From unit ',iuntok,'; DUMREC-7: ',dumrec - PRINT *, ' ' - - ELSE IF(DUMREC(37:37).EQ.'N' .OR. DUMREC(37:37).EQ.'S') THEN - -C ... THIS RECORD CONTAINS THE NEW 4-DIGIT FORM OF THE YEAR -C ... NO CONVERSION NECESSARY SINCE THIS SUBSEQUENT LOGIC EXPECTS THIS - - PRINT *, ' ' - PRINT *, '==> Read in RECORD from tcvitals file -- contains a', - $ ' 4-digit year "',DUMREC(20:23),'"' - PRINT *, ' ' - PRINT '(a,i0,a,a)', 'From unit ',iuntok,'; DUMREC-7: ',dumrec - PRINT *, ' ' - PRINT *, '==> No conversion necessary' - PRINT *, ' ' - - ELSE - - PRINT *, ' ' - PRINT *, '***** Cannot determine if this record contains ', - $ 'a 2-digit year or a 4-digit year - skip it and try reading ', - $ 'the next record' - PRINT *, ' ' - GO TO 10 - - END IF - - NREC=NREC+1 - WRITE(IUNTHA,11) DUMREC - GO TO 10 - - 20 CONTINUE - WRITE(6,21) NREC,IUNTHA - 21 FORMAT(/'...',I3,' RECORDS HAVE BEEN COPIED TO THE FUTURE ALIAS ', - 1 'SHORT-TERM HISTORY FILE, UNIT=',I3) - - IUNTVI=IUNTVZ - DO NFILE=1,MAXUNT - REWIND IUNTVI - - IF(NTBP .EQ. 0) THEN - - END FILE IUNTVI - WRITE(6,23) IUNTVI - 23 FORMAT(/'...UNIT',I3,' HAS BEEN ZEROED FOR THE NEXT CYCLE.') - -C THROW RECORDS FOR THE NEXT CYCLE BACK INTO THE POND - - ELSE - - WRITE(6,27) IUNTVI - 27 FORMAT(/'...THE FOLLOWING RECORDS WILL BE THROWN BACK INTO THE ', - 1 'POND = UNIT',I3,':') - - DO NTB=1,NTBP - IF(IUNTIN(NUMTBP(NTB)) .EQ. IUNTVI) THEN - WRITE(IUNTVI,11) TBPREC(NTB) - WRITE(6,29) NTB,NUMTBP(NTB),TBPREC(NTB) - 29 FORMAT(3X,I4,'...',I4,'...',A,'...') - ENDIF - ENDDO - - ENDIF - - IUNTVI=IUNTVI+1 - - ENDDO - - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: CPYREC COPIES RECORDS CHECKS DATES & DUPLICATES -C PRGMMR: S. LORD ORG: NP22 DATE: 1990-11-01 -C -C ABSTRACT: RECORDS ARE CHECKED FOR DATE AND EXACT AND PARTIAL -C DUPLICATES AND COPIED FROM ONE FILE TO A SECOND FILE. -C -C PROGRAM HISTORY LOG: -C 1990-11-01 S. LORD -C 1992-03-10 S. LORD - ADDED FILTERS. 
-C -C USAGE: CALL CPYREC(ICALL,IUNTRD,IUNTWT,NOKAY,DAYMN,DUMREC,OKAREC) -C INPUT ARGUMENT LIST: -C ICALL - TOGGLE FOR FILTER. 1: NO FILTER (STRAIGHT COPY) -C 2: DATE/TIME, STORM ID & NAME -C 3: #2 ABOVE PLUS RSMC (PARTIAL -C DUPLICATE) -C IUNTRD - UNIT NUMBER FOR RECORDS TO BE COPIED -C IUNTWT - RECORDS COPIED TO THIS UNIT NUMBER -C NOKAY - LENGTH OF ARRAY OKAREC -C DAYMN - RECORDS WITH DATES PRIOR TO THIS DAY WILL NOT BE -C - COPIED. DAYMN HAS UNITS OF DDD.FFF, WHERE DDD= -C - JULIAN DAY, FFF=FRACTIONAL DAY (E.G. .5 IS 1200 UTC.) -C DUMREC - CHARACTER VARIABLE LONG ENOUGH TO HOLD COPIED RECORD. -C OKAREC - CHARACTER ARRAY CONTAINING RECORDS AGAINST WHICH -C - EACH COPIED RECORD WILL BE CHECKED FOR EXACT OR -C - PARTIAL DUPLICATES. A PARTIAL DUPLICATE IS ONE WITH -C - THE SAME RSMC, DATE/TIME AND STORM NAME/ID. -C -C INPUT FILES: -C UNIT 20 - SHORT TERM HISTORY -C -C OUTPUT FILES: -C UNIT 06 - STANDARD OUTPUT PRINT -C UNIT 22 - PRELIMINARY QUALITY CONTROLLED FILE -C -C REMARKS: NONE. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE CPYREC(ICALL,IUNTRD,IUNTWT,NOKAY,DAYMN,DUMREC,OKAREC) - - SAVE - - CHARACTER*(*) DUMREC,OKAREC(NOKAY) - CHARACTER*100 DUMY2K - - DIMENSION RINC(5) - - PARAMETER (MAXVIT=15) - - CHARACTER FMTVIT*6 - - DIMENSION IVTVAR(MAXVIT),ISTVAR(MAXVIT),IENVAR(MAXVIT) - - DIMENSION FMTVIT(MAXVIT) - - EQUIVALENCE (IVTVAR(1),IDATEZ),(IVTVAR(2),IUTCZ) - - DATA FMTVIT/'(I8.8)','(I4.4)','(I3.3)','(I4.4)',2*'(I3.3)', - 1 3*'(I4.4)','(I2.2)','(I3.3)',4*'(I4.4)'/, - 2 ISTVAR/20,29,34,39,45,49,53,58,63,68,71,75,80,85,90/, - 3 IENVAR/27,32,36,42,47,51,56,61,66,69,73,78,83,88,93/ - - DATA NUM/1/,FIVMIN/3.4722E-3/ - - NREC=0 - REWIND IUNTRD - - 10 CONTINUE - - READ(IUNTRD,11,END=100) DUMREC - 11 FORMAT(A) - -C AT THIS POINT WE DO NOT KNOW IF A 2-DIGIT YEAR BEGINS IN COLUMN 20 -C OF THE RECORD (OLD NON-Y2K COMPLIANT FORM) OR IF A 4-DIGIT YEAR -C BEGINS IN COLUMN 20 (NEW Y2K COMPLIANT FORM) - TEST ON LOCATION OF -C LATITUDE N/S INDICATOR TO FIND OUT ... - - IF(DUMREC(35:35).EQ.'N' .OR. DUMREC(35:35).EQ.'S') THEN - -C ... THIS RECORD STILL CONTAINS THE OLD 2-DIGIT FORM OF THE YEAR - -C ... THIS PROGRAM WILL CONVERT THE RECORD TO A 4-DIGIT YEAR USING THE -C "WINDOWING" TECHNIQUE SINCE SUBSEQUENT LOGIC EXPECTS THIS - - PRINT *, ' ' - PRINT *, '==> Read in RECORD from tcvitals file -- contains a', - $ ' 2-digit year "',DUMREC(20:21),'"' - PRINT *, ' ' - PRINT '(a,i0,a,a)', 'From unit ',iuntrd,'; DUMREC-8: ',dumrec - PRINT *, ' ' - DUMY2K(1:19) = DUMREC(1:19) - IF(DUMREC(20:21).GT.'20') THEN - DUMY2K(20:21) = '19' - ELSE - DUMY2K(20:21) = '20' - ENDIF - DUMY2K(22:100) = DUMREC(20:100) - DUMREC = DUMY2K - PRINT *, ' ' - PRINT *, '==> 2-digit year converted to 4-digit year "', - $ DUMREC(20:23),'" via windowing technique' - PRINT *, ' ' - PRINT '(a,i0,a,a)', 'From unit ',iuntrd,'; DUMREC-8: ',dumrec - PRINT *, ' ' - - ELSE IF(DUMREC(37:37).EQ.'N' .OR. DUMREC(37:37).EQ.'S') THEN - -C ... THIS RECORD CONTAINS THE NEW 4-DIGIT FORM OF THE YEAR -C ... 
NO CONVERSION NECESSARY SINCE THIS SUBSEQUENT LOGIC EXPECTS THIS - - PRINT *, ' ' - PRINT *, '==> Read in RECORD from tcvitals file -- contains a', - $ ' 4-digit year "',DUMREC(20:23),'"' - PRINT *, ' ' - PRINT '(a,i0,a,a)', 'From unit ',iuntrd,'; DUMREC-8: ',dumrec - PRINT *, ' ' - PRINT *, '==> No conversion necessary' - PRINT *, ' ' - - ELSE - - PRINT *, ' ' - PRINT *, '***** Cannot determine if this record contains ', - $ 'a 2-digit year or a 4-digit year - skip it and try reading ', - $ 'the next record' - PRINT *, ' ' - GO TO 10 - - END IF - - IF(ICALL .GT. 1) THEN - DO IV=1,2 - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVAR(IV),IERDEC,FMTVIT(IV), - 1 DUMREC) - ENDDO - CALL ZTIME(IDATEZ,IUTCZ,IYR,IMO,IDA,IHR,IMIN) - CALL W3DIFDAT((/IYR,IMO,IDA,0,0,0,0,0/),(/1899,12,31,0,0,0,0,0/), - $ 1,RINC) - JDY = NINT(RINC(1)) - CALL FLDAY(JDY,IHR,IMIN,DAYZ) -C WRITE(6,21) IDATEZ,IUTCZ,DAYZ,DAYMN -C 21 FORMAT(/'...CHECKING DATE,TIME FOR COPYING HISTORICAL RECORDS',I9, -C I5,2F10.2) - - IF(DAYZ .GE. DAYMN-FIVMIN) THEN - - DO NOK=1,NOKAY - IF(DUMREC .EQ. OKAREC(NOK)) THEN - WRITE(6,27) DUMREC - 27 FORMAT(/'...EXACT DUPLICATE FOUND IN THE NEW AND HISTORICAL ', - 1 'FILES. THE HISTORICAL RECORD WILL NOT BE COPIED.'/8X, - 2 '...',A/) - GO TO 10 - ENDIF - -C CHECK FOR VARIOUS PARTIAL DUPLICATES: -C ICALL = 2: DATE/TIME, STORM ID, STORM NAME FILTER -C ICALL = 3: #2 ABOVE PLUS RSMC, I.E. A PARTIAL DUPLICATE - - IF(ICALL .EQ. 2 .AND. DUMREC(6:ISTVAR(3)-1) .EQ. - 1 OKAREC(NOK)(6:ISTVAR(3)-1)) THEN - WRITE(6,59) DUMREC,OKAREC(NOK) - 59 FORMAT(/'...PARTIAL DUPLICATE IN STORM ID & NAME, DATE AND TIME ', - 1 'FOUND IN THE NEW AND HISTORICAL FILES.'/4X,'THE ', - 2 'HISTORICAL RECORD WILL NOT BE COPIED.'/5X,'HIS...',A/5X, - 3 'NEW...',A/) - GO TO 10 - ENDIF - - IF(ICALL .GE. 3 .AND. DUMREC(1:ISTVAR(3)-1) .EQ. - 1 OKAREC(NOK)(1:ISTVAR(3)-1)) THEN - WRITE(6,69) DUMREC,OKAREC(NOK) - 69 FORMAT(/'...PARTIAL DUPLICATE IN RSMC, STORM ID & NAME, DATE AND', - 1 ' TIME FOUND IN THE NEW AND HISTORICAL FILES.'/4X,'THE ', - 2 'HISTORICAL RECORD WILL NOT BE COPIED.'/5X,'HIS...',A/5X, - 3 'NEW...',A/) - GO TO 10 - ENDIF - - ENDDO - - NREC=NREC+1 - WRITE(6,83) NREC,DUMREC - 83 FORMAT(3X,I4,'...',A,'...') - - WRITE(IUNTWT,11) DUMREC - ENDIF - - ELSE - NREC=NREC+1 - WRITE(6,83) NREC,DUMREC - WRITE(IUNTWT,11) DUMREC - ENDIF - - GO TO 10 - - 100 WRITE(6,101) NREC,IUNTRD,IUNTWT - 101 FORMAT(/'...',I4,' RECORDS HAVE BEEN COPIED FROM UNIT',I3,' TO ', - 1 'UNIT',I3,'.') - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: DUPCHK READS INPUT RECORDS, DUPLICATE CHECKS -C PRGMMR: S. LORD ORG: NP22 DATE: 1990-11-01 -C -C ABSTRACT: READS INPUT RECORDS FROM ALL SPECIFIED FILES. CHECKS FOR -C EXACT DUPLICATES. RETURNS ALL RECORDS TO BE QUALITY CONTROLLED. -C -C PROGRAM HISTORY LOG: -C 1990-11-01 S. LORD -C 1992-08-20 S. LORD ADDED NEW UNIT FOR GTS BUFR MESSAGES -C 1997-06-24 S. LORD ADDED NEW UNIT FOR MANUALLY ENTERED MESSAGES -C -C USAGE: CALL DUPCHK(IUNTIN,MAXUNT,MAXREC,IERCHK,NUNI,IFILE, -C NUMOKA,DUMREC,UNIREC,DUPREC,*) -C INPUT ARGUMENT LIST: -C IUNTIN - THE INPUT UNIT NUMBER FOR THE FIRST FILE TO BE READ. -C MAXUNT - NUMBER OF INPUT FILES. -C MAXREC - MAXIMUM NUMBER OF INPUT RECORDS. SUBROUTINE -C - RETURNS WITH CONDITION CODE=51 OR 53 WHEN NUMBER OF -C - UNIQUE OR DUPLICATE RECORDS EXCEEDS MAXREC. -C -C OUTPUT ARGUMENT LIST: -C IERCHK - ERROR INDICATOR. -C NUNI - NUMBER OF UNIQUE RECORDS TO BE QUALITY CONTROLLED -C IFILE - INTEGER ARRAY CONTAINING THE UNIT NUMBER FROM WHICH -C - EACH INPUT RECORD WAS READ. 
-C NUMOKA - INDEX NUMBER FOR EACH UNIQUE RECORD. INDEX NUMBER -C - IS SIMPLY THE ORDINAL NUMBER OF EACH RECORD READ -C - THAT IS UNIQUE, I.E. NOT A DUPLICATE. -C DUMREC - DUMMY CHARACTER VARIABLE LARGE ENOUGH TO READ A RECORD. -C UNIREC - CHARACTER ARRAY HOLDING ALL INPUT RECORDS. -C DUPREC - CHARACTER ARRAY HOLDING ALL DUPLICATE RECORDS. -C * - ALTERNATE RETURN IF NO INPUT RECORDS ARE FOUND. -C - SUBROUTINE RETURNS WITH IERCHK=161. -C -C INPUT FILES: -C UNIT 30 - FILES CONTAINING NEW RECORDS TO BE QUALITY CONTROLLED. -C -C OUTPUT FILES: -C UNIT 06 - STANDARD OUTPUT PRINT -C -C REMARKS: NONE. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE DUPCHK(IUNTIN,MAXUNT,MAXREC,IERCHK,NUNI,IFILE,NUMOKA, - 1 DUMREC,UNIREC,DUPREC,*) - - PARAMETER (MAXFIL=5) - - SAVE - - LOGICAL UNIQUE - CHARACTER*(*) DUMREC,UNIREC(0:MAXREC),DUPREC(MAXREC) - CHARACTER INPFIL(MAXFIL)*4 - CHARACTER*100 DUMY2K - - DIMENSION NUMOKA(MAXREC),IFILE(MAXREC) - - DATA INPFIL/'NHC ','FNOC','GBTB','GBFR','HBTB'/ - - IF(MAXUNT .GT. MAXFIL) THEN - WRITE(6,1) MAXUNT,MAXFIL - 1 FORMAT(/'******MAXIMUM NUMBER OF UNITS TO BE READ=',I3,' EXCEEDS', - 1 ' EXPECTATIONS. NUMBER WILL BE REDUCED TO',I3) - MAXUNT=MAXFIL - ENDIF - - IUNTVI=IUNTIN - IERCHK=0 - NUNI=0 - NDUP=0 - NSTART=0 - NALREC=0 - NRFILE=0 - UNIREC(0)='ZZZZZZZ' - - WRITE(6,3) MAXREC,IUNTVI,MAXUNT,(INPFIL(IFFF), - 1 IUNTIN+IFFF-1,IFFF=1,MAXUNT) - 3 FORMAT(//'...ENTERING DUPCHK: READING FILE AND LOOKING FOR EXACT', - 1 ' DUPLICATES. MAXREC=',I4,'.'/4X,'INITIAL UNIT NUMBER=', - 2 I4,' AND',I3,' UNITS WILL BE READ'/4X,'FILES AND UNIT ', - 3 'NUMBERS ARE:'/(6X,A,':',I3)) - - 10 CONTINUE - - DO NREC=1,MAXREC - READ(IUNTVI,11,END=130) DUMREC - 11 FORMAT(A) - -C AT THIS POINT WE DO NOT KNOW IF A 2-DIGIT YEAR BEGINS IN COLUMN 20 -C OF THE RECORD (OLD NON-Y2K COMPLIANT FORM) OR IF A 4-DIGIT YEAR -C BEGINS IN COLUMN 20 (NEW Y2K COMPLIANT FORM) - TEST ON LOCATION OF -C LATITUDE N/S INDICATOR TO FIND OUT ... - - IF(DUMREC(35:35).EQ.'N' .OR. DUMREC(35:35).EQ.'S') THEN - -C ... THIS RECORD STILL CONTAINS THE OLD 2-DIGIT FORM OF THE YEAR - -C FOR EXAMPLE: - -C NHC 13L MITCH 981028 1800 164N 0858W 270 010 0957 1008 0371 51 019 0278 0278 0185 0185 D -C 123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123 -C 1 2 3 4 5 6 7 8 9 - -C ... THIS PROGRAM WILL CONVERT THE RECORD TO A 4-DIGIT YEAR USING THE -C "WINDOWING" TECHNIQUE SINCE SUBSEQUENT LOGIC EXPECTS THIS - FOR -C EXAMPLE, THE ABOVE RECORD IS CONVERTED TO: - -C NHC 13L MITCH 19981028 1800 164N 0858W 270 010 0957 1008 0371 51 019 0278 0278 0185 0185 D -C 12345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345 -C 1 2 3 4 5 6 7 8 9 - - PRINT *, ' ' - PRINT *, '==> Read in RECORD from tcvitals file -- contains a', - $ ' 2-digit year "',DUMREC(20:21),'"' - PRINT *, ' ' - PRINT '(a,i0,a,a)', 'From unit ',iuntvi,'; DUMREC-1: ',dumrec - PRINT *, ' ' - DUMY2K(1:19) = DUMREC(1:19) - IF(DUMREC(20:21).GT.'20') THEN - DUMY2K(20:21) = '19' - ELSE - DUMY2K(20:21) = '20' - ENDIF - DUMY2K(22:100) = DUMREC(20:100) - DUMREC = DUMY2K - PRINT *, ' ' - PRINT *, '==> 2-digit year converted to 4-digit year "', - $ DUMREC(20:23),'" via windowing technique' - PRINT *, ' ' - PRINT '(a,i0,a,a)', 'From unit ',iuntvi,'; DUMREC-1: ',dumrec - PRINT *, ' ' - - ELSE IF(DUMREC(37:37).EQ.'N' .OR. DUMREC(37:37).EQ.'S') THEN - -C ... 
THIS RECORD CONTAINS THE NEW 4-DIGIT FORM OF THE YEAR - -C FOR EXAMPLE: - -C NHC 13L MITCH 19981028 1800 164N 0858W 270 010 0957 1008 0371 51 019 0278 0278 0185 0185 D -C 12345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345 -C 1 2 3 4 5 6 7 8 9 - -C ... NO CONVERSION NECESSARY SINCE THIS SUBSEQUENT LOGIC EXPECTS THIS - - PRINT *, ' ' - PRINT *, '==> Read in RECORD from tcvitals file -- contains a', - $ ' 4-digit year "',DUMREC(20:23),'"' - PRINT *, ' ' - PRINT '(a,i0,a,a)', 'From unit ',iuntvi,'; DUMREC-1: ',dumrec - PRINT *, ' ' - PRINT *, '==> No conversion necessary' - PRINT *, ' ' - - ELSE - - PRINT *, ' ' - PRINT '(a,a,a)', '***** Cannot determine if this record ', - $ 'contains a 2-digit year or a 4-digit year - skip it and ', - $ 'try reading the next record' - PRINT *, ' ' - GO TO 100 - - END IF - - NALREC=NALREC+1 - NRFILE=NRFILE+1 - - UNIQUE=.TRUE. - DO NR=NSTART,NUNI - IF(DUMREC .EQ. UNIREC(NR)) UNIQUE=.FALSE. - ENDDO - - IF(UNIQUE) THEN - - IF(NUNI .EQ. MAXREC) THEN - WRITE(6,51) MAXREC - 51 FORMAT('******INSUFFICIENT STORAGE FOR ALL VITAL ', - 1 'STATISTICS RECORDS, MAXREC=',I5) - IERCHK=51 - RETURN - ELSE - NUNI=NUNI+1 - NUMOKA(NUNI)=NUNI - UNIREC(NUNI)=DUMREC - IFILE(NUNI)=IUNTVI - ENDIF - - ELSE - - IF(NDUP .EQ. MAXREC) THEN - WRITE(6,51) MAXREC - IERCHK=53 - RETURN - ELSE - NDUP=NDUP+1 - DUPREC(NDUP)=DUMREC - ENDIF - ENDIF - NSTART=1 - - 100 continue - - ENDDO - - 130 CONTINUE - -C LOOP FOR MORE FILES IF REQUESTED - - IF(NRFILE .EQ. 0) WRITE(6,133) INPFIL(IUNTVI-29) - 133 FORMAT(/'###',A,' FILE IS EMPTY.') - - IUNTVI=IUNTVI+1 - IF(IUNTVI-IUNTIN .LT. MAXUNT) THEN - NRFILE=0 - WRITE(6,141) IUNTVI,MAXUNT - 141 FORMAT(/'...LOOPING TO READ UNIT NUMBER',I3,'. MAXUNT=',I3) - GO TO 10 - ENDIF - - WRITE(6,151) NALREC - 151 FORMAT(//'...TOTAL NUMBER OF RECORDS=',I4) - WRITE(6,153) NUNI,(NUMOKA(NR),UNIREC(NR),NR=1,NUNI) - 153 FORMAT(/'...',I4,' RECORDS ARE UNIQUE, BUT NOT ERROR CHECKED.'// - 1 (' ...',I4,'...',A)) - WRITE(6,157) NDUP,(NR,DUPREC(NR),NR=1,NDUP) - 157 FORMAT(/'...',I4,' RECORDS ARE EXACT DUPLICATES:'//(' ...',I4, - 1 '...',A)) - - IF(NUNI .EQ. 0) THEN - WRITE(6,161) - 161 FORMAT(/'###THERE ARE NO RECORDS TO BE READ. THIS PROGRAM ', - 1 'WILL COMPLETE FILE PROCESSING AND LEAVE AN EMPTY ', - 2 ' "CURRENT" FILE!!') - IERCHK=161 - RETURN 1 - ENDIF - - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: BLNKCK CHECKS FOR PROPER COLUMNAR FORMAT -C PRGMMR: S. LORD ORG: NP22 DATE: 1990-11-01 -C -C ABSTRACT: CHECKS ALL INPUT RECORDS FOR PROPER COLUMNAR FORMAT. -C THE TABULAR INPUT RECORD HAS SPECIFIED BLANK COLUMNS. IF -C NONBLANK CHARACTERS ARE FOUND IN SPECIFIED BLANK COLUMNS, -C AN OBVIOUS ERROR HAS OCCURRED. THE RECORD IS REJECTED IN THIS -C CASE. -C -C PROGRAM HISTORY LOG: -C 1990-11-01 S. LORD -C 1994-06-20 S. LORD MODIFIED MAXCHK FOR THE GFDL FORMAT -C -C USAGE: CALL BLNKCK(NTEST,NOKAY,NBAD,IFBLNK,NUMTST,NUMOKA,NUMBAD, -C ZZZREC,NNNREC,TSTREC,BADREC,OKAREC) -C INPUT ARGUMENT LIST: -C NTEST - NUMBER OF RECORDS TO BE TESTED. -C NUMTST - INTEGER ARRAY CONTAINING INDEX NUMBER OF EACH RECORD -C - TO BE TESTED. -C ZZZREC - CHARACTER VARIABLE CONTAINING VARIABLE NAMES. -C NNNREC - CHARACTER VARIABLE CONTAINING COLUMN NUMBERS. -C TSTREC - CHARACTER ARRAY CONTAINING RECORDS TO BE TESTED. -C -C OUTPUT ARGUMENT LIST: -C NOKAY - NUMBER OF RECORDS THAT PASSED THE BLANK CHECK. -C NBAD - NUMBER OF RECORDS THAT FAILED THE BLANK CHECK. -C IFBLNK - INTEGER ARRAY CONTAINING ERROR CODE FOR EACH INPUT -C - RECORD. 
SEE COMMENTS IN PGM FOR KEY TO ERROR CODES. -C NUMOKA - INTEGER ARRAY CONTAINING INDEX NUMBER OF EACH GOOD -C - RECORD. -C NUMBAD - INTEGER ARRAY CONTAINING INDEX NUMBER OF EACH BAD -C - RECORD. -C BADREC - CHARACTER ARRAY CONTAINING BAD RECORDS THAT FAILED -C - THE BLANK CHECK. -C OKAREC - CHARACTER ARRAY CONTAINING ALL RECORDS THAT PASSED -C - THE BLANK CHECK. -C -C OUTPUT FILES: -C UNIT 06 - STANDARD OUTPUT PRINT -C -C REMARKS: NONE. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE BLNKCK(NTEST,NOKAY,NBAD,IFBLNK,NUMTST,NUMOKA,NUMBAD, - 1 ZZZREC,NNNREC,TSTREC,BADREC,OKAREC) - - PARAMETER (MAXCHK=95) - PARAMETER (NERCBL=3) - PARAMETER (MAXREC=1000) - - SAVE - - CHARACTER*(*) ZZZREC,NNNREC,TSTREC(0:NTEST),BADREC(MAXREC), - 1 OKAREC(NTEST) - CHARACTER ERCBL(NERCBL)*60 - - PARAMETER (MAXCHR=95) - PARAMETER (MAXVIT=15) - - CHARACTER NAMVAR*5 - - DIMENSION ISTVAR(MAXVIT) - - DIMENSION NAMVAR(MAXVIT+1) - - DIMENSION IFBLNK(MAXREC),NUMOKA(NTEST),NUMBAD(MAXREC), - 1 NUMTST(NTEST) - - DATA ISTVAR/20,29,34,39,45,49,53,58,63,68,71,75,80,85,90/, - 1 LENHED/18/ - - DATA NAMVAR/'DATE ','TIME ','LAT. ','LONG.','DIR ','SPEED', - 1 'PCEN ','PENV ','RMAX ','VMAX ','RMW ','R15NE', - 2 'R15SE','R15SW','R15NW','DEPTH'/ - - DATA ERCBL - 1 /'1 : LAST NON-BLANK CHARACTER IS IN THE WRONG COLUMN ', - 2 '18 : FIRST 18 COLUMNS ARE BLANK ', - 3 '19-87: FIRST NON-BLANK CHARACTER FOUND IN THIS COLUMN '/ - -C ERROR CODES FOR BAD RECORDS RETURNED IN IFBLNK ARE AS FOLLOWS: -C 1: LAST NON-BLANK CHARACTER IS IN THE WRONG COLUMN -C 18 : FIRST 18 COLUMNS ARE BLANK -C 19-87: NON-BLANK CHARACTER FOUND IN A BLANK COLUMN. ERROR -C CODE GIVES COLUMN OF LEFT-MOST OCCURRENCE - -C SET COUNTERS FOR INITIAL SORTING OF ALL RECORDS. ALL SUBSEQUENT -C CALLS TO THIS ROUTINE SHOULD BE FOR SINGLE RECORDS - - WRITE(6,1) NTEST - 1 FORMAT(//'...ENTERING BLNKCK, LOOKING FOR WRONGLY POSITIONED ', - 1 ' BLANKS. NTEST=',I4//) - - NADD=0 - IF(NREC .GT. 0) THEN - NOKAY=0 - NBAD =0 - ENDIF - -C DO ALL RECORDS - - DO NREC=1,NTEST - IETYP=0 - -C FIND THE RIGHT-MOST NON-BLANK CHARACTER: IT SHOULD CORRESPOND -C TO THE MAXIMUM NUMBER OF CHARACTERS IN THE MESSAGE (MAXCHR) - - DO ICH=MAXCHK,1,-1 - IF(TSTREC(NREC)(ICH:ICH) .NE. ' ') THEN - IBLANK=ICH - GO TO 31 - ENDIF - ENDDO - 31 CONTINUE -C WRITE(6,3311) IBLANK,TSTREC(NREC)(1:IBLANK) -C3311 FORMAT(/'...TESTING LENGTH OF RECORD, IBLANK,TSTREC=',I4/4X,'...', -C 1 A,'...') -C - IF(IBLANK .NE. MAXCHR) THEN - IETYP=1 - WRITE(6,33) NREC,IBLANK,NNNREC,ZZZREC,TSTREC(NREC) - 33 FORMAT(/'...RECORD #',I3,' HAS RIGHT-MOST NON-BLANK CHARACTER ', - 1 'AT POSITION',I4/2(1X,'@@@',A,'@@@'/),4X,A) - GO TO 41 - ENDIF - -C CHECK FOR BLANKS IN THE HEADER SECTION (THE FIRST 18 COLUMNS) - - IF(TSTREC(NREC)(1:LENHED) .EQ. ' ') THEN - IETYP=LENHED - WRITE(6,35) NREC,NNNREC,ZZZREC,TSTREC(NREC) - 35 FORMAT(/'...RECORD #',I3,' HAS BLANK HEADER SECTION.'/2(1X,'@@@', - 1 A,'@@@'/),4X,A) - ENDIF - -C CHECK COLUMN BLANKS STARTING TO THE LEFT OF THE YYMMDD GROUP - - DO IBL=1,MAXVIT - IF(TSTREC(NREC)(ISTVAR(IBL)-1:ISTVAR(IBL)-1) .NE. ' ') THEN - IETYP=ISTVAR(IBL)-1 - WRITE(6,39) TSTREC(NREC)(ISTVAR(IBL)-1:ISTVAR(IBL)-1), - 1 ISTVAR(IBL)-1,NAMVAR(IBL),NNNREC,ZZZREC,TSTREC(NREC) - 39 FORMAT(/'...NONBLANK CHARACTER ',A1,' AT POSITION ',I3, - 1 ' PRECEEDING VARIABLE',1X,A/2(1X,'@@@',A,'@@@'/),4X,A) - GO TO 41 - ENDIF - ENDDO - - 41 IFBLNK(NUMTST(NREC))=IETYP - IF(IETYP .GT. 
0) THEN - NADD=NADD+1 - NUMBAD(NADD+NBAD)=NUMTST(NREC) - BADREC(NADD+NBAD)=TSTREC(NREC) - ELSE - NOKAY=NOKAY+1 - NUMOKA(NOKAY)=NUMTST(NREC) - OKAREC(NOKAY)=TSTREC(NREC) - ENDIF - - ENDDO - - print *, ' ' - IF(NTEST .GT. 1) THEN - WRITE(6,101) NOKAY,NADD,NTEST,(ERCBL(NER),NER=1,NERCBL) - 101 FORMAT(/'...RESULTS OF THE GLOBAL BLANK CHECK ARE: NOKAY=',I4, - 1 ' AND NADD=',I4,' FOR A TOTAL OF ',I4,' RECORDS.'//4X, - 2 'ERROR CODES ARE:'/(6X,A)) - WRITE(6,103) - 103 FORMAT(/'...OKAY RECORDS ARE:',100X,'ERC'/) - DO NOK=1,NOKAY - WRITE(6,109) NOK,NUMOKA(NOK),OKAREC(NOK),IFBLNK(NUMOKA(NOK)) - 109 FORMAT(3X,I4,'...',I4,'...',A,'...',I3) - ENDDO - IF(NADD .GT. 0) WRITE(6,111) (NBAD+NBA,NUMBAD(NBAD+NBA), - 1 BADREC(NBAD+NBA), - 2 IFBLNK(NUMBAD(NBAD+NBA)), - 3 NBA=1,NADD) - 111 FORMAT(/' ADDED BAD RECORDS ARE:',95X,'ERC'/(3X,I4,'...',I4, - 1 '...',A,'...',I3)) - NBAD=NBAD+NADD - ELSE - WRITE(6,113) IETYP,TSTREC(NTEST),NOKAY - 113 FORMAT(/'...BLANK TEST FOR SINGLE RECORD, BLANK ERROR CODE=',I2, - 1 ' RECORD IS:'/4X,'...',A/4X,'NOKAY=',I2) - ENDIF - - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: READCK CHECKS READABILITY OF EACH RECORD -C PRGMMR: S. LORD ORG: NP22 DATE: 1990-11-01 -C -C ABSTRACT: CHECKS READABILITY OF EACH RECORD. SINCE THE INPUT -C RECORD FORMAT CONTAINS ONLY NUMBERS AND LETTERS, -C -C PROGRAM HISTORY LOG: -C 1990-11-01 S. LORD -C 1992-09-18 S. J. LORD ADDED CHECK FOR CORRECT MISSING DATA IN READCK -C -C USAGE: CALL READCK(NTEST,NOKAY,NBAD,IFREAD,NUMTST,NUMOKA,NUMBAD, -C ZZZREC,NNNREC,TSTREC,BADREC,OKAREC) -C INPUT ARGUMENT LIST: -C NTEST - NUMBER OF RECORDS TO BE TESTED. -C NUMTST - INTEGER ARRAY CONTAINING INDEX NUMBER OF EACH RECORD -C - TO BE TESTED. -C ZZZREC - CHARACTER VARIABLE CONTAINING VARIABLE NAMES. -C NNNREC - CHARACTER VARIABLE CONTAINING COLUMN NUMBERS. -C TSTREC - CHARACTER ARRAY CONTAINING RECORDS TO BE TESTED. -C -C OUTPUT ARGUMENT LIST: -C NOKAY - NUMBER OF RECORDS THAT PASSED THE BLANK CHECK. -C NBAD - NUMBER OF RECORDS THAT FAILED THE BLANK CHECK. -C IFREAD - INTEGER ARRAY CONTAINING ERROR CODE FOR EACH INPUT -C - RECORD. SEE COMMENTS IN PGM FOR KEY TO ERROR CODES. -C NUMOKA - INTEGER ARRAY CONTAINING INDEX NUMBER OF EACH GOOD -C - RECORD. -C NUMBAD - INTEGER ARRAY CONTAINING INDEX NUMBER OF EACH BAD -C - RECORD. -C BADREC - CHARACTER ARRAY CONTAINING BAD RECORDS THAT FAILED -C - THE BLANK CHECK. -C OKAREC - CHARACTER ARRAY CONTAINING ALL RECORDS THAT PASSED -C - THE BLANK CHECK. -C -C OUTPUT FILES: -C UNIT 06 - STANDARD OUTPUT PRINT -C -C REMARKS: NONE. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE READCK(NTEST,NOKAY,NBAD,IFREAD,NUMTST,NUMOKA,NUMBAD, - 1 ZZZREC,NNNREC,TSTREC,BADREC,OKAREC) - - PARAMETER (NERCRD=2) - PARAMETER (MAXREC=1000) - - SAVE - - CHARACTER*(*) ZZZREC,NNNREC,TSTREC(0:NTEST),BADREC(MAXREC), - 1 OKAREC(NTEST),ERCRD(NERCRD)*60 - - PARAMETER (MAXVIT=15) - PARAMETER (ITERVR=10) - - CHARACTER FMTVIT*6,NAMVAR*5 - - DIMENSION IVTVAR(MAXVIT),ISTVAR(MAXVIT),IENVAR(MAXVIT) - - DIMENSION NAMVAR(MAXVIT+1),FMTVIT(MAXVIT),MISSNG(MAXVIT) - - DIMENSION IFREAD(MAXREC),NUMOKA(NTEST),NUMBAD(MAXREC), - 1 NUMTST(NTEST) - - DATA FMTVIT/'(I8.8)','(I4.4)','(I3.3)','(I4.4)',2*'(I3.3)', - 1 3*'(I4.4)','(I2.2)','(I3.3)',4*'(I4.4)'/, - 2 MISSNG/-9999999,-999,-99,-999,2*-99,3*-999,-9,-99,4*-999/, - 3 ISTVAR/20,29,34,39,45,49,53,58,63,68,71,75,80,85,90/, - 4 IENVAR/27,32,36,42,47,51,56,61,66,69,73,78,83,88,93/ - - DATA NAMVAR/'DATE ','TIME ','LAT. 
','LONG.','DIR ','SPEED', - 1 'PCEN ','PENV ','RMAX ','VMAX ','RMW ','R15NE', - 2 'R15SE','R15SW','R15NW','DEPTH'/ - - DATA NUM/1/ - - DATA ERCRD - 1 /'N: INDEX OF THE FIRST UNREADABLE RECORD ', - 2 '20-N: WRONG MISSING CODE '/ - -C ERROR CODE FOR UNREADABLE RECORD IS THE INDEX OF THE FIRST -C UNREADABLE RECORD. -C ***NOTE: THERE MAY BE ADDITIONAL UNREADABLE RECORDS TO THE -C RIGHT. - - WRITE(6,1) NTEST - 1 FORMAT(//'...ENTERING READCK, LOOKING FOR UNREADABLE (NOT ', - 1 ' CONTAINING INTEGERS) PRIMARY AND SECONDARY VARIABLES,', - 2 ' NTEST=',I4//) - - NADD=0 - -C DO ALL RECORDS - - DO NREC=1,NTEST - IETYP=0 - - DO IV=1,ITERVR - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVAR(IV),IERDEC,FMTVIT(IV), - 1 TSTREC(NREC)) - IF(IERDEC .NE. 0) THEN - IETYP=IV - WRITE(6,7) NREC,ISTVAR(IV),NAMVAR(IV),NNNREC,ZZZREC,TSTREC(NREC) - 7 FORMAT(/'...RECORD #',I3,' IS UNREADABLE AT POSITION',I3, - 1 ' FOR VARIABLE ',A,'.'/2(1X,'@@@',A,'@@@'/),4X,A) - GO TO 11 - ENDIF - ENDDO - 11 CONTINUE - - DO IV=1,ITERVR - IF(IVTVAR(IV) .LT. 0 .AND. IVTVAR(IV) .NE. MISSNG(IV)) THEN - IETYP=20-IV - WRITE(TSTREC(NREC) (ISTVAR(IV):IENVAR(IV)),FMTVIT(IV))MISSNG(IV) - ENDIF - ENDDO - - IFREAD(NUMTST(NREC))=IETYP - IF(IETYP .GT. 0) THEN - NADD=NADD+1 - NUMBAD(NADD+NBAD)=NUMTST(NREC) - BADREC(NADD+NBAD)=TSTREC(NREC) - ELSE - NOKAY=NOKAY+1 - NUMOKA(NOKAY)=NUMTST(NREC) - OKAREC(NOKAY)=TSTREC(NREC) - ENDIF - - ENDDO - - WRITE(6,101) NOKAY,NADD,NTEST,(ERCRD(NER),NER=1,NERCRD) - 101 FORMAT(//'...RESULTS OF THE READABILITY CHECK ARE: NOKAY=',I4, - 1 ' AND NADD=',I4,' FOR A TOTAL OF ',I4,' RECORDS.'//4X, - 2 'ERROR CODES ARE:'/(6X,A)) - WRITE(6,103) - 103 FORMAT(/'...OKAY RECORDS ARE:',100X,'ERC'/) - DO NOK=1,NOKAY - WRITE(6,109) NOK,NUMOKA(NOK),OKAREC(NOK),IFREAD(NUMOKA(NOK)) - 109 FORMAT(3X,I4,'...',I4,'...',A,'...',I3) - ENDDO - IF(NADD .GT. 0) WRITE(6,111) (NBAD+NBA,NUMBAD(NBAD+NBA), - 1 BADREC(NBAD+NBA), - 2 IFREAD(NUMBAD(NBAD+NBA)), - 3 NBA=1,NADD) - 111 FORMAT(/' ADDED BAD RECORDS ARE:',95X,'ERC'/(3X,I4,'...',I4, - 1 '...',A,'...',I3)) - NBAD=NBAD+NADD - - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: DTCHK CHECK FOR VALID DATE FOR ALL RECORDS -C PRGMMR: S. LORD ORG: NP22 DATE: 1990-11-01 -C -C ABSTRACT: CHECKS FOR VALID DATE IN ALL RECORDS. -C -C PROGRAM HISTORY LOG: -C 1990-11-01 S. LORD -C -C USAGE: CALL DTCHK(NTEST,NOKAY,NBAD,NTBP,IFDTCK,NUMTST,NUMOKA, -C NUMBAD,NUMTBP,DAYMN,DAYMX1,DAYMX2,DAYOFF,TSTREC, -C BADREC,OKAREC,TBPREC) -C INPUT ARGUMENT LIST: -C NTEST - NUMBER OF RECORDS TO BE TESTED. -C NUMTST - INTEGER ARRAY CONTAINING INDEX NUMBER OF EACH RECORD -C - TO BE TESTED. -C DAYMN - EARLIEST (MINIMUM) DATE FOR ACCEPTANCE OF A RECORD. -C - UNITS ARE DDD.FFF, WHERE DDD=JULIAN DAY, FFF=FRAC- -C - TIONAL DAY (E.G. .5=1200 UTC). -C DAYMX1 - LATEST (MAXIMUM) DATE FOR ACCEPTANCE OF A RECORD. -C - UNITS ARE FRACTIONAL JULIAN DAYS AS IN DAYMN ABOVE. -C DAYMX2 - EARLIEST (MINIMUM) DATE FOR REJECTION OF A RECORD. -C - RECORDS WITH DATES BETWEEN DAYMX1 AND DAYMX2 ARE -C - ASSUMED TO BELONG TO A FUTURE CYCLE AND ARE THROWN -C - BACK INTO THE POND, I.E. NEITHER REJECTED OR ACCEPTED. -C - UNITS ARE FRACTIONAL JULIAN DAYS AS IN DAYMN ABOVE. -C DAYOFF - OFFSET DAYS WHEN ACCEPTANCE WINDOW CROSSES YEAR -C BOUNDARY -C TSTREC - CHARACTER ARRAY CONTAINING RECORDS TO BE TESTED. -C -C OUTPUT ARGUMENT LIST: -C NOKAY - NUMBER OF RECORDS THAT PASSED THE BLANK CHECK. -C NBAD - NUMBER OF RECORDS THAT FAILED THE BLANK CHECK. 
-C NTBP - NUMBER OF RECORDS THAT ARE TO BE RESTORED TO THE -C - INPUT FILES (THROWN BACK INTO THE POND). -C IFDTCK - INTEGER ARRAY CONTAINING ERROR CODE FOR EACH INPUT -C - RECORD. SEE COMMENTS IN PGM FOR KEY TO ERROR CODES. -C NUMOKA - INTEGER ARRAY CONTAINING INDEX NUMBER OF EACH GOOD -C - RECORD. -C NUMBAD - INTEGER ARRAY CONTAINING INDEX NUMBER OF EACH BAD -C - RECORD. -C NUMTBP - INTEGER ARRAY CONTAINING INDEX NUMBER OF EACH RECORD -C - TO BE THROWN BACK INTO THE POND. -C BADREC - CHARACTER ARRAY CONTAINING BAD RECORDS THAT FAILED -C - THE BLANK CHECK. -C OKAREC - CHARACTER ARRAY CONTAINING ALL RECORDS THAT PASSED -C - THE BLANK CHECK. -C TBPREC - CHARACTER ARRAY CONTAINING ALL RECORDS THAT ARE TO -C - BE THROWN BACK INTO THE POND. -C -C OUTPUT FILES: -C UNIT 06 - STANDARD OUTPUT PRINT -C -C REMARKS: NONE. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE DTCHK(NTEST,NOKAY,NBAD,NTBP,IFDTCK,NUMTST,NUMOKA, - 1 NUMBAD,NUMTBP,DAYMN,DAYMX1,DAYMX2,DAYOFF,TSTREC, - 2 BADREC,OKAREC,TBPREC) - - PARAMETER (NERCDT=8) - PARAMETER (MAXREC=1000) - PARAMETER (MAXTBP=20) - - SAVE - - CHARACTER*(*) TSTREC(0:NTEST),BADREC(MAXREC),OKAREC(NTEST), - 1 TBPREC(MAXTBP),ERCDT(NERCDT)*60 - - PARAMETER (MAXVIT=15) - - CHARACTER FMTVIT*6 - - DIMENSION IVTVAR(MAXVIT),ISTVAR(MAXVIT),IENVAR(MAXVIT) - - DIMENSION FMTVIT(MAXVIT) - - EQUIVALENCE (IVTVAR(1),IDATEZ),(IVTVAR(2),IUTCZ) - - DIMENSION RINC(5) - - DIMENSION IFDTCK(MAXREC),NUMOKA(NTEST),NUMBAD(MAXREC), - 1 NUMTST(NTEST),NUMTBP(MAXTBP),IDAMX(12) - - DATA FMTVIT/'(I8.8)','(I4.4)','(I3.3)','(I4.4)',2*'(I3.3)', - 1 3*'(I4.4)','(I2.2)','(I3.3)',4*'(I4.4)'/, - 2 ISTVAR/20,29,34,39,45,49,53,58,63,68,71,75,80,85,90/, - 3 IENVAR/27,32,36,42,47,51,56,61,66,69,73,78,83,88,93/ - - DATA NUM/1/,IYRMN/0/,IYRMX/9999/,IMOMN/1/,IMOMX/12/,IDAMN/1/, - 1 IDAMX/31,29,31,30,31,30,31,31,30,31,30,31/,IHRMN/0/, - 2 IHRMX/23/,IMINMN/0/,IMINMX/59/ - - DATA ERCDT - 1 /' 1: YEAR OUT OF RANGE ', - 2 ' 2: MONTH OUT OF RANGE ', - 3 ' 3: DAY OUT OF RANGE ', - 4 ' 4: HOUR OUT OF RANGE ', - 5 ' 5: MINUTE OUT OF RANGE ', - 6 ' 6: DATE/TIME LESS THAN ALLOWED WINDOW ', - 7 ' 7: DATE/TIME GREATER THAN ALLOWED MAXIMUM WINDOW ', - 8 '-8: DATE/TIME PROBABLY VALID AT LATER CYCLE TIME (TBIP) '/ - -C ERROR CODES FOR BAD RECORDS RETURNED IN IFDTCK ARE AS FOLLOWS: -C 1: YEAR OUT OF RANGE -C 2: MONTH OUT OF RANGE -C 3: DAY OUT OF RANGE -C 4: HOUR OUT OF RANGE -C 5: MINUTE OUT OF RANGE -C 6: DATE/TIME LESS THAN ALLOWED WINDOW -C 7: DATE/TIME GREATER THAN ALLOWED WINDOW -C -8: DATE/TIME PROBABLY VALID AT LATER CYCLE TIME (THROWN BACK -C INTO THE POND) - - WRITE(6,1) NTEST,NOKAY,NBAD,DAYMN,DAYMX1,DAYMX2 - 1 FORMAT(//'...ENTERING DTCHK, LOOKING FOR BAD DATE/TIME GROUPS. ', - 1 'NTEST,NOKAY,NBAD=',3I4,'.'/4X,'DAYMN,DAYMX1,DAYMX2=', - 2 3F12.4//) - - NADD=0 - NTBPZ=0 - DO NREC=1,NTEST - - IETYP=0 - DO IV=1,2 - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVAR(IV),IERDEC,FMTVIT(IV), - 1 TSTREC(NREC)) - ENDDO - -C CONVERT DATE/TIME TO FLOATING POINT DATE - - CALL ZTIME(IDATEZ,IUTCZ,IYR,IMO,IDA,IHR,IMIN) - CALL W3DIFDAT((/IYR,IMO,IDA,0,0,0,0,0/),(/1899,12,31,0,0,0,0,0/), - $ 1,RINC) - JDY = NINT(RINC(1)) - CALL FLDAY(JDY,IHR,IMIN,DAYZ) - - IF(IYR .LT. IYRMN .OR. IYR .GT. IYRMX) THEN - IETYP=1 - WRITE(6,21) IYR,IYRMN,IYRMX,TSTREC(NREC) - 21 FORMAT(/'******DECODED YEAR OUT OF ALLOWED BOUNDS, IYR,IYRMN,', - 1 'IYRMX,RECORD=',3I9/8X,A) - ENDIF - - IF(IMO .LT. IMOMN .OR. IMO .GT. 
IMOMX) THEN - IETYP=2 - WRITE(6,31) IMO,IMOMN,IMOMX,TSTREC(NREC) - 31 FORMAT(/'******DECODED MONTH OUT OF ALLOWED BOUNDS, IMO,IMOMN,', - 1 'IMOMX,RECORD=',3I9/8X,A/5X,'...(DAY WILL NOT BE CHECKED)') - - ELSE - IF(IDA .LT. IDAMN .OR. IDA .GT. IDAMX(IMO)) THEN - IETYP=3 - WRITE(6,41) IDA,IDAMN,IDAMX,TSTREC(NREC) - 41 FORMAT(/'******DECODED DAY OUT OF ALLOWED BOUNDS, IDA,IDAMN,', - 1 'IDAMX,RECORD=',3I9/8X,A) - ENDIF - ENDIF - - IF(IHR .LT. IHRMN .OR. IHR .GT. IHRMX) THEN - IETYP=4 - WRITE(6,51) IHR,IHRMN,IHRMX,TSTREC(NREC) - 51 FORMAT(/'******DECODED HOUR OUT OF ALLOWED BOUNDS, IHR,IHRMN,', - 1 'IHRMX,RECORD=',3I9/8X,A) - ENDIF - - IF(IMIN .LT. IMINMN .OR. IMIN .GT. IMINMX) THEN - IETYP=5 - WRITE(6,61) IMIN,IMINMN,IMINMX,TSTREC(NREC) - 61 FORMAT(/'******DECODED MINUTE OUT OF ALLOWED BOUNDS, IMIN,', - 1 'IMINMN,IMINMX,RECORD=',3I9/8X,A) - ENDIF - - IF(IETYP .EQ. 0 .AND. DAYZ+DAYOFF .LT. DAYMN) THEN - IETYP=6 - WRITE(6,71) DAYZ,DAYMN,TSTREC(NREC) - 71 FORMAT(/'******DECODED DAY LESS THAN MINIMUM WINDOW, DAYZ,DAYMN,', - 1 'RECORD=',2F12.4/8X,A) - ENDIF - - IF(IETYP .EQ. 0 .AND. DAYZ+DAYOFF .GT. DAYMX2) THEN - IETYP=7 - WRITE(6,73) DAYZ,DAYMX2,TSTREC(NREC) - 73 FORMAT(/'******DECODED DAY EXCEEDS MAXIMUM WINDOW, DAYZ,DAYMX2,', - 1 'RECORD=',2F12.4/8X,A) - ENDIF - - IF(IETYP .EQ. 0 .AND. DAYZ .GT. DAYMX1) THEN - IETYP=-8 - WRITE(6,77) DAYZ,DAYMX1,TSTREC(NREC) - 77 FORMAT(/'###DECODED DAY PROBABLY VALID AT FUTURE CYCLE TIME. ', - 1 'DAYZ,DAYMX1,RECORD=',2F12.4/8X,A/4X, 'THIS RECORD WILL ', - 2 'BE THROWN BACK IN THE POND.') - ENDIF - - IFDTCK(NUMTST(NREC))=IETYP - IF(IETYP .GT. 0) THEN - NADD=NADD+1 - NUMBAD(NADD+NBAD)=NUMTST(NREC) - BADREC(NADD+NBAD)=TSTREC(NREC) - ELSE IF(IETYP .EQ. 0) THEN - NOKAY=NOKAY+1 - NUMOKA(NOKAY)=NUMTST(NREC) - OKAREC(NOKAY)=TSTREC(NREC) - ELSE - NTBPZ=NTBPZ+1 - NUMTBP(NTBPZ)=NUMTST(NREC) - TBPREC(NTBPZ)=TSTREC(NREC) - ENDIF - - ENDDO - - NTBP=NTBPZ - WRITE(6,101) NOKAY,NADD,NTBP,NTEST,(ERCDT(NER),NER=1,NERCDT) - 101 FORMAT(//'...RESULTS OF THE DATE/TIME CHECK ARE: NOKAY=',I4, - 1 ' ,NADD=',I4,' AND NTBP=',I4,' FOR A TOTAL OF',I4, - 2 ' RECORDS.'//4X,'ERROR CODES ARE:'/(6X,A)) - - WRITE(6,103) - 103 FORMAT(/'...OKAY RECORDS ARE:',100X,'ERC'/) - DO NOK=1,NOKAY - WRITE(6,109) NOK,NUMOKA(NOK),OKAREC(NOK),IFDTCK(NUMOKA(NOK)) - 109 FORMAT(3X,I4,'...',I4,'...',A,'...',I3) - ENDDO - - WRITE(6,113) - 113 FORMAT(/'...RECORDS THAT WILL BE RETURNED TO THE INPUT FILES ', - 1 '(THROWN BACK INTO THE POND) ARE:',36X,'ERC'/) - DO NTB=1,NTBP - WRITE(6,119) NTB,NUMTBP(NTB),TBPREC(NTB), - 1 IFDTCK(NUMTBP(NTB)) - 119 FORMAT(3X,I4,'...',I4,'...',A,'...',I3) - ENDDO - - IF(NADD .GT. 0) WRITE(6,131) (NBAD+NBA,NUMBAD(NBAD+NBA), - 1 BADREC(NBAD+NBA), - 2 IFDTCK(NUMBAD(NBAD+NBA)), - 3 NBA=1,NADD) - 131 FORMAT(/' ADDED BAD RECORDS ARE:',95X,'ERC'/(3X,I4,'...',I4, - 1 '...',A,'...',I3)) - NBAD=NBAD+NADD - - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: SETMSK CHECKS ALL RECORDS FOR CORRECT LAT/LON -C PRGMMR: S. 
LORD ORG: NP22 DATE: 1990-11-01 -C -C ABSTRACT: INPUT RECORDS ARE CHECKED FOR PHYSICALLY REALISTIC -C LATITUDE AND LONGITUDE (-70 Read in RECORD from tcvitals file -- contains a', - $ ' 2-digit year "',SCRATC(NCHECK)(20:21),'"' - PRINT *, ' ' - PRINT *, 'From unit ',iuntho,'; SCRATC(NCHECK)-9: ', - $ scratc(ncheck) - PRINT *, ' ' - DUMY2K(1:19) = SCRATC(NCHECK)(1:19) - IF(SCRATC(NCHECK)(20:21).GT.'20') THEN - DUMY2K(20:21) = '19' - ELSE - DUMY2K(20:21) = '20' - ENDIF - DUMY2K(22:100) = SCRATC(NCHECK)(20:100) - SCRATC(NCHECK) = DUMY2K - PRINT *, ' ' - PRINT *, '==> 2-digit year converted to 4-digit year "', - $ SCRATC(NCHECK)(20:23),'" via windowing technique' - PRINT *, ' ' - PRINT *, 'From unit ',IUNTHo,'; SCRATC(NCHECK)-9: ', - $ scratc(ncheck) - PRINT *, ' ' - - ELSE IF(SCRATC(NCHECK)(37:37).EQ.'N' .OR. - 1 SCRATC(NCHECK)(37:37).EQ.'S') THEN - -C ... THIS RECORD CONTAINS THE NEW 4-DIGIT FORM OF THE YEAR -C ... NO CONVERSION NECESSARY SINCE THIS SUBSEQUENT LOGIC EXPECTS THIS - - PRINT *, ' ' - PRINT *, '==> Read in RECORD from tcvitals file -- contains a', - $ ' 4-digit year "',SCRATC(NCHECK)(20:23),'"' - PRINT *, ' ' - PRINT *, 'From unit ',iuntho,'; SCRATC(NCHECK)-9: ', - $ SCRATC(NCHECK) - PRINT *, ' ' - PRINT *, '==> No conversion necessary' - PRINT *, ' ' - - ELSE - - PRINT *, ' ' - PRINT *, '***** Cannot determine if this record contains ', - $ 'a 2-digit year or a 4-digit year - skip it and try reading ', - $ 'the next record' - PRINT *, ' ' - GO TO 30 - - END IF - - WRITE(6,19) NCHECK,SCRATC(NCHECK) - NCPY=NCPY+1 - NCHECK=NCHECK+1 - GO TO 30 - - 40 CONTINUE - NCHECK=NCHECK-1 - WRITE(6,41) NCPY,NCHECK - 41 FORMAT('...',I3,' RECORDS COPIED FOR A TOTAL OF ',I4,' TO BE ', - 1 'CHECKED.') - - NADD=0 - DO NREC=1,NTEST - -C INITIALIZE THE CHARACTER STRING AND ERROR CODE - - BUFINZ=TSTREC(NREC) - IETYP=0 - NDUP =0 - -C SET THE FLAG FOR ERROR TYPE=4 (PREVIOUS RECORD WITH DUPLICATE -C RSMC, DATE/TIME AND STORM ID APPEARS TO BE VALID) - -C RECORDS THAT WERE MARKED ERRONEOUS EARLIER DO NOT RECEIVE -C FURTHER PROCESSING WITH THIS VERSION OF THE CODE. - - IF(IDUPID(NREC) .GT. 0) THEN - IETYP=IDUPID(NREC) - GO TO 190 - ENDIF - -C BASIN CHECK - - NIDBSN=999 - DO NBA=1,NBASIN - IF(STMIDZ(3:3) .EQ. IDBASN(NBA)) THEN - NIDBSN=NBA - ENDIF - ENDDO - - IF(NIDBSN .GT. 130) THEN - IETYP=1 - WRITE(6,51) NREC,STMIDZ(3:3),(IDBASN(NBA),NBA=1,NBASIN),NNNREC, - 1 ZZZREC,TSTREC(NREC) - 51 FORMAT(/'******RECORD #',I3,' HAS BAD BASIN CODE=',A1,'. ALLOWED', - 2 ' CODES ARE:',1X,11(A1,1X)/2(1X,'@@@',A,'@@@'/),4X,A) - -C CHECK THAT THE LAT/LON CORRESPONDS TO A VALID BASIN - - ELSE - DO IV=3,4 - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVAR(IV),IERDEC,FMTVIT(IV), - 1 TSTREC(NREC)) - VITVAR(IV)=REAL(IVTVAR(IV))*VITFAC(IV) - ENDDO - IF(LATNS .EQ. 'S') STMLTZ=-STMLTZ - IF(LONEW .EQ. 'W') STMLNZ=360.-STMLNZ - CALL BASNCK(STMIDZ,STMLTZ,STMLNZ,NBAZ,IPRT,IER) - IF(IER .EQ. 3) THEN - IETYP=6 - WRITE(6,61) NREC,STMIDZ,STMLTZ,STMLNZ,IETYP,NNNREC,ZZZREC, - 1 TSTREC(NREC) - 61 FORMAT(/'******RECORD #',I3,' WITH STMID=',A,' HAS LAT/LON ', - 1 'OUTSIDE BASIN LIMITS. LAT/LON=',2F9.1,' IETYP=',I3/ - 2 2(1X,'@@@',A,'@@@'/),4X,A) - ENDIF - ENDIF - - IF(IETYP .EQ. 0) THEN - -C CHECK CODED STORM ID NUMBER: ID NUMBERS GREATER >= 80 ARE -C CONSIDERED ERRONEOUS. ! CHG. TESTID - - CALL DECVAR(ISTIDC,ISTIDC+ITWO-1,KSTORM,IERDEC,'(I2.2)', - 1 STMIDZ) - IF(KSTORM .LT. 1 .OR. KSTORM .GE. ISTMAX .OR. IERDEC .NE. 0) THEN - IETYP=2 - IF(KSTORM .GE. ISTMAX .AND. KSTORM .LT. 
100) THEN - WRITE(6,94) NREC,STMIDZ(ISTIDC:ISTIDC+ITWO-1),NNNREC,ZZZREC, - 1 TSTREC(NREC) - 94 FORMAT(/'******RECORD #',I3,' HAS TEST STORM NUMBER=',A2, - 1 ' -- CONSIDER IT BAD'/2(1X,'@@@',A,'@@@'/),4X,A) - ELSE - WRITE(6,63) NREC,STMIDZ(ISTIDC:ISTIDC+ITWO-1),NNNREC,ZZZREC, - 1 TSTREC(NREC) - 63 FORMAT(/'******RECORD #',I3,' HAS BAD STORM NUMBER=',A2/ - 1 2(1X,'@@@',A,'@@@'/),4X,A) - END IF - ENDIF - -C CHECK CONSISTENCY BETWEEN STORM NAME AND STORM ID, PRESENT AND -C PAST. FIRST, CHECK FOR EXACT DUPLICATES IN THE INPUT AND -C SHORT-TERM HISTORY FILES. - - IF(IETYP .EQ. 0) THEN - DO NCK=NCHECK,NREC+1,-1 - BUFINX=SCRATC(NCK) - - IF(NCK .GT. NTEST .AND. BUFINZ(1:IFSTFL-1) .EQ. - 1 BUFINX(1:IFSTFL-1) .AND. - 2 BUFINZ(IFSTFL+1:MAXCHR) .EQ. - 3 BUFINX(IFSTFL+1:MAXCHR)) THEN - IETYP=9 - WRITE(6,64) NREC,NCK,NNNREC,ZZZREC,TSTREC(NREC),SCRATC(NCK) - 64 FORMAT(/'******RECORD #',I3,' IS IDENTICAL TO RECORD #',I3, - 1 ' WHICH IS FROM THE ORIGINAL SHORT-TERM HISTORY FILE.'/4X, - 2 'RECORDS ARE:'/2(1X,'@@@',A,'@@@'/),2(4X,A/)) - GO TO 71 - ENDIF - - IF(RSMCX .EQ. RSMCZ) THEN - -C DISABLE THE FOLLOWING TWO CHECKS IN THE CASE OF A CARDINAL -C TROPICAL STORM IDENTIFIER - - DO NCARD=1,NCRDMX - IF(STMNMZ(1:ICRDCH(NCARD)) .EQ. CARDNM(NCARD)(1:ICRDCH(NCARD)) - 1 .OR. - 2 STMNMX(1:ICRDCH(NCARD)) .EQ. CARDNM(NCARD)(1:ICRDCH(NCARD))) - 3 THEN - WRITE(6,1147) STMNMZ(1:ICRDCH(NCARD)), - 1 STMNMX(1:ICRDCH(NCARD)),NCARD,ICRDCH(NCARD) - 1147 FORMAT(/'...WE HAVE FOUND A MATCHING NAME FOR "',A,'" OR "',A, - 1 '" AT CARDINAL INDEX',I3,', FOR CHARACTERS 1-',I2,'.'/4X, - 2 'NAME CHECKING IS HEREBY DISABLED.') - GO TO 71 - ENDIF - ENDDO - -C SAME NAME BUT DIFFERENT ID - - IF(STMNMZ .NE. 'NAMELESS' .AND. - 1 STMNMZ .EQ. STMNMX .AND. STMIDZ .NE. STMIDX) THEN - IETYP=7 - IF(NCK .GT. NTEST) WRITE(6,65) NREC,STMNMZ,STMIDZ,NCK,STMIDX, - 1 NNNREC,ZZZREC,TSTREC(NREC),SCRATC(NCK) - 65 FORMAT(/'******RECORD #',I3,' HAS NAME=',A,' AND ID=',A,', BUT ', - 1 'ID IS DIFFERENT FROM VALIDATED ORIGINAL SHORT-TERM ', - 2 'HISTORY RECORD',I3/4X,' WHICH IS ',A,'. RECORDS ARE:'/ - 3 2(1X,'@@@',A,'@@@'/),2(4X,A/)) - IF(NCK .LE. NTEST) WRITE(6,66) NREC,STMNMZ,STMIDZ,NCK,STMIDX, - 1 NNNREC,ZZZREC,TSTREC(NREC),SCRATC(NCK) - 66 FORMAT(/'******RECORD #',I3,' HAS NAME=',A,' AND ID=',A,', BUT ', - 1 'ID IS DIFFERENT FROM TEST RECORD WITH LARGER INDEX',I3, - 2 ' WHICH IS ',A,'.'/4X,'RECORDS ARE:'/2(1X,'@@@',A,'@@@'/), - 3 2(4X,A/)) - IF(RSMCZ .EQ. 'JTWC' .AND. STMIDZ(1:2) .EQ. STMIDX(1:2)) THEN - IETYP=-7 - WRITE(6,165) - 165 FORMAT('###OBSERVER IS JTWC. BASIN NOT GUARANTEED TO BE ', - 1 'CONSISTENT. IETYP=-7.') - ENDIF - IF(IETYP .GT. 0) GO TO 71 - ENDIF - -C SAME ID BUT DIFFERENT NAME: NEITHER IS NAMELESS - - IF(STMNMZ .NE. 'NAMELESS' .AND. STMNMX .NE. 'NAMELESS') THEN - IF(STMIDZ .EQ. STMIDX .AND. STMNMZ .NE. STMNMX .AND. - 1 RELOCZ .EQ. ' ' .AND. RELOCX .EQ. ' ') THEN - IETYP=8 - IF(NCK .GT. NTEST) WRITE(6,67) NREC,STMIDZ,STMNMZ,NCK,STMIDX, - 1 NNNREC,ZZZREC,TSTREC(NREC),SCRATC(NCK) - 67 FORMAT(/'******RECORD #',I3,' HAS ID=',A,' AND NAME=',A,', BUT ', - 1 'NAME IS DIFFERENT FROM VALIDATED ORIGINAL'/7X,'SHORT-', - 2 'TERM HISTORY RECORD',I3,' WHICH IS ',A,'.'/7X,'RECORDS ', - 3 'ARE:'/2(1X,'@@@',A,'@@@'/),2(4X,A/)) - IF(NCK .LE. 
NTEST) WRITE(6,68) NREC,STMIDZ,STMNMZ,NCK,STMIDX, - 1 NNNREC,ZZZREC,TSTREC(NREC),SCRATC(NCK) - 68 FORMAT(/'******RECORD #',I3,' HAS ID=',A,' AND NAME=',A,', BUT ', - 1 'NAME IS DIFFERENT FROM TEST RECORD WITH LARGER INDEX',I3, - 2 ' WHICH IS ',A,'.'/4X,'RECORDS ARE:'/2(1X,'@@@',A,'@@@'/), - 3 2(4X,A/)) - GO TO 71 - ENDIF - ENDIF - - ENDIF - ENDDO - 71 CONTINUE - ENDIF - -C CHECK FOR RECORDS WITH IDENTICAL RSMC, DATE/TIME GROUP AND -C STORM ID. SINCE THE CURRENT RECORD IS FIRST, WE WILL SUPERCEDE -C IT WITH THE LATER RECORD - - IF(IETYP .EQ. 0) THEN - DO NCK=NREC+1,NTEST - BUFINX=TSTREC(NCK) - CALL DECVAR(ISTIDC,ISTIDC+ITWO-1,KSTMX,IERDEC,'(I2.2)', - 1 STMIDX) - DO IV=1,2 - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVAR(IV),IERDEC,FMTVIT(IV), - 1 TSTREC(NREC)) - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVRX(IV),IERDEC,FMTVIT(IV), - 1 TSTREC(NCK )) - ENDDO - - DO NBA=1,NBASIN - IF(STMIDX(3:3) .EQ. IDBASN(NBA)) THEN - NIDBSX=NBA - GO TO 91 - ENDIF - ENDDO - - 91 IF(RSMCX .EQ. RSMCZ .AND. - 1 IDATEX .EQ. IDATEZ .AND. - 2 IUTCX .EQ. IUTCZ .AND. - 3 NIDBSX .EQ. NIDBSN .AND. - 4 KSTMX .EQ. KSTORM) THEN - -C ACCUMULATE ALL RECORDS THAT HAVE THE SAME RSMC, DATE/TIME AND -C STORM ID FOR PROCESSING - - IF(NDUP .LT. NDUPMX) THEN - NDUP=NDUP+1 - INDXDP(NDUP)=NCK - - ELSE - WRITE(6,93) RSMCZ,IDATEZ,IUTCZ,STMIDZ,NDUPMX - 93 FORMAT(/'******NUMBER OF RECORDS WITH SAME RSMC=',A,', DATE=',I9, - 1 ', TIME=',I5,' AND STORM ID=',A/7X,'EXCEEDS THE MAXIMUM=', - 2 I3,'. THE PROGRAM WILL TERMINATE!!') - CALL ABORT1('STIDCK ',53) - ENDIF - - ENDIF - ENDDO - - IF(NDUP .GT. 0) THEN - CALL FIXDUP(IUNTHO,NTEST,NREC,NDUP,INDXDP,TSTREC,ZZZREC,NNNREC, - 1 IETYP) - IF(IETYP .EQ. 4) THEN - DO NDU=1,NDUP - WRITE(6,109) NDU,IABS(INDXDP(NDU)),IETYP - 109 FORMAT(/'...DUPLICATE RECORD',I3,' WITH INDEX=',I3,' HAS ', - 1 'PROBABLE DATE/TIME ERROR=',I3) - IF(INDXDP(NDU) .LT. 0) IDUPID(IABS(INDXDP(NDU)))=IETYP - ENDDO - -C CLEAR THE ERROR FLAG FOR THE CURRENT RECORD!!! - - IETYP=0 - ENDIF - ENDIF - - ENDIF - - IF(IETYP .EQ. 0) THEN - -C SKIP STORM NAME CHECK IF STORM NAME='NAMELESS' OR BASIN IS -C NEITHER ATLANTIC OR EAST PACIFIC - - IF(STMNMZ .EQ. 'NAMELESS') THEN - WRITE(6,113) STMNMZ - 113 FORMAT(/'...STORM NAME IS ',A9,' SO NO NAME CHECKING WILL BE ', - 1 'DONE') - GO TO 190 - ENDIF - - IF(NIDBSN .LE. 4) THEN - IF(NIDBSN .LE. 2) THEN - NSTBSN=-1 - DO NST=1,NSTMAX - IF(STMNMZ .EQ. STBASN(NST,NIDBSN,IYRNAM)) THEN -C WRITE(6,117) STMNMZ,NST,NIDBSN,IYRNAM -C 117 FORMAT(/'...WE HAVE FOUND MATCHING NAME FOR ',A,' AT INDEX=',I4, -C 1 ', FOR NIDBSN,IYRNAM=',2I4) - NSTBSN=NST - GO TO 171 - ENDIF - ENDDO - -C FOR EAST PACIFIC STORM IDS, CHECK THAT THEY MAY HAVE BEEN NAMED -C IN THE CENTRAL PACIFIC - - IF(NIDBSN .EQ. 2) THEN - NSTBSN=-1 - DO NST=1,NSMXCP - IF(STMNMZ .EQ. STBACP(NST)) THEN - NSTBSN=NST - GO TO 171 - ENDIF - ENDDO - ENDIF - - ELSE IF(NIDBSN .EQ. 3) THEN - NSTBSN=-1 - DO NST=1,NSMXCP - IF(STMNMZ .EQ. STBACP(NST)) THEN - NSTBSN=NST - GO TO 171 - ENDIF - ENDDO - - ELSE IF(NIDBSN .EQ. 4) THEN - NSTBSN=-1 - DO NST=1,NSMXWP - IF(STMNMZ .EQ. STBAWP(NST)) THEN - NSTBSN=NST - GO TO 171 - ENDIF - ENDDO - ENDIF - -C CHECK FOR CARDINAL NUMBER IDENTIFIER FOR AS YET UNNAMED STORMS - - DO NCARD=1,NCRDMX - IF(STMNMZ(1:ICRDCH(NCARD)) .EQ. 
CARDNM(NCARD)(1:ICRDCH(NCARD))) - 1 THEN - WRITE(6,147) STMNMZ(1:ICRDCH(NCARD)),NCARD,ICRDCH(NCARD) - 147 FORMAT(/'...WE HAVE FOUND MATCHING NAME FOR "',A,'" AT CARDINAL ', - 1 'INDEX',I3,', FOR CHARACTERS 1-',I2,'.') - NSTBSN=NCARD - GO TO 171 - ENDIF - ENDDO - -C CHECK FOR GREEK NAMES - - DO NGRK=1,NGRKMX - IF(STMNMZ(1:IGRKCH(NGRK)) .EQ. GREKNM(NGRK)(1:IGRKCH(NGRK))) - 1 THEN - WRITE(6,157) STMNMZ(1:IGRKCH(NGRK)),NGRK,IGRKCH(NGRK) - 157 FORMAT(/'...WE HAVE FOUND MATCHING GREEK NAME FOR "',A,'" AT ', - 1 'GREEK INDEX',I3,', FOR CHARACTERS 1-',I2,'.') - NSTBSN=NGRK - GO TO 171 - ENDIF - ENDDO - - 171 IF(NSTBSN .LT. 0) THEN - IETYP=5 - WRITE(6,173) NREC,STMNMZ,NIDBSN,IYRNAM,NNNREC,ZZZREC,TSTREC(NREC) - 173 FORMAT(/'+++RECORD #',I3,' HAS BAD STORM NAME=',A9,'. NIDBSN,', - 1 'IYRNAM=',2I4/4X,'ERROR RECOVERY WILL BE CALLED FOR THIS', - 2 ' RECORD:'/2(1X,'@@@',A,'@@@'/),4X,A) - - CALL FIXNAM(IUNTCA,NIDBSN,IYR,IETYP,STMNMZ,TSTREC(NREC)) - - ENDIF - - ELSE - WRITE(6,181) IDBASN(NIDBSN),STMNMZ - 181 FORMAT('...VALID BASIN ID=',A1,' DOES NOT ALLOW STORM NAME CHECK', - 1 ' AT THIS TIME. NAME=',A9) - ENDIF - - ENDIF - - ENDIF - - 190 IFSTCK(NUMTST(NREC))=IETYP - IF(IETYP .GT. 0) THEN - NADD=NADD+1 - NUMBAD(NADD+NBAD)=NUMTST(NREC) - BADREC(NADD+NBAD)=TSTREC(NREC) - ELSE - NOKAY=NOKAY+1 - NUMOKA(NOKAY)=NUMTST(NREC) - OKAREC(NOKAY)=TSTREC(NREC) - ENDIF - - ENDDO - - WRITE(6,201) NOKAY,NADD,NTEST,(ERCID(NER),NER=1,NERCID) - 201 FORMAT(//'...RESULTS OF THE STORM ID CHECK ARE: NOKAY=',I4,' AND', - 1 ' NADD=',I4,' FOR A TOTAL OF ',I4,' RECORDS.'//4X, - 2 'ERROR CODES ARE:'/(6X,A)) - WRITE(6,203) - 203 FORMAT(/'...OKAY RECORDS ARE:',100X,'ERC'/) - DO NOK=1,NOKAY - WRITE(6,209) NOK,NUMOKA(NOK),OKAREC(NOK),IFSTCK(NUMOKA(NOK)) - 209 FORMAT(3X,I4,'...',I4,'...',A,'...',I3) - ENDDO - IF(NADD .GT. 0) WRITE(6,211) (NBAD+NBA,NUMBAD(NBAD+NBA), - 1 BADREC(NBAD+NBA), - 2 IFSTCK(NUMBAD(NBAD+NBA)), - 3 NBA=1,NADD) - 211 FORMAT(/' ADDED BAD RECORDS ARE:',95X,'ERC'/(3X,I4,'...',I4, - 1 '...',A,'...',I3)) - NBAD=NBAD+NADD - - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: FIXDUP ERROR RECOVERY FOR PARTIAL DUPLICATE RECS -C PRGMMR: S. LORD ORG: NP22 DATE: 1990-11-01 -C -C ABSTRACT: ERROR RECOVERY FOR PARTIAL DUPLICATE RECORDS. PARTIAL -C DUPLICATE RECORDS ARE DEFINED AS THOSE WITH IDENTICAL RSMC, STORM -C ID & NAME, AND DATE/TIME. THE ERROR RECOVERY PROCEDURE BEGINS BY -C TRYING TO FIND A PREVIOUS RECORD FOR THE TARGET RECORD, WHICH IS -C DEFINED AS THE FIRST OF THE DUPLICATE RECORDS (ALL SUBSEQUENT -C RECORDS ARE DEFINED AS "DUPLICATES"). THE CURRENT RECORDS ARE -C SEARCHED FIRST, THEN THE SHORT-TERM HISTORY FILE IS SEARCHED. -C IF NO PREVIOUS RECORDS ARE FOUND ANYWHERE, THE DEFAULT DECISION IS -C TO KEEP THE LAST OF THE DUPLICATES, UNDER THE ASSSUMPTION THAT -C THE DUPLICATE RECORDS ARE UPDATE RECORDS FOR THE SAME STORM. -C IF A PREVIOUS RECORD IS FOUND, ITS EXTRAPOLATED POSITION IS COMPARED -C WITH THE TARGET RECORD AND THE DUPLICATE RECORDS. IF THE TARGET -C POSITION ERROR IS GREATER THAN THE DUPLICATE POSITION, THE -C TARGET RECORD IS CONSIDERED ERROREOUS. IF THE TARGET POSITION ERROR -C IS LESS THAN THE DUPLICATE POSITION ERROR, THE DUPLICATE POSITION -C IS CHECKED AGAINST AN EXTRAPOLATED FUTURE POSITION. IF THAT ERROR -C IS LESS THAN FOR THE CURRENT POSITION, IT IS ASSUMED THAT THE -C DUPLICATE RECORD HAS A DATE/TIME ERROR. IF THE DUPLICATE POSITION -C ERROR IS LARGER FOR THE FUTURE TIME, IT IS ASSUMED THAT THE -C DUPLICATE RECORD IS AN UPDATE RECORD WHICH SUPERCEDES THE TARGET. 
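A minimal sketch of the comparison described in the abstract above, using assumed stand-in names: PRVLAT/PRVLON and USTM/VSTM are the previous fix position and its motion components, TGTLAT/TGTLON and DUPLAT/DUPLON are the target and duplicate positions, DTCLO is the time separation in fractional days, FACSPD is the routine's speed-to-displacement conversion factor, and DISTZ is a generic distance function standing in for DISTSP. The subroutine itself works on the full record strings; this only outlines the extrapolation test.

C     EXTRAPOLATE THE PREVIOUS FIX TO THE CURRENT TIME AND COMPARE
      EXTLAT=PRVLAT+VSTM*DTCLO*FACSPD
      EXTLON=PRVLON+USTM*DTCLO*FACSPD
      EXTERZ=DISTZ(TGTLAT,TGTLON,EXTLAT,EXTLON)
      EXTERD=DISTZ(DUPLAT,DUPLON,EXTLAT,EXTLON)
      IF(EXTERD .LE. EXTERZ) THEN
C       DUPLICATE FITS THE EXTRAPOLATED TRACK AT LEAST AS WELL:
C       IT SUPERSEDES THE TARGET RECORD
        IETYP=3
      ELSE
C       TRY A 2*DT EXTRAPOLATION; A BETTER FIT THERE SUGGESTS A
C       DATE/TIME ERROR IN THE DUPLICATE RECORD
        EXTER2=DISTZ(DUPLAT,DUPLON,PRVLAT+VSTM*DTCLO*FACSPD*2.0,
     1               PRVLON+USTM*DTCLO*FACSPD*2.0)
        IF(EXTER2 .LT. EXTERD) THEN
          IETYP=4
        ELSE
          IETYP=3
        ENDIF
      ENDIF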
-C -C PROGRAM HISTORY LOG: -C 1990-11-01 S. LORD -C -C USAGE: CALL FIXDUP(IUNTHO,NTEST,NREC,NDUP,INDXDP,TSTREC,ZZZREC, -C NNNREC,IETYP) -C INPUT ARGUMENT LIST: -C IUNTHO - UNIT NUMBER FOR SHORT-TERM HISTORY FILE. -C NTEST - TOTAL NUMBER OF RECORDS AVAILABLE (DIMENSION OF TSTREC) -C NREC - INDEX NUMBER OF TARGET RECORD -C NDUP - NUMBER OF DUPLICATE RECORDS -C INDXDP - INTEGER ARRAY CONTAINING INDEX NUMBERS OF -C - DUPLICATE RECORDS -C TSTREC - CHARACTER ARRAY OF INPUT RECORDS. -C ZZZREC - CHARACTER VARIABLE CONTAINING VARIABLE NAMES. -C NNNREC - CHARACTER VARIABLE CONTAINING COLUMN NUMBERS. -C -C OUTPUT ARGUMENT LIST: -C IETYP - ERROR CODE -C -C INPUT FILES: -C UNIT 21 - SHORT-TERM HISTORY FILE -C -C OUTPUT FILES: -C UNIT 06 - STANDARD OUTPUT PRINT -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE FIXDUP(IUNTHO,NTEST,NREC,NDUP,INDXDP,TSTREC,ZZZREC, - 1 NNNREC,IETYP) - - PARAMETER (MAXSTM=70) - - SAVE - - CHARACTER*(*) TSTREC(0:NTEST),ZZZREC,NNNREC - - DIMENSION INDXDP(NDUP) - - DIMENSION RINC(5) - - CHARACTER STMNAM*9,STMID*3,RSMC*4 - - LOGICAL FSTFLG - - DIMENSION STMNAM(MAXSTM),STMLAT(MAXSTM),STMLON(MAXSTM), - 1 STMDIR(MAXSTM),STMSPD(MAXSTM),IDATE(MAXSTM), - 2 IUTC(MAXSTM),RMAX(MAXSTM),PENV(MAXSTM),PCEN(MAXSTM), - 3 PTOP(MAXSTM),RSMC(MAXSTM),RMW(MAXSTM),VMAX(MAXSTM), - 4 R15NW(MAXSTM),R15NE(MAXSTM),R15SE(MAXSTM),R15SW(MAXSTM), - 5 STMID(MAXSTM),FSTFLG(MAXSTM) - - PARAMETER (MAXCHR=95) - PARAMETER (MAXVIT=15) - PARAMETER (NBASIN=11) - - CHARACTER BUFIN*1,RSMCZ*4,STMIDZ*3,STMNMZ*9,FSTFLZ*1,STMDPZ*1, - 1 LATNS*1,LONEW*1,FMTVIT*6,BUFINZ*100,RELOCZ*1,IDBASN*1 - - DIMENSION IVTVAR(MAXVIT),VITVAR(MAXVIT),VITFAC(MAXVIT), - 1 ISTVAR(MAXVIT),IENVAR(MAXVIT) - - DIMENSION IDBASN(NBASIN),BUFIN(MAXCHR),FMTVIT(MAXVIT) - - EQUIVALENCE (BUFIN(1),RSMCZ),(BUFIN(5),RELOCZ),(BUFIN(6),STMIDZ), - 1 (BUFIN(10),STMNMZ),(BUFIN(19),FSTFLZ), - 2 (BUFIN(37),LATNS),(BUFIN(43),LONEW), - 3 (BUFIN(95),STMDPZ),(BUFIN(1),BUFINZ) - - EQUIVALENCE (IVTVAR(1),IDATEZ),(IVTVAR(2),IUTCZ) - - EQUIVALENCE (VITVAR( 3),STMLTZ),(VITVAR( 4),STMLNZ), - 1 (VITVAR( 5),STMDRZ),(VITVAR( 6),STMSPZ) - - DIMENSION IVTVRX(MAXVIT),VITVRX(MAXVIT) - - CHARACTER BUFCK(MAXCHR)*1,RSMCX*4,RELOCX*1,STMIDX*3,LATNSX*1, - 1 LONEWX*1,BUFINX*100 - - EQUIVALENCE (BUFCK(1),RSMCX),(BUFCK(5),RELOCX),(BUFCK(6),STMIDX), - 1 (BUFCK(35),LATNSX),(BUFCK(41),LONEWX), - 2 (BUFCK(1),BUFINX) - - EQUIVALENCE (IVTVRX(1),IDATEX),(IVTVRX(2),IUTCX), - 1 (VITVRX(3),STMLTX),(VITVRX(4),STMLNX), - 2 (VITVRX(5),STMDRX),(VITVRX(6),STMSPX) - - DATA VITFAC/2*1.0,2*0.1,1.0,0.1,9*1.0/, - 1 FMTVIT/'(I8.8)','(I4.4)','(I3.3)','(I4.4)',2*'(I3.3)', - 2 3*'(I4.4)','(I2.2)','(I3.3)',4*'(I4.4)'/, - 3 ISTVAR/20,29,34,39,45,49,53,58,63,68,71,75,80,85,90/, - 4 IENVAR/27,32,36,42,47,51,56,61,66,69,73,78,83,88,93/ - - DATA IDBASN/'L','E','C','W','O','T','U','P','S','B','A'/ - -C IPRNT : CONTROLS PRINTING IN SUBROUTINE NEWVIT -C FACSPD: CONVERSION FACTOR FOR R(DEG LAT)=V(M/S)*T(FRAC DAY)* -C FACSPD - - DATA NUM/1/,ITWO/2/,ISTIDC/1/,IPRNT/0/,FACSPD/0.77719/, - 1 IHRWIN/0/ - - WRITE(6,1) NDUP,NTEST,NREC - 1 FORMAT(/'...ENTERING FIXDUP WITH ',I3,' DUPLICATE RECORDS AND',I4, - 1 ' TOTAL RECORDS. TARGET RECORD TO BE CHECKED HAS INDEX=', - 2 I3) - -C RECOVER STORM ID, DATE,TIME ETC FROM THE TARGET RECORD - - BUFINZ=TSTREC(NREC) - CALL DECVAR(ISTIDC,ISTIDC+ITWO-1,KSTORM,IERDEC,'(I2.2)', - 1 STMIDZ) - DO IV=1,6 - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVAR(IV),IERDEC,FMTVIT(IV), - 1 BUFINZ) - VITVAR(IV)=IVTVAR(IV)*VITFAC(IV) - ENDDO - IF(LATNS .EQ. 
'S') STMLTZ=-STMLTZ - IF(LONEW .EQ. 'W') STMLNZ=360.-STMLNZ - CALL ZTIME(IDATEZ,IUTCZ,IYR,IMO,IDA,IHR,IMIN) - CALL W3DIFDAT((/IYR,IMO,IDA,0,0,0,0,0/),(/1899,12,31,0,0,0,0,0/), - $ 1,RINC) - JDY = NINT(RINC(1)) - CALL FLDAY(JDY,IHR,IMIN,DAYZ) - - WRITE(6,7) BUFINZ,(INDXDP(ND),TSTREC(INDXDP(ND)),ND=1,NDUP) - 7 FORMAT('...TARGET RECORD FOR COMPARISON IS:'/10X,A/4X, - 1 'DUPLICATE RECORDS ARE:'/(4X,I4,2X,A)) -C WRITE(6,9) STMLTZ,STMLNZ,STMDRZ,STMSPZ -C 9 FORMAT('...LAT/LON, DIR/SPD OF TARGET RECORD ARE ',4F10.3) - -C CHECK IF THERE ARE ANY PREVIOUS RECORDS IN TSTREC - - INDCLO=-99 - DTCLO=1.E10 - DO NCK=1,NTEST - BUFINX=TSTREC(NCK) - CALL DECVAR(ISTIDC,ISTIDC+ITWO-1,KSTMX,IERDEC,'(I2.2)', - 1 STMIDX) - DO IV=1,2 - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVRX(IV),IERDEC,FMTVIT(IV), - 1 TSTREC(NCK)) - ENDDO - - DO NBA=1,NBASIN - IF(STMIDX(3:3) .EQ. IDBASN(NBA)) NIDBSX=NBA - IF(STMIDZ(3:3) .EQ. IDBASN(NBA)) NIDBSN=NBA - ENDDO - - IF(RSMCX .EQ. RSMCZ .AND. - 1 NIDBSX .EQ. NIDBSN .AND. - 2 KSTMX .EQ. KSTORM .AND. - 3 NCK .NE. NREC ) THEN - CALL ZTIME(IDATEX,IUTCX,IYR,IMO,IDA,IHR,IMIN) - CALL W3DIFDAT((/IYR,IMO,IDA,0,0,0,0,0/),(/1899,12,31,0,0,0,0,0/), - $ 1,RINC) - JDY = NINT(RINC(1)) - CALL FLDAY(JDY,IHR,IMIN,DAYX) -C WRITE(6,53) NCK,IDATEX,IUTCX,DAYX -C 53 FORMAT('...INDEX,DATE,TIME OF SAME STORM ARE:',I3,I9,I5,F10.3) - - IF(DAYX .LT. DAYZ .AND. DAYZ-DAYX .LT. DTCLO) THEN - INDCLO=NCK - DTCLO=DAYZ-DAYX - ENDIF - - ENDIF - - ENDDO - - IF(INDCLO .GT. 0) THEN - BUFINX=TSTREC(INDCLO) - DO IV=3,6 - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVRX(IV),IERDEC,FMTVIT(IV), - 1 BUFINX) - VITVRX(IV)=IVTVRX(IV)*VITFAC(IV) - ENDDO - IF(LATNSX .EQ. 'S') STMLTX=-STMLTX - IF(LONEWX .EQ. 'W') STMLNX=360.-STMLNX - CALL DS2UV(USTM,VSTM,STMDRX,STMSPX) - - ELSE - WRITE(6,77) IUNTHO - 77 FORMAT(/'...PREVIOUS STORM RECORD COULD NOT BE FOUND IN CURRENT ', - 1 'RECORDS. WE WILL LOOK IN THE SHORT-TERM HISTORY FILE, ', - 2 'UNIT=',I3) - -C SCAN HISTORICAL FILE FOR ALL OCCURRENCES OF EACH STORM. -C SAVE THE LATEST TIME FOR USE LATER. - - IOPT=5 - IDTREQ=IDATEZ - STMID(1)=STMIDZ - CALL NEWVIT(IUNTHO,IPRNT,IOPT,IERVIT,MAXSTM,KSTORM,IDTREQ,IHRREQ, - 1 IHRWIN,IDATE,IUTC,STMLAT,STMLON,STMDIR,STMSPD, - 2 PCEN,PENV,RMAX,VMAX,RMW,R15NE,R15SE,R15SW,R15NW, - 3 PTOP,FSTFLG,STMNAM,STMID,RSMC) - - IF(KSTORM .GT. 0) THEN - DO KST=1,KSTORM - CALL ZTIME(IDATE(KST),IUTC(KST),IYR,IMO,IDA,IHR,IMIN) - CALL W3DIFDAT((/IYR,IMO,IDA,0,0,0,0,0/),(/1899,12,31,0,0,0,0,0/), - $ 1,RINC) - JDY = NINT(RINC(1)) - CALL FLDAY(JDY,IHR,IMIN,DAYX) -C WRITE(6,79) KST,DAYX,DAYZ -C 79 FORMAT('...INDEX,DAYX, DAYZ FROM ST. TERM HIST. FILE=',I3,2F10.3) - IF(DAYZ-DAYX .LT. DTCLO) THEN - INDCLO=KST - DTCLO=DAYZ-DAYX - ENDIF - ENDDO - - CALL DS2UV(USTM,VSTM,STMDIR(INDCLO),STMSPD(INDCLO)) - STMLTX=STMLAT(INDCLO) - STMLNX=STMLON(INDCLO) - - ELSE - WRITE(6,97) - 97 FORMAT('###PREVIOUS RECORD COULD NOT BE FOUND ANYWHERE. 
', - 1 'THEREFORE, WE MAKE THE ARBITRARY, BUT NECESSARY DECISION'/ - 2 4X,'TO RETAIN THE LAST DUPLICATE RECORD.') - - IETYP=3 - WRITE(6,99) NREC,INDXDP(NDUP),NNNREC,ZZZREC,TSTREC(NREC), - 1 TSTREC(INDXDP(NDUP)) - 99 FORMAT(/'******RECORD #',I3,' WILL BE SUPERCEDED BY RECORD #',I3, - 1 ', WHICH ARRIVED LATER AND HAS IDENTICAL RSMC, DATE/TIME', - 2 ' AND STORM ID'/2(1X,'@@@',A,'@@@'/),2(4X,A/)) - RETURN - ENDIF - - ENDIF - -C SAVE THE PREVIOUS FIX POSITION AND EXTRAPOLATE IT -C TO THE CURRENT TIME - - PRVLAT=STMLTX - PRVLON=STMLNX - EXTLAT=PRVLAT+VSTM*DTCLO*FACSPD - EXTLON=PRVLON+USTM*DTCLO*FACSPD - - EXTERZ=DISTSP(STMLTZ,STMLNZ,EXTLAT,EXTLON)*1.E-3 - WRITE(6,95) STMLTZ,STMLNZ,EXTERZ - 95 FORMAT(/'...LAT/LON,EXTRAPOLATION ERROR FOR RECORDS ARE:'/4X, - 1 'TARGET:',9X,3F10.3) - - DO NDU=1,NDUP - BUFINX=TSTREC(INDXDP(NDU)) - DO IV=3,4 - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVRX(IV),IERDEC,FMTVIT(IV), - 1 BUFINX) - VITVRX(IV)=IVTVRX(IV)*VITFAC(IV) - ENDDO - IF(LATNSX .EQ. 'S') STMLTX=-STMLTX - IF(LONEWX .EQ. 'W') STMLNX=360.-STMLNX - EXTERD=DISTSP(STMLTX,STMLNX,EXTLAT,EXTLON)*1.E-3 - WRITE(6,111) NDU,STMLTX,STMLNX,EXTERD - 111 FORMAT('...DUP. RECORD:',I4,3F10.3) - - IF(EXTERD .GT. EXTERZ) THEN - EXTLT2=PRVLAT+VSTM*DTCLO*FACSPD*2.0 - EXTLN2=PRVLON+USTM*DTCLO*FACSPD*2.0 - EXTER2=DISTSP(STMLTX,STMLNX,EXTLT2,EXTLN2)*1.E-3 - WRITE(6,113) NDU,EXTLT2,EXTLN2,EXTER2 - 113 FORMAT('...2XDT EXTRAP:',I4,3F10.3) - -C IF THE DIFFERENCE BETWEEN THE DUPLICATE POSITION AND -C AN EXTRAPOLATED POSITION TO A FUTURE CYCLE IS LESS -C THAN THE DIFFERENCE AT THE CURRENT TIME, WE ASSUME -C THAT THE DUPLICATE HAS A BAD DATE/TIME, I.E. THAT IT -C IS VALID A A LATER TIME. CURRENTLY THERE IS NO ERROR -C RETRIEVAL FOR THE DATE/TIME GROUP SO THAT THIS RECORD -C IS MARKED TO BE IN ERROR BY MAKING THE INDEX NEGATIVE. - - IF(EXTER2 .LT. EXTERD) THEN - IETYP=4 - INDXDP(NDU)=-INDXDP(NDU) - WRITE(6,117) IETYP,INDXDP(NDU) - 117 FORMAT(/'...DUPLICATE HAS DIFFERENCE WITH EXTRAPOLATED POSITION ', - 1 'TO FUTURE TIME THAT IS LESS THAN FOR CURRENT TIME.'/4X, - 2 'THEREFORE, WE CONCLUDE THAT THERE IS A DATE/TIME ERROR ', - 3 'IN THE DUPLICATE RECORD (IETYP=',I3,').'/4X,'THE INDEX=', - 4 I3,' IS MARKED NEGATIVE TO INDICATE AN ERROR.') - - ELSE - IETYP=3 - WRITE(6,119) NREC,INDXDP(NDUP),NNNREC,ZZZREC,TSTREC(NREC), - 1 TSTREC(INDXDP(NDUP)) - 119 FORMAT(/'...DUPLICATE HAS DIFFERENCE WITH EXTRAPOLATED FUTURE ', - 1 'POSITION GREATER THAN THAT FOR CURRENT POSITION.'/ - 2 ' ******RECORD #',I3,' WILL BE SUPERCEDED BY RECORD #',I3, - 3 ', WHICH ARRIVED LATER AND HAS IDENTICAL RSMC, DATE/TIME', - 4 ' AND STORM ID'/2(1X,'@@@',A,'@@@'/),2(4X,A/)) - ENDIF - - ELSE - IETYP=3 - WRITE(6,121) NREC,INDXDP(NDUP),NNNREC,ZZZREC,TSTREC(NREC), - 1 TSTREC(INDXDP(NDUP)) - 121 FORMAT(/'...DUPLICATE HAS DIFFERENCE WITH EXTRAPOLATED PAST ', - 1 'POSITION LESS THAN OR EQUAL TO THAT FOR TARGET.'/ - 2 ' ******RECORD #',I3,' WILL BE SUPERCEDED BY RECORD #',I3, - 3 ', WHICH ARRIVED LATER AND HAS IDENTICAL RSMC, DATE/TIME', - 4 ' AND STORM ID'/2(1X,'@@@',A,'@@@'/),2(4X,A/)) - ENDIF - - ENDDO - - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: FIXNAM NAME RECOVERY FOR SYNDAT_QCTROPCY -C PRGMMR: S. LORD ORG: NP22 DATE: 1990-11-01 -C -C ABSTRACT: ERRONEOUS STORM NAMES ARE CHECKED FOR OLD (RETIRED) STORM -C NAMES (ATLANTIC BASIN ONLY). IF A RETIRED NAME MATCHES THE -C INPUT STORM NAME, ERROR RECOVERY IS SUCCESSFUL. SEE REMARKS BELOW. -C -C PROGRAM HISTORY LOG: -C 1990-11-01 S. LORD -C 1993-08-25 S. 
LORD ADDED CATALOG CHECKING FOR STORM IDS -C -C USAGE: CALL FIXNAM(IUNTCA,NIDBSN,IYRN,IETYP,STMNAM,DUMREC) -C INPUT ARGUMENT LIST: -C IUNTCA - STORM CATALOG UNIT NUMBER -C NIDBSN - BASIN INDEX -C IYRN - 4 DIGIT YEAR OF STORM (YYYY) -C IETYP - INPUT ERROR CODE (SHOULD BE POSITIVE) -C STMNAM - CHARACTER VARIABLE CONTAINING ERRONEOUS STORM NAME -C -C OUTPUT ARGUMENT LIST: -C IETYP - SIGN OF INPUT IETYP IS CHANGED TO NEGATIVE IF -C - RECOVERY IS SUCCESSFUL -C DUMREC - CHARACTER VARIABLE CONTAINING ENTIRE INPUT DATA RECORD -C - WITH CORRECTED NAME. -C -C REMARKS: NONE. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE FIXNAM(IUNTCA,NIDBSN,IYRN,IETYP,STMNAM,DUMREC) - - PARAMETER (NRETIR= 7) - - SAVE - - CHARACTER*(*) STMNAM,DUMREC - - PARAMETER (MAXCHR=95) - PARAMETER (MAXVIT=15) - PARAMETER (NBASIN=11) - - CHARACTER BUFIN*1,RSMCZ*4,STMIDZ*3,STMNMZ*9,FSTFLZ*1,STMDPZ*1, - 1 LATNS*1,LONEW*1,FMTVIT*6,BUFINZ*100,RELOCZ*1,NABASN*16 - - DIMENSION IVTVAR(MAXVIT),ISTVAR(MAXVIT),IENVAR(MAXVIT) - - DIMENSION NABASN(NBASIN),BUFIN(MAXCHR),FMTVIT(MAXVIT) - - EQUIVALENCE (BUFIN(1),RSMCZ),(BUFIN(5),RELOCZ),(BUFIN(6),STMIDZ), - 1 (BUFIN(10),STMNMZ),(BUFIN(19),FSTFLZ), - 2 (BUFIN(37),LATNS),(BUFIN(43),LONEW), - 3 (BUFIN(95),STMDPZ),(BUFIN(1),BUFINZ) - - EQUIVALENCE (IVTVAR(1),IDATEZ),(IVTVAR(2),IUTCZ) - - CHARACTER RETNAM(NRETIR,NBASIN)*9 - DIMENSION IRETYR(NRETIR,NBASIN),NUMRET(NBASIN) - - DIMENSION RINC(5) - - DATA FMTVIT/'(I8.8)','(I4.4)','(I3.3)','(I4.4)',2*'(I3.3)', - 1 3*'(I4.4)','(I2.2)','(I3.3)',4*'(I4.4)'/, - 2 ISTVAR/20,29,34,39,45,49,53,58,63,68,71,75,80,85,90/, - 3 IENVAR/27,32,36,42,47,51,56,61,66,69,73,78,83,88,93/ - - DATA NABASN/'ATLANTIC ','EAST PACIFIC ', - 1 'CENTRAL PACIFIC ','WEST PACIFIC ', - 2 'SOUTH CHINA SEA ','EAST CHINA SEA ', - 3 'AUSTRALIA ','SOUTH PACIFIC ', - 4 'SOUTH INDIAN OCN','BAY OF BENGAL ', - 5 'NRTH ARABIAN SEA'/ - - DATA RETNAM/'GILBERT ','JOAN ','HUGO ','GLORIA ', - 1 'DIANA ','BOB ','ANDREW ',70*' '/ - - DATA IRETYR/1988,1988,1989,1985,1990,1991,1992, - 1 70*00/ - - DATA NUMRET/7,1,9*0/,DYSPMX/2.0/ - - RETNAM(1,2)='INIKI' - IRETYR(1,2)=1992 - - BUFINZ=DUMREC - DO INUM=1,NUMRET(NIDBSN) - IF(STMNAM .EQ. RETNAM(INUM,NIDBSN) .AND. - 1 IYRN .EQ. IRETYR(INUM,NIDBSN)) THEN - WRITE(6,3) NABASN(NIDBSN),STMNAM,IYRN - 3 FORMAT(/'...SUCESSFUL RECOVERY OF STORM NAME FROM RETIRED STORM ', - 1 'NAMES OF THE ',A,'. NAME, YEAR=',A,1X,I5) - STMNMZ=STMNAM - DUMREC=BUFINZ - IETYP=-IETYP - RETURN - ENDIF - ENDDO - -C LOOK FOR NAME IN STORM CATALOG. IF THERE, CHECK THAT IT IS A -C RECENT STORM. IF SO, ASSUME THAT THE STORM ID IS OK. - - CALL STCATN(IUNTCA,STMNAM,IDATCA,IUTCCA,IFND) - IF(IFND .EQ. 0) THEN - WRITE(6,101) STMNAM - 101 FORMAT(/'...UNSUCESSFUL ATTEMPT TO RECOVER STORM NAME ...',A, - 1 '... HAS OCCURRED.') - ELSE - -C NOW CHECK DATE VERSUS SUBJECT RECORD - - do iv=1,2 - call decvar(istvar(iv),ienvar(iv),ivtvar(iv),ierdec,fmtvit(iv), - 1 bufinz) - enddo - CALL ZTIME(IDATEZ,IUTCZ,IYR,IMO,IDA,IHR,IMIN) - CALL W3DIFDAT((/IYR,IMO,IDA,0,0,0,0,0/),(/1899,12,31,0,0,0,0,0/), - $ 1,RINC) - JDY = NINT(RINC(1)) - CALL FLDAY(JDY,IHR,IMIN,DAYZ) - - CALL ZTIME(IDATCA,IUTCCA,IYR,IMO,IDA,IHR,IMIN) - CALL W3DIFDAT((/IYR,IMO,IDA,0,0,0,0,0/),(/1899,12,31,0,0,0,0,0/), - $ 1,RINC) - JDY = NINT(RINC(1)) - CALL FLDAY(JDY,IHR,IMIN,DAYCA) - WRITE(6,133) IDATEZ,IUTCZ,IDATCA,IUTCCA,DAYZ,DAYCA - 133 FORMAT('...COMPARING DATES BETWEEN RECORD AND CATALOG. IDATEZ, ', - 1 'IUTCZ=',I9,I5,' IDATCA,IUTCCA=',I9,I5/4X,'DAYZ,DAYCA=', - 2 2F12.3) - IF(ABS(DAYZ-DAYCA) .GT. 
DYSPMX) RETURN - IETYP=-IETYP - WRITE(6,201) STMNAM - 201 FORMAT(/'...SUCESSFUL ATTEMPT TO RECOVER STORM NAME ...',A, - 1 '... HAS OCCURRED.') - ENDIF - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: SECVCK SECONDARY VARIABLE Q/C CHECKING -C PRGMMR: S. LORD ORG: NP22 DATE: 1990-11-01 -C -C ABSTRACT: SECONDARY VARIABLES ARE: STORM DIRECTION AND SPEED, -C PCEN (CENTRAL PRESSURE), RMAX (RADIUS OF THE OUTERMOST CLOSED -C ISOBAR), PENV (PRESSURE AT RMAX), AND VMAX (MAXIMUM WIND SPEED). -C THIS ROUTINE CHECKS FOR MISSING AND OUT OF BOUNDS VALUES. -C FOR RMAX, PENV, AND VMAX, VALUES ARE SUBSTITUTED FROM THE LATEST -C HISTORICAL Q/C CHECKED RECORD IF THAT RECORD IS NO MORE THAN 12 -C HOURS OLD. -C -C PROGRAM HISTORY LOG: -C 1990-11-01 S. LORD -C 1991-11-17 S. LORD REVISED FOR MULTIPLE ERRORS -C 1992-08-20 S. LORD ADDED THE JTWC MEMORIAL SWITCH CHECK -C 1992-09-04 S. LORD ADDED PRESSURE WIND RELATIONSHIP -C -C USAGE: CALL SECVCK(IUNTOK,NTEST,NOKAY,NBAD,NUMTST,NUMOKA,NUMBAD, -C DAY0,DAYMIN,DAYMX1,DAYOFF,IFSECV,ZZZREC,NNNREC, -C SCRREC,TSTREC,BADREC,OKAREC) -C INPUT ARGUMENT LIST: -C IUNTOK - UNIT NUMBER FOR PRELIMINARY QUALITY CONTROLLED FILE. -C NTEST - NUMBER OF RECORDS TO BE TESTED. -C NUMTST - INTEGER ARRAY CONTAINING INDEX NUMBER OF EACH RECORD -C - TO BE TESTED. -C DAY0 - DATE AT WHICH THIS Q/C CHECK IS BEING MADE. -C - UNITS ARE DDD.FFF, WHERE DDD=JULIAN DAY, FFF=FRAC- -C - TIONAL DAY (E.G. .5=1200 UTC). -C DAYMIN - EARLIEST (MINIMUM) DATE FOR CONSTRUCTION OF A -C - HISTORICAL TRACK FOR EACH STORM. -C - UNITS SAME AS DAY0 ABOVE. -C DAYMX1 - LATEST (MAXIMUM) DATE FOR CONSTRUCTION OF HISTORICAL -C - TRACK FOR EACH STORM. UNITS ARE SAME AS DAY0 ABOVE. -C DAYOFF - OFFSET ADDED TO DAYMX1 IF DAYMIN REFERS TO THE YEAR -C - BEFORE DAYMX1. -C ZZZREC - CHARACTER VARIABLE CONTAINING VARIABLE NAMES. -C NNNREC - CHARACTER VARIABLE CONTAINING COLUMN NUMBERS. -C TSTREC - CHARACTER ARRAY CONTAINING RECORDS TO BE TESTED. -C -C OUTPUT ARGUMENT LIST: -C NOKAY - NUMBER OF RECORDS THAT PASSED THE SEC. VAR. CHECK. -C NBAD - NUMBER OF RECORDS THAT FAILED THE SEC. VAR. CHECK. -C IFSECV - INTEGER ARRAY CONTAINING ERROR CODE FOR EACH INPUT -C - RECORD. SEE COMMENTS IN PGM FOR KEY TO ERROR CODES. -C SCRREC - SCRATCH CHARACTER*9 ARRAY -C NUMOKA - INTEGER ARRAY CONTAINING INDEX NUMBER OF EACH GOOD -C - RECORD. -C NUMBAD - INTEGER ARRAY CONTAINING INDEX NUMBER OF EACH BAD -C - RECORD. -C BADREC - CHARACTER ARRAY CONTAINING BAD RECORDS THAT FAILED -C - THE SEC. VAR. CHECK. -C OKAREC - CHARACTER ARRAY CONTAINING ALL RECORDS THAT PASSED -C - THE SEC. VAR. CHECK. -C -C OUTPUT FILES: -C UNIT 06 - STANDARD OUTPUT PRINT -C -C REMARKS: WARNING: RECORDS WITH CORRECT FORMAT BUT MISSING OR -C ERRONEOUS DATA MAY BE MODIFIED BY THIS ROUTINE!! 
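The bounds/persistence rule in the abstract can be summarized with a minimal sketch, assuming scalar stand-in names: VALUE is one secondary variable from the current record, VLOMIN/VLOMAX are its allowed bounds, RMISVR is the missing code, PRVVAL/PRVDAY are the most recent acceptable previous value and its time, and DAYZ is the current record time. The routine applies this per variable using the SECBND bounds, EPSMIS as the missing-value tolerance, and DTPERS = 0.5 fractional day (12 hours).

C     FLAG A MISSING OR OUT-OF-BOUNDS SECONDARY VARIABLE AND
C     SUBSTITUTE PERSISTENCE IF THE PREVIOUS VALUE IS RECENT ENOUGH
      IF(ABS(VALUE-RMISVR) .LE. EPSMIS .OR.
     1   VALUE .LT. VLOMIN .OR. VALUE .GT. VLOMAX) THEN
        IF(DAYZ-PRVDAY .LE. DTPERS) THEN
          VALUE=PRVVAL
          SUBFLG='P'
        ELSE
C         OTHERWISE FALL BACK TO A CLIMATOLOGICAL VALUE
C         (PENV, RMAX AND VMAX ONLY)
          SUBFLG='C'
        ENDIF
      ENDIF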
-C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE SECVCK(IUNTOK,NTEST,NOKAY,NBAD,NUMTST,NUMOKA,NUMBAD, - 1 DAY0,DAYMIN,DAYMX1,DAYOFF,IFSECV,ZZZREC,NNNREC, - 2 SCRREC,TSTREC,BADREC,OKAREC) - - PARAMETER (NPRVMX=61) - PARAMETER (MAXSTM=70) - PARAMETER (NERCSV=9) - PARAMETER (MAXREC=1000) - - SAVE - - CHARACTER*(*) ZZZREC,NNNREC,SCRREC(0:NTEST),TSTREC(0:NTEST), - 1 BADREC(MAXREC),OKAREC(NTEST),ERCSV(NERCSV)*60, - 2 STDPTP(-NPRVMX:-1)*1,SUBTOP*1,SUBFLG*1 - - LOGICAL NEWSTM - - DIMENSION NUMOKA(NTEST),IFSECV(MAXREC),NUMBAD(MAXREC), - 1 NUMTST(NTEST) - - DIMENSION NUMSTM(MAXSTM),INDXST(MAXSTM,MAXSTM),IOPSTM(MAXSTM), - 1 SRTDAY(MAXSTM,MAXSTM),IDASRT(MAXSTM) - - DIMENSION STLATP(-NPRVMX:-1),STLONP(-NPRVMX:-1), - 1 STDAYP(-NPRVMX: 0),STVMXP(-NPRVMX:-1), - 2 STDIRP(-NPRVMX:-1),STSPDP(-NPRVMX:-1), - 3 STPCNP(-NPRVMX:-1),STPENP(-NPRVMX:-1), - 4 STRMXP(-NPRVMX:-1) - - PARAMETER (MAXCHR=95) - PARAMETER (MAXVIT=15) - PARAMETER (MAXTPC= 3) - PARAMETER (NBASIN=11) - PARAMETER (ISECVR= 5,ITERVR=10) - PARAMETER (NSECVR=ITERVR-ISECVR) - PARAMETER (NTERVR=MAXVIT-ITERVR+1) - - CHARACTER BUFIN*1,RSMCZ*4,STMIDZ*3,STMNMZ*9,FSTFLZ*1,STMDPZ*1, - 1 SHALO*1,MEDIUM*1,DEEP*1,LATNS*1,LONEW*1,FMTVIT*6, - 2 BUFINZ*100,STMREQ*9,RELOCZ*1,STMTPC*1,EXE*1,NAMVAR*5, - 3 IDBASN*1,NABASN*16 - - DIMENSION IVTVAR(MAXVIT),VITVAR(MAXVIT),VITFAC(MAXVIT), - 1 ISTVAR(MAXVIT),IENVAR(MAXVIT) - - DIMENSION NAMVAR(MAXVIT+1),IDBASN(NBASIN),NABASN(NBASIN), - 1 BUFIN(MAXCHR),STMTPC(0:MAXTPC),FMTVIT(MAXVIT) - - EQUIVALENCE (BUFIN(1),RSMCZ),(BUFIN(5),RELOCZ),(BUFIN(6),STMIDZ), - 1 (BUFIN(10),STMNMZ),(BUFIN(19),FSTFLZ), - 2 (BUFIN(37),LATNS),(BUFIN(43),LONEW), - 3 (BUFIN(95),STMDPZ),(BUFIN(1),BUFINZ) - - EQUIVALENCE (IVTVAR(1),IDATEZ),(IVTVAR(2),IUTCZ) - - EQUIVALENCE (VITVAR( 3),STMLTZ),(VITVAR( 4),STMLNZ), - 1 (VITVAR( 5),STMDRZ),(VITVAR( 6),STMSPZ), - 2 (VITVAR( 7),PCENZ) - - EQUIVALENCE (STMTPC(0), EXE),(STMTPC(1),SHALO),(STMTPC(2),MEDIUM), - 1 (STMTPC(3),DEEP) - -C **** NOTE: SECBND AND PRVSVR ARE DIMENSIONED NSECVR+1 TO CARRY -C SPACE FOR VMAX, WHICH IS NOT STRICTLY A SECONDARY VARIABLE. -C THEREFORE, WE DO NOT ALLOW MISSING OR ERRONEOUS VALUES -C OF VMAX TO CAUSE RECORDS TO BE REJECTED. - -C ****NOTE: DEPTH OF CYCLONIC CIRCULATION IS CLASSIFIED AS A -C SECONDARY VARIABLE - - DIMENSION RINC(5) - - DIMENSION SECBND(NSECVR+1,2),PRVSVR(NSECVR+1,-NPRVMX:-1), - 1 TERBND(NTERVR,2),IERROR(NSECVR+2) - - EQUIVALENCE (DIRMN ,SECBND(1,1)),(DIRMX ,SECBND(1,2)), - 1 (SPDMN ,SECBND(2,1)),(SPDMX ,SECBND(2,2)), - 2 (PCENMN,SECBND(3,1)),(PCENMX,SECBND(3,2)), - 3 (PENVMN,SECBND(4,1)),(PENVMX,SECBND(4,2)), - 4 (RMAXMN,SECBND(5,1)),(RMAXMX,SECBND(5,2)), - 5 (VMAXMN,TERBND(1,1)),(VMAXMX,TERBND(1,2)) - - DATA SHALO/'S'/,MEDIUM/'M'/,DEEP/'D'/,EXE/'X'/, - 1 VITFAC/2*1.0,2*0.1,1.0,0.1,9*1.0/, - 2 FMTVIT/'(I8.8)','(I4.4)','(I3.3)','(I4.4)',2*'(I3.3)', - 3 3*'(I4.4)','(I2.2)','(I3.3)',4*'(I4.4)'/, - 4 ISTVAR/20,29,34,39,45,49,53,58,63,68,71,75,80,85,90/, - 5 IENVAR/27,32,36,42,47,51,56,61,66,69,73,78,83,88,93/ - - DATA IDBASN/'L','E','C','W','O','T','U','P','S','B','A'/ - - DATA NABASN/'ATLANTIC ','EAST PACIFIC ', - 1 'CENTRAL PACIFIC ','WEST PACIFIC ', - 2 'SOUTH CHINA SEA ','EAST CHINA SEA ', - 3 'AUSTRALIA ','SOUTH PACIFIC ', - 4 'SOUTH INDIAN OCN','BAY OF BENGAL ', - 5 'NRTH ARABIAN SEA'/ - - DATA NAMVAR/'DATE ','TIME ','LAT. ','LONG.','DIR ','SPEED', - 1 'PCEN ','PENV ','RMAX ','VMAX ','RMW ','R15NE', - 2 'R15SE','R15SW','R15NW','DEPTH'/ - -C RMISPR: MISSING CODE FOR RMAX, PCEN AND PENV -C RMISV: MISSING CODE FOR MAX. 
TANGENTIAL WIND (VMAX) -C EPSMIS: TOLERANCE FOR MISSING VMAX -C FIVMIN: FIVE MINUTES IN UNITS OF FRACTIONAL DAYS -C DTPERS: MAXIMUM TIME SEPARATION FOR SUBSTITUTION OF MISSING -C SECONDARY INFORMATION USING PERSISTENCE (12 HOURS) -C BOUNDS FOR SECONDARY VARIABLES: -C DIRMN =0.0 DEG DIRMX =360 DEG -C SPDMN =0.0 M/S SPDMX =30 M/S -C PCENMN=880 MB PCENMX=1020 MB -C PENVMN=970 MB PENVMX=1050 MB -C RMAXMN=100 KM RMAXMX=999 KM -C VMAXMN=7.7 M/S VMAXMX=100 M/S - - DATA RMISV/-9.0/,RMISPR/-999.0/,EPSMIS/1.E-1/,NUM/1/, - 1 FIVMIN/3.4722E-3/,DTPERS/0.5/ - - DATA DIRMN/0.0/,DIRMX/360./,SPDMN/0.0/,SPDMX/30./, - 1 PCENMN/880./,PCENMX/1020./,PENVMN/970./,PENVMX/1050./, - 2 RMAXMN/100./,RMAXMX/999.0/,VMAXMN/7.7 /,VMAXMX/100./ - - DATA ERCSV - 1 /'1: UNPHYSICAL OR MISSING DIRECTION (OUTSIDE BOUNDS) ', - 2 '2: UNPHYSICAL OR MISSING SPEED (OUTSIDE BOUNDS) ', - 3 '3: UNPHYSICAL OR MISSING CENTRAL PRESSURE (OUTSIDE BOUNDS) ', - 4 '4: UNPHYSICAL OR MISSING ENV. PRESSURE (OUTSIDE BOUNDS) ', - 5 '5: UNPHYSICAL OR MISSING RMAX (OUTSIDE BOUNDS) ', - 6 '6: UNPHYSICAL OR MISSING VMAX (OUTSIDE BOUNDS) ', - 7 '7: MISSING OR UNINTERPRETABLE DEPTH OF CYCLONE CIRCULATION ', - 8 '8: COMBINATION OF TWO OF THE ERROR TYPES 1-6 ', - 9 '9: COMBINATION OF THREE OR MORE OF THE ERROR TYPES 1-6 '/ - -C ERROR CODES FOR DIRECTION/SPEED GROUP CHECK ARE AS FOLLOWS: -C NEGATIVE NUMBERS INDICATE THAT AN ERRONEOUS OR MISSING VALUE -C WAS SUBSTITUTED USING PERSISTENCE OVER THE TIME DTPERS (12 H) -C MULTIPLE ERRORS ARE HANDLED AS FOLLOWS: -C THE FIRST ERROR OCCUPIES THE LEFT-MOST DIGIT -C THE SECOND ERROR OCCUPIES THE RIGHT-MOST DIGIT -C THREE OR MORE ERRORS REVERTS TO ERROR CODE=9 - -C 1: UNPHYSICAL DIRECTION (OUTSIDE BOUNDS) -C 2: UNPHYSICAL SPEED (OUTSIDE BOUNDS) -C 3: UNPHYSICAL CENTRAL PRESSURE (OUTSIDE BOUNDS) -C 4: UNPHYSICAL ENVIRONMENTAL PRESSURE (OUTSIDE BOUNDS) -C 5: UNPHYSICAL RMAX (OUTSIDE BOUNDS) -C 6: UNPHYSICAL VMAX (OUTSIDE BOUNDS) -C 7: MISSING OR UNINTERPRETABLE DEPTH OF CYCLONE CIRCULATION -C 8: COMBINATION OF TWO OF THE ERROR TYPES 1-6 -C 9: COMBINATION OF THREE OR MORE OF THE ERROR TYPES 1-6 - - NADD=0 - WRITE(6,1) NTEST,NOKAY,NBAD,DAY0,DAYMIN,DAYMX1, - 1 DAYOFF - 1 FORMAT(//'...ENTERING SECVCK TO CHECK SECONDARY VARIABLE ERRORS.', - 1 ' NTEST,NOKAY,NBAD=',3I4/4X,'TIME PARAMETERS ARE: DAY0,', - 2 'DAYMIN,DAYMX1,DAYOFF=',4F11.3///) - - CALL WRNING('SECVCK') - -C INITIALIZE SOME VARIABLES - - NUNI=0 - NSTART=0 - SCRREC(0)='ZZZZZ' - STDAYP(0)=-999.0 - SECBND(6,1:2)=TERBND(1,1:2) - - NUMSTM(1:MAXSTM)=0 - INDXST(1:MAXSTM,1:MAXSTM)=0 - -C FOR THE READABLE RECORDS, FIND THE UNIQUE STORMS AND SAVE THE -C INDEX FOR EACH STORM - - WRITE(6,31) - 31 FORMAT(/'...RECORDS THAT WILL BE CHECKED ARE:'/) - DO NREC=1,NTEST - - BUFINZ=TSTREC(NREC) - WRITE(6,33) NREC,NUMTST(NREC),BUFINZ - 33 FORMAT('...',I4,'...',I4,'...',A) - -C DECODE DATE FOR SORTING PURPOSES - - DO IV=1,2 - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVAR(IV),IERDEC,FMTVIT(IV), - 1 BUFINZ) - ENDDO - CALL ZTIME(IDATEZ,IUTCZ,IYR,IMO,IDA,IHR,IMIN) - CALL W3DIFDAT((/IYR,IMO,IDA,0,0,0,0,0/),(/1899,12,31,0,0,0,0,0/), - $ 1,RINC) - JDY = NINT(RINC(1)) - CALL FLDAY(JDY,IHR,IMIN,DAYZ) - -C CATEGORIZE ALL STORMS BY THEIR STORM ID - - IOPT=5 - STMREQ=STMIDZ - -C ENDIF - - NEWSTM=.TRUE. - DO NR=NSTART,NUNI - IF(STMREQ .EQ. SCRREC(NR)) THEN - NEWSTM=.FALSE. 
- INDX=NR - GO TO 85 - ENDIF - ENDDO - - 85 NSTART=1 - IF(NEWSTM) THEN - NUNI=NUNI+1 - SCRREC(NUNI)=STMREQ - IOPSTM(NUNI)=IOPT - INDX=NUNI - ENDIF - - NUMSTM(INDX)=NUMSTM(INDX)+1 - INDXST(NUMSTM(INDX),INDX)=NREC - SRTDAY(NUMSTM(INDX),INDX)=DAYZ - - ENDDO - - WRITE(6,101) NUNI - 101 FORMAT(/'...NUMBER OF UNIQUE STORMS=',I4) - -C CHECK SECONDARY VARIABLES DIRECTION,SPEED, PCEN, PENV, RMAX -C VMAX AND STORM DEPTH FOR MISSING AND OUT OF BOUNDS VALUES - - DO NUNIQ=1,NUNI - - BUFINZ=TSTREC(INDXST(1,NUNIQ)) - CALL DECVAR(ISTVAR(1),IENVAR(1),IVTVAR(1),IERDEC,FMTVIT(1), - 1 BUFINZ) - - print *, ' ' - print *, ' ' - IDTTRK=-IDATEZ - CALL SETTRK(IUNTOK,IOPSTM(NUNIQ),IDTTRK,DAY0,DAYMIN, - 1 DAYMX1,DAYOFF,STMDRZ,STMSPZ,STMLTZ,STMLNZ, - 2 SCRREC(NUNIQ),IERSET) - CALL PRVSTM(STLATP,STLONP,STDIRP,STSPDP,STDAYP, - 1 STRMXP,STPCNP,STPENP,STVMXP,STDPTP,KSTPRV) - PRVSVR(1,-1:-KSTPRV:-1)=STDIRP(-1:-KSTPRV:-1) - PRVSVR(2,-1:-KSTPRV:-1)=STSPDP(-1:-KSTPRV:-1) - PRVSVR(3,-1:-KSTPRV:-1)=STPCNP(-1:-KSTPRV:-1) - PRVSVR(4,-1:-KSTPRV:-1)=STPENP(-1:-KSTPRV:-1) - PRVSVR(5,-1:-KSTPRV:-1)=STRMXP(-1:-KSTPRV:-1) - PRVSVR(6,-1:-KSTPRV:-1)=STVMXP(-1:-KSTPRV:-1) - -C SORT ALL RECORDS BY TIME FOR EACH STORM SO THAT WE CAN TAKE -C THEM IN CHRONOLOGICAL ORDER - - CALL SORTRL(SRTDAY(1:NUMSTM(NUNIQ),NUNIQ),IDASRT(1:NUMSTM(NUNIQ)), - 1 NUMSTM(NUNIQ)) - - WRITE(6,107) KSTPRV,SCRREC(NUNIQ) - 107 FORMAT(/'...READY FOR ERROR CHECK WITH KSTPRV, STMID=',I3,1X,A) - - DO NUMST=1,NUMSTM(NUNIQ) - -C INITIALIZE ERROR COUNTERS - - NTOTER=0 - NPOSER=0 - IERROR(1:NSECVR+2)=0 - - NREC=INDXST(IDASRT(NUMST),NUNIQ) - BUFINZ=TSTREC(NREC) - -C GET DATE/TIME, STORM LAT/LON, AND THE SECONDARY -C VARIABLES DIRECTION/SPEED, PCEN, PENV, RMAX -C ****NOTE: ALTHOUGH NOT STRICTLY A SECONDARY VARIABLE, VMAX -C IS CHECKED HERE SINCE IT IS NEEDED FOR CLIPER. - - DO IV=1,ITERVR - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVAR(IV),IERDEC,FMTVIT(IV), - 1 BUFINZ) - VITVAR(IV)=REAL(IVTVAR(IV))*VITFAC(IV) - ENDDO - - CALL ZTIME(IDATEZ,IUTCZ,IYR,IMO,IDA,IHR,IMIN) - CALL W3DIFDAT((/IYR,IMO,IDA,0,0,0,0,0/),(/1899,12,31,0,0,0,0,0/), - $ 1,RINC) - JDY = NINT(RINC(1)) - CALL FLDAY(JDY,IHR,IMIN,DAYZ) - JDY=IFIX(DAYZ) - - INDX00=99 - DO NP=-1,-KSTPRV,-1 - IF(ABS(STDAYP(NP)-DAYZ) .LE. FIVMIN) INDX00=NP - ENDDO - IF(ABS(DAYZ-DAY0) .LT. FIVMIN) INDX00=0 - - IF(INDX00 .EQ. 99) THEN - WRITE(6,133) INDX00 - 133 FORMAT(/'******AN INDEXING ERROR HAS OCCURRED IN SECVCK, INDX00=', - 1 I4) - CALL ABORT1('SECVCK ',133) - ENDIF - -C ERROR RECOVERY FROM PERSISTENCE IS ALWAYS POSSIBLE. RECOVERY -C FROM CLIMATOLOGY IS POSSIBLE FOR ENVIRONMENTAL PRESSURE AND -C STORM SIZE. - -C THE JMA MEMORIAL DIRECTION/SPEED CHECK IS NOW IMPLEMENTED: -C IF BOTH DIRECTION AND SPEED ARE ZERO, AND THE RSMC IS JMA, -C WE TRY TO RECOVER A BETTER DIRECTION/SPEED. - - DO IV=ISECVR,ITERVR - - RMISVR=RMISPR - SUBVAR=-99.0 - IF(IV .EQ. ITERVR) RMISVR=RMISV - IF(ABS(VITVAR(IV)-RMISVR) .LE. EPSMIS .OR. - 1 VITVAR(IV) .LT. SECBND(IV-ISECVR+1,1) .OR. - 2 VITVAR(IV) .GT. SECBND(IV-ISECVR+1,2) .OR. - 3 (IV-ISECVR+1 .LE. 2 .AND. VITVAR(5) .EQ. 0.0 .AND. - 4 VITVAR(6) .EQ. 0.0 .AND. (RSMCZ .EQ. 'JMA' .OR. - 5 RSMCZ .EQ. '!WJ' .OR. RSMCZ .EQ. '!JW'))) THEN - - NTOTER=NTOTER+1 - IF(IV-ISECVR+1 .EQ. 
3) THEN - NPOSER=NPOSER+1 - IERROR(NTOTER)=IABS(IV-ISECVR+1) - ELSE - IERROR(NTOTER)=-IABS(IV-ISECVR+1) - ENDIF - - WRITE(6,141) NUNIQ,NUMST,INDX00,DAYZ,NTOTER,IERROR(NTOTER), - 1 NAMVAR(IV),VITVAR(IV),RMISVR,SECBND(IV-ISECVR+1,1), - 2 SECBND(IV-ISECVR+1,2),NNNREC,ZZZREC,TSTREC(NREC) - 141 FORMAT(//'...ERROR CHECKING NUNIQ,NUMST,INDX00,DAYZ,NTOTER,', - 1 'IERROR=',3I4,F11.3,2I4/4X,'HAS FOUND SECONDARY ', - 2 'VARIABLE ',A,' WITH VALUE=',F7.1,' MISSING OR ', - 3 'EXCEEDING BOUNDS. RMISVR,MINVAL,MAXVAL=',3F7.1/2(1X, - 4 '@@@',A,'@@@'/),4X,A) - -C NEGATE THE ERROR FLAG SO THAT IT SERVES ONLY AS A REMINDER THAT -C AN ERROR IS PRESENT - - IF(IV-ISECVR+1 .LE. 2 .AND. - 1 ((VITVAR(5) .NE. 0.0 .OR. - 2 VITVAR(6) .NE. 0.0) .OR. (RSMCZ .NE. 'JMA' .AND. - 3 RSMCZ .NE. '!WJ' .AND. RSMCZ .NE. '!JW'))) THEN - - WRITE(6,151) NAMVAR(IV),IERROR(NTOTER) - 151 FORMAT('...ERROR RECOVERY FOR ',A,' WILL BE DELAYED UNTIL DRSPCK', - 1 ' (NO LONGER CALLED).'/4X,'THE ERROR TYPE ',I3,' IS MADE ', - 2 'NEGATIVE AS A REMINDER THAT AN ERROR HAS OCCURRED.') - - ELSE - -C FOR ALL OTHER VARIABLES, IS THERE A PREVIOUS HISTORY? - - IF(KSTPRV .GT. 0) THEN - INDPER=0 - DO NP=INDX00-1,-KSTPRV,-1 - IF(ABS(PRVSVR(IV-ISECVR+1,NP)-RMISVR) .GT. EPSMIS .AND. - 1 PRVSVR(IV-ISECVR+1,NP) .GE. SECBND(IV-ISECVR+1,1) .AND. - 2 PRVSVR(IV-ISECVR+1,NP) .LE. SECBND(IV-ISECVR+1,2)) THEN - -c Because of the JMA memorial problem, we are not allowed to use -c a motionless storm as a persistence value - - if(iv-isecvr+1 .le. 2 .and. prvsvr(1,np) .eq. 0 .and. - 1 prvsvr(2,np) .eq. 0) then - ipers=0 - - else - INDPER=NP - IPERS=1 -C WRITE(6,161) INDPER,DAYZ,STDAYP(INDPER), -C 1 PRVSVR(IV-ISECVR+1,INDPER) -C 161 FORMAT(/'...INDPER,DAYZ,STDAYP(INDPER),PRVSVR(IV-ISECVR+1, -C 1 'INDPER)=',I3,3F10.3) - GO TO 221 - ENDIF - ENDIF - ENDDO - 221 CONTINUE - -C IS PERSISTENCE SUBSTITUTION POSSIBLE? - - IF(DAYZ-STDAYP(INDPER) .LE. DTPERS .AND. IPERS .EQ. 1) THEN - SUBVAR=PRVSVR(IV-ISECVR+1,INDPER) - SUBFLG='P' - IF(NPOSER .GT. 0) NPOSER=NPOSER-1 - IERROR(NTOTER)=-IABS(IERROR(NTOTER)) - WRITE(6,223) SUBVAR - 223 FORMAT('...THE MISSING OR ERRONEOUS VALUE WILL BE REPLACED BY ', - 1 'A PERSISTENCE VALUE OF ',F7.1) - -C PERSISTENCE SUBSTITUTION NOT POSSIBLE - - ELSE - IF(IV-ISECVR+1 .LE. 3) THEN - SUBVAR=0.0 - WRITE(6,224) NAMVAR(IV),DAYZ,STDAYP(INDPER),DTPERS - 224 FORMAT(/'...TIME INTERVAL TO THE CLOSEST PREVIOUS RECORD WITH ', - 1 'A NON-MISSING ',A,' EXCEEDS DTPERS OR A '/4X,'NON-', - 2 'MISSING VALUE CANNOT BE FOUND. DAYZ,PREVIOUS DAY,', - 3 'DTPERS=',3F10.3,'.'/4X,'NO RECOVERY POSSIBLE FOR THIS', - 4 ' VARIABLE.') - - ELSE - WRITE(6,225) NAMVAR(IV),DAYZ,STDAYP(INDPER),DTPERS - 225 FORMAT(/'...TIME INTERVAL TO THE CLOSEST PREVIOUS RECORD WITH ', - 1 'A NON-MISSING ',A,' EXCEEDS DTPERS OR A '/4X,'NON-', - 2 'MISSING VALUE CANNOT BE FOUND. DAYZ,PREVIOUS DAY,', - 3 'DTPERS=',3F10.3/4X,'WE WILL SUBSTITUTE A ', - 4 'CLIMATOLOGICAL VALUE.') - ENDIF - ENDIF - -C NO PRIOR HISTORY - - ELSE - IF(IV-ISECVR+1 .LE. 3) THEN - SUBVAR=0.0 - WRITE(6,226) KSTPRV - 226 FORMAT(/'...KSTPRV=',I2,' SO THERE IS NO PRIOR HISTORY AND NO ', - 1 'CHECKING. NO RECOVERY POSSIBLE FOR THIS VARIABLE.') - - ELSE - WRITE(6,227) KSTPRV - 227 FORMAT(/'...KSTPRV=',I2,' SO THERE IS NO PRIOR HISTORY AND NO ', - 1 'CHECKING. CLIMATOLOGICAL VALUES WILL BE SUBSTITUTED.') - ENDIF - ENDIF - -C CLIMATOLOGICAL VARIABLE SUBSTITUTION - - IF(SUBVAR .EQ. -99.0) THEN - DO NBA=1,NBASIN - IF(STMIDZ(3:3) .EQ. 
IDBASN(NBA)) THEN - IBASN=NBA - GO TO 2228 - ENDIF - ENDDO - 2228 CONTINUE - -C SUBSTITUTE A PRESSURE-WIND RELATIONSHIP FOR MAX WIND - - IF(IV .EQ. ITERVR) THEN - SUBVAR=TCPWTB(VITVAR(7),IBASN) - ELSE - SUBVAR=TCCLIM(IV,IBASN) - ENDIF - SUBFLG='C' - WRITE(6,229) NAMVAR(IV),SUBVAR,NABASN(IBASN) - 229 FORMAT(/'...FOR VARIABLE ',A,', THE CLIMATOLOGICAL VALUE IS',F7.1, - 1 ' IN THE ',A,' BASIN.') - ENDIF - - IF(SUBVAR .NE. 0.0) THEN - WRITE(TSTREC(NREC)(ISTVAR(IV):IENVAR(IV)),FMTVIT(IV)) - 1 NINT(SUBVAR/VITFAC(IV)) - TSTREC(NREC)(ISTVAR(IV)-1:ISTVAR(IV)-1)=SUBFLG - WRITE(6,2219) TSTREC(NREC) - 2219 FORMAT('...AFTER SUBSTITUTION, THE RECORD IS:'/4X,A) - BUFINZ=TSTREC(NREC) - -c Only update vitvar after direction errors have been corrected -c in the rare case for a JMA report with 0000 direction and -c 0000 speed - - if(iv-isecvr+1 .ge. 2) then - DO IVZ=1,ITERVR - CALL DECVAR(ISTVAR(IVZ),IENVAR(IVZ),IVTVAR(IVZ),IERDEC, - 1 FMTVIT(IVZ),BUFINZ) - VITVAR(IVZ)=REAL(IVTVAR(IVZ))*VITFAC(IVZ) - ENDDO - endif - ENDIF - - ENDIF - ENDIF - -C THE JTWC MEMORIAL PRESSURE SWITCHING CHECK -C IV=7 IS PCEN -C IV=8 IS PENV - - IF(IV-ISECVR+1 .EQ. 3) THEN - IF(VITVAR(IV) .GE. VITVAR(IV+1)) THEN - NTOTER=NTOTER+1 - WRITE(6,2301) VITVAR(IV),VITVAR(IV+1) - 2301 FORMAT(/'...UNPHYSICAL PCEN=',F7.1,' >= PENV=',F7.1) - IF(SUBVAR .GT. 0.0) THEN - NPOSER=NPOSER+1 - IERROR(NTOTER)=IABS(IV-ISECVR+1) - WRITE(6,2303) - 2303 FORMAT('...WE CANNOT RECOVER THIS ERROR SINCE SUBSTITUTION HAS ', - 1 'GIVEN UNPHYSICAL RESULTS.') - ELSE - IF(VITVAR(IV) .NE. RMISVR .AND. VITVAR(IV+1) .NE. RMISVR) THEN - SUBFLG='Z' - SUBVR1=VITVAR(IV) - SUBVR2=VITVAR(IV+1)-1.0 - WRITE(TSTREC(NREC)(ISTVAR(IV):IENVAR(IV)),FMTVIT(IV)) - 1 NINT(SUBVR2/VITFAC(IV)) - WRITE(TSTREC(NREC)(ISTVAR(IV+1):IENVAR(IV+1)),FMTVIT(IV+1)) - 1 NINT(SUBVR1/VITFAC(IV+1)) - TSTREC(NREC)(ISTVAR(IV)-1:ISTVAR(IV)-1)=SUBFLG - TSTREC(NREC)(ISTVAR(IV+1)-1:ISTVAR(IV+1)-1)=SUBFLG - WRITE(6,2219) TSTREC(NREC) - BUFINZ=TSTREC(NREC) - DO IVZ=1,ITERVR - CALL DECVAR(ISTVAR(IVZ),IENVAR(IVZ),IVTVAR(IVZ),IERDEC, - 1 FMTVIT(IVZ),BUFINZ) - VITVAR(IVZ)=REAL(IVTVAR(IVZ))*VITFAC(IVZ) - ENDDO - ENDIF - ENDIF - ENDIF - ENDIF - ENDDO - -C CHECK FOR MISSING DEPTH OF THE CYCLONIC CIRCULATION - - ITPC=0 - DO KTPC=1,MAXTPC - IF(STMDPZ .EQ. STMTPC(KTPC)) THEN - ITPC=KTPC -C WRITE(6,239) NUMST,STMDPZ -C 239 FORMAT('...RECORD ',I3,' HAS A PROPER CODE=',A,' FOR DEPTH OF ', -C 'THE CYCLONIC CIRCULATION.') - ENDIF - ENDDO - - IF(ITPC .EQ. 0) THEN - - SUBTOP=EXE - NTOTER=NTOTER+1 - IERROR(NTOTER)=-7 - - WRITE(6,241) NUNIQ,NUMST,INDX00,DAYZ,NTOTER,IERROR(NTOTER), - 1 STMDPZ,NNNREC,ZZZREC,TSTREC(NREC) - 241 FORMAT(//'...ERROR CHECKING NUNIQ,NUMST,INDX00,DAYZ,NTOTER,', - 1 'IERROR=',3I4,F11.3,2I4/4X,'HAS FOUND MISSING OR BAD ', - 2 'CODE=',A,' FOR DEPTH OF THE CYCLONIC CIRCULATION. ', - 3 'RECORD='/2(1X,'@@@',A,'@@@'/),4X,A) - - IF(KSTPRV .GT. 0) THEN - INDPER=0 - DO NP=INDX00-1,-KSTPRV,-1 - DO ITPC=1,MAXTPC - IF(STDPTP(NP) .EQ. STMTPC(ITPC)) THEN - INDPER=NP - SUBTOP=STDPTP(NP) - SUBFLG='P' - WRITE(6,243) INDPER,DAYZ,STDAYP(INDPER),SUBTOP - 243 FORMAT(/'...INDPER,DAYZ,STDAYP(INDPER),SUBTOP=',I3,2F10.3,1X,A) - GO TO 261 - ENDIF - ENDDO - - ENDDO - - 261 CONTINUE - IF(DAYZ-STDAYP(INDPER) .LE. 
DTPERS) THEN - WRITE(6,263) NAMVAR(MAXVIT+1),SUBTOP - 263 FORMAT('...THE MISSING OR ERRONEOUS VALUE OF ',A,' WILL BE ', - 1 'REPLACED BY A PERSISTENCE VALUE OF ',A) - - ELSE - - WRITE(6,273) DAYZ,STDAYP(INDPER),DTPERS - 273 FORMAT(/'...TIME INTERVAL TO THE CLOSEST PREVIOUS RECORD WITH ', - 1 'A PROPER STORM DEPTH CODE EXCEEDS DTPERS OR AN '/4X, - 2 'ACCEPTABLE VALUE CANNOT BE FOUND. ', - 3 'DAYZ,PREVIOUS DAY,DTPERS=',3F10.3/,4X,'WE WILL ', - 4 'SUBSTITUTE A CLIMATOLOGICAL VALUE.') - ENDIF - - ELSE - WRITE(6,277) KSTPRV - 277 FORMAT(/'...KSTPRV=',I2,' SO THERE IS NO PRIOR HISTORY AND NO ', - 1 'CHECKING. CLIMATOLOGICAL VALUES WILL BE SUBSTITUTED.') - ENDIF - -C DETERMINE CLIMATOLOGICAL VALUE IF NECESSARY - - IF(SUBTOP .EQ. EXE) THEN - IF(PCENZ .LE. 980.0) THEN - SUBTOP=DEEP - WRITE(6,279) PCENZ,SUBTOP - 279 FORMAT('...CLIMATOLOGICAL SUBSTITUTION OF STORM DEPTH: PCEN, ', - 1 'DEPTH=',F7.1,1X,A) - ELSE IF(PCENZ .LE. 1000.0) THEN - SUBTOP=MEDIUM - WRITE(6,279) PCENZ,SUBTOP - ELSE - SUBTOP=SHALO - WRITE(6,279) PCENZ,SUBTOP - ENDIF - SUBFLG='C' - ENDIF - - WRITE(TSTREC(NREC)(MAXCHR:MAXCHR),'(A)') SUBTOP - TSTREC(NREC)(MAXCHR-1:MAXCHR-1)=SUBFLG - WRITE(6,269) TSTREC(NREC) - 269 FORMAT('...AFTER SUBSTITUTION, THE RECORD IS:'/4X,A) - ENDIF - -C ASSIGN SUMMARY ERROR CODE - -C NO ERRORS - - IF(NTOTER .EQ. 0) THEN - IETYP=0 - ISGNER=1 - -C IF ALL ERRORS HAVE BEEN FIXED, SUMMARY CODE IS NEGATIVE - - ELSE - IF(NPOSER .EQ. 0) THEN - ISGNER=-1 - ELSE - ISGNER=1 - ENDIF - -C ADD CODE FOR DEPTH OF THE CYCLONIC CIRCULATION FIRST - - NERZ=0 - NALLER=NTOTER - IF(IABS(IERROR(NTOTER)) .EQ. 7) THEN - NERZ=1 - IETYP=7 - NALLER=NTOTER-1 - ENDIF - -C ALL OTHER ERRORS. PICK OUT ONLY ALL ERRORS THAT REMAIN OR -C ALL ERRORS THAT HAVE BEEN FIXED IF THERE ARE NO REMAINING -C ERRORS. DO NOTHING WITH OTHER ERRORS. - - DO NER=1,NALLER - IF((ISGNER .LT. 0 .AND. IERROR(NER) .LT. 0) .OR. - 1 (ISGNER .GT. 0 .AND. IERROR(NER) .GT. 0)) THEN - NERZ=NERZ+1 - - ELSE - GO TO 280 - ENDIF - - IF(NERZ .EQ. 1) THEN - IETYP=IABS(IERROR(NER)) - - ELSE IF(NERZ .EQ. 2) THEN - IETYP=IABS(IETYP)*10+IABS(IERROR(NER)) - - ELSE IF(NERZ .EQ. 3) THEN - IF(IABS(IERROR(NTOTER)) .EQ. 7) THEN - IETYP=78 - ELSE - IETYP=9 - ENDIF - - ELSE - IF(IABS(IERROR(NTOTER)) .EQ. 7) THEN - IETYP=79 - ELSE - IETYP=9 - ENDIF - ENDIF - - 280 CONTINUE - ENDDO - ENDIF - IETYP=SIGN(IETYP,ISGNER) - - WRITE(6,281) SCRREC(NUNIQ),NUMST,NUMSTM(NUNIQ),NTOTER,NPOSER, - 1 ISGNER,IETYP,(IERROR(NER),NER=1,NTOTER) - 281 FORMAT(/'...ERROR SUMMARY FOR STMID,NUMST,NUMSTM=',A,2I3,':'/4X, - 1 'NTOTER,NPOSER,ISGNER,IETYP,IERROR=',4I4/(4X,10I4)) - -C WRITE(6,287) NREC,IETYP,NUMTST(NREC),NUMST,NUNIQ,BUFINZ -C 287 FORMAT(/'...DEBUGGING, NREC,IETYP,NUMTST(NREC),NUMST,NUNIQ,', -C 1 'BUFINZ=',5I4/4X,A) - IFSECV(NUMTST(NREC))=IETYP - IF(IETYP .GT. 
0) THEN - NADD=NADD+1 - NUMBAD(NADD+NBAD)=NUMTST(NREC) - BADREC(NADD+NBAD)=TSTREC(NREC) - ELSE - NOKAY=NOKAY+1 - NUMOKA(NOKAY)=NUMTST(NREC) - OKAREC(NOKAY)=TSTREC(NREC) - ENDIF - - ENDDO - - ENDDO - - WRITE(6,301) NOKAY,NADD,NTEST,(ERCSV(NER),NER=1,NERCSV) - 301 FORMAT(//'...RESULTS OF THE SECONDARY VARIABLE ERROR CHECK ARE: ', - 1 'NOKAY=',I4,' AND NADD=',I4,' FOR A TOTAL OF ',I4, - 2 ' RECORDS.'//4X,'ERROR CODES ARE:'/(6X,A)) - WRITE(6,303) - 303 FORMAT(/'...NOTES: NEGATIVE ERROR CODES (EXCEPT DIR/SPD) INDICATE' - 1 ,' SUCCESSFUL RECOVERY FROM MISSING OR ERRONEOUS DATA'/11X - 2 ,'BY SUBSTITUTION FROM PERSISTENCE.'/11X,'A NEGATIVE ERR', - 3 'OR CODE FOR DIR/SPD INDICATES THAT ERROR RECOVERY WILL ', - 4 'BE POSTPONED UNTIL THE DIR/SPD CHECK.'/11X,'MULTIPLE ', - 5 'ERRORS ARE HANDLED AS FOLLOWS:'/13X,'THE FIRST SECONDARY' - 6 ,' ERROR OCCUPIES THE LEFT-MOST DIGIT.'/13X,'THE NEXT ', - 7 'SECONDARY ERROR OCCUPIES THE RIGHT-MOST DIGIT.'/13X, - 8 'THREE OR MORE ERRORS REVERTS TO ERROR CODE=7, ETC.'/13X, - 9 'ERRORS FOR THE DEPTH OF THE CYCLONIC CIRCULATION ARE ', - A 'COUNTED SEPARATELY.'//3X,'OKAY RECORDS ARE:',100X,'ERC'/) - - DO NOK=1,NOKAY - WRITE(6,309) NOK,NUMOKA(NOK),OKAREC(NOK),IFSECV(NUMOKA(NOK)) - 309 FORMAT(3X,I4,'...',I4,'...',A,'...',I3) - ENDDO - IF(NADD .GT. 0) WRITE(6,311) (NBAD+NBA,NUMBAD(NBAD+NBA), - 1 BADREC(NBAD+NBA), - 2 IFSECV(NUMBAD(NBAD+NBA)), - 3 NBA=1,NADD) - 311 FORMAT(/' ADDED BAD RECORDS ARE:',95X,'ERC'/(3X,I4,'...',I4, - 1 '...',A,'...',I3)) - NBAD=NBAD+NADD - - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: WRNING WRITES WARNING MESSAGE ABOUT RECORD MODS -C PRGMMR: S. LORD ORG: NP22 DATE: 1992-02-21 -C -C ABSTRACT: WRITES SIMPLE WARNING MESSAGE. -C -C PROGRAM HISTORY LOG: -C 1992-02-21 S. LORD -C -C USAGE: CALL WRNING(IDSUB) -C INPUT ARGUMENT LIST: -C IDSUB - SUBROUTINE NAME -C -C REMARKS: SEE REMARKS IN CODE. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE WRNING(IDSUB) - - CHARACTER*6 IDSUB - - WRITE(6,1) IDSUB - 1 FORMAT(21X,'***********************************************'/ - 1 21X,'***********************************************'/ - 2 21X,'**** ****'/ - 3 21X,'**** WARNING: RECORDS WITH CORRECT FORMAT ****'/ - 4 21X,'**** BUT MISSING OR ERRONEOUS ****'/ - 5 21X,'**** DATA MAY BE MODIFIED BY ****'/ - 6 21X,'**** THIS ROUTINE=',A6,'!!! ****'/ - 7 21X,'**** ****'/ - 8 21X,'**** TYPES OF SUBSTITUTIONS ARE: ****'/ - 9 21X,'**** CLIMATOLOGICAL SUBSTITUTION: C ****'/ - O 21X,'**** RSMC AVERAGING: A ****'/ - 1 21X,'**** PERSISTENCE SUBSTITUTION: P ****'/ - 2 21X,'**** OVERLAP MODIFICATION: O ****'/ - 3 21X,'**** DIRECTION/SPEED SUBSTITUTION: S ****'/ - 4 21X,'**** LATITUDE/LONGITUDE SUBSTITUTION: L ****'/ - 4 21X,'**** SWITCHED PCEN-PENV SUBSTITUTION: Z ****'/ - 8 21X,'**** ****'/ - 6 21X,'***********************************************'/ - 7 21X,'***********************************************') - - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: F1 RECALCULATES LONGITUDES -C PRGMMR: S. LORD ORG: NP22 DATE: 1993-05-01 -C -C ABSTRACT: SEE COMMENTS IN PROGRAM. ORIGINALLY WRITTEN BY C. J. NEWMANN -C -C PROGRAM HISTORY LOG: -C 1993-05-01 S. LORD INSTALLED PROGRAM -C -C USAGE: CALL F1(ALON) -C INPUT ARGUMENT LIST: SEE COMMENTS IN PROGRAM -C -C OUTPUT ARGUMENT LIST: -C SEE COMMENTS IN PROGRAM -C -C REMARKS: NONE. 
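As a quick check on the two small functions defined just below: F1 maps an east longitude to the convention expected by the Taylor routines, and F2 maps a ten-digit date/time (year, month, day, hour) to a year-day number that runs from 0.0 at 0000 UTC 1 January to 364.75 at 1800 UTC 31 December, with leap years ignored. A few worked values, assuming the functions are linked as written:

C     WORKED VALUES FOR F1 AND F2 (SEE THE FUNCTION BODIES BELOW)
      PRINT *, F1(250.)          ! 250E -> 110.  (110W)
      PRINT *, F1( 80.)          !  80E -> -80.
      PRINT *, F2(1998010100)    ! 0000 UTC  1 JAN -> 0.00
      PRINT *, F2(1998123118)    ! 1800 UTC 31 DEC -> 364.75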
-C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - FUNCTION F1(ALON) - -C CONVERT FROM E LONGITUDE TO THOSE ACCEPTABLE IN AL TAYLOR ROUTINES - - IF(ALON.GT.180.)F1=360.-ALON - IF(ALON.LE.180.)F1=-ALON - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: F2 CALCULATES DATES -C PRGMMR: D. A. KEYSER ORG: NP22 DATE: 1998-06-05 -C -C ABSTRACT: SEE COMMENTS IN PROGRAM. ORIGINALLY WRITTEN BY C. J. -C NEWMANN -C -C PROGRAM HISTORY LOG: -C 1993-05-01 S. LORD INSTALLED PROGRAM -C 1998-06-05 D. A. KEYSER - Y2K, FORTRAN 90 COMPLIANT -C -C USAGE: CALL F2(IDATIM) -C INPUT ARGUMENT LIST: -C IDATIM - 10-DIGIT DATE IN FORM YYYYDDMMHH -C -C REMARKS: NONE. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - FUNCTION F2(IDATIM) - -C OBTAIN JULIAN DAY NUMBER -C 0000UTC ON 1 JAN IS SET TO DAY NUMBER 0 AND 1800UTC ON 31 DEC IS SET -C TO DAY NUMBER 364.75. LEAP YEARS ARE IGNORED. - - CHARACTER*10 ALFA - WRITE(ALFA,'(I10)')IDATIM - READ(ALFA,'(I4,3I2)')KYR,MO,KDA,KHR - MON=MO - IF(MON.EQ.13)MON=1 - DANBR=3055*(MON+2)/100-(MON+10)/13*2-91+KDA - F2=DANBR-1.+REAL(KHR/6)*0.25 - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: SLDATE RETRIEVES DATE FROM SYSTEM AND DATE FILE -C PRGMMR: D. A. KEYSER ORG: NP22 DATE: 1998-06-05 -C -C ABSTRACT: RETRIEVES DATE FROM SYSTEM AND FROM A DATE FILE, AND -C OBTAINS THE DIFFERENCE BETWEEN THE TWO. CONSTRUCTS DATE -C IN FORM YYYYMMDD AND HHMM. -C -C PROGRAM HISTORY LOG: -C 1991-06-06 S. J. LORD -C 1998-06-05 D. A. KEYSER - Y2K/F90 COMPLIANCE -C -C USAGE: CALL SLDATE(IUNTDT,IDATEZ,IUTCZ,IOFFTM) -C INPUT ARGUMENT LIST: -C IUNTDT - UNIT NUMBER FOR FILE CONTAINING RUN DATE -C -C OUTPUT ARGUMENT LIST: -C IDATEZ - DATE IN FORM YYYYMMDD -C IUTCZ - DATE IN FORM HHMM -C IOFFTM - OFFSET (HOURS *100) BETWEEN SYSTEM DATE AND -C - FILE DATE (SYSTEM DATE MINUS FILE DATE) -C -C INPUT FILES: -C UNIT "IUNTDT" - FILE CONTAINING RUN DATE IN I4,3I2 FORMAT -C - ('YYYYMMDDHH') -C -C OUTPUT FILES: -C UNIT 06 - STANDARD OUTPUT PRINT -C -C REMARKS: NONE. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE SLDATE(IUNTDT,IDATEZ,IUTCZ,IOFFTM) - - CHARACTER USRDAT*10 - - SAVE - - DIMENSION IDAT(8),JDAT(6),RINC(5) - - EQUIVALENCE (IDAT(1),JW3YR),(IDAT(2),JW3MO),(IDAT(3),JW3DA), - 2 (IDAT(5),JW3HR),(IDAT(6),JW3MIN),(IDAT(7),JW3SEC) - - READ(IUNTDT,1) USRDAT - 1 FORMAT(A10) - WRITE(6,3) USRDAT - 3 FORMAT(/'...',A10,'...') - -C OBTAIN CURRENT SYSTEM DATE - IDAT (UTC) - - CALL W3UTCDAT(IDAT) - -C DECODE THE DATE LABEL INTO JDAT (UTC) - - READ(USRDAT(1: 4),'(I4)') JDAT(1) - READ(USRDAT(5: 6),'(I2)') JDAT(2) - READ(USRDAT(7: 8),'(I2)') JDAT(3) - READ(USRDAT(9:10),'(I2)') JDAT(5) - -C THIS IS THE TIME ZONE OFFSET (SAME AS FOR IDAT) - JDAT(4) = IDAT(4) - - JDAT(6) = 0 - -C COMBINE YEAR, MONTH, DAY, HOUR, MINUTE TO FORM YYYYMMDD - - IDATEZ=JDAT(1)*10000+JDAT(2)*100+JDAT(3) - IUTCZ =JDAT(5)*100+JDAT(6) - -C OBTAIN TIME DIFFERENCE (CURRENT TIME - LABEL TIME) IN HOURS * 100 - - CALL W3DIFDAT(IDAT,(/JDAT(1),JDAT(2),JDAT(3),JDAT(4),JDAT(5), - $ JDAT(6),0,0/),2,RINC) - IOFFTM=NINT(RINC(2)*100.) 
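C     NOTE: WITH THE TYPE-2 CALL, W3DIFDAT RETURNS THE WHOLE
C     DIFFERENCE IN HOURS IN RINC(2) (CONSISTENT WITH ITS USE HERE),
C     SO IOFFTM ENCODES HOURS*100 (SYSTEM DATE MINUS FILE DATE).
C     FOR EXAMPLE (ASSUMED VALUES): A SYSTEM TIME OF 0600 UTC AGAINST
C     A DATE FILE OF 0000 UTC THE SAME DAY GIVES RINC(2)=6.0 AND
C     IOFFTM=600; A FILE TIME 6 HOURS AHEAD OF THE SYSTEM TIME GIVES
C     IOFFTM=-600.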
- - WRITE(6,5) JW3YR,JW3MO,JW3DA,JW3HR,JW3MIN,JW3SEC,IOFFTM - 5 FORMAT(/'...CURRENT DATE/TIME FROM W3UTCDAT CALL IS:'/'JW3YR=',I5, - 1 ' JW3MO=',I3,' JW3DA=',I3,' JW3HR=',I5,' JW3MIN=',I5, - 2 ' JW3SEC=',I5,' OFFTIM=',I8) - - WRITE(6,13) IDATEZ,IUTCZ - 13 FORMAT('...IN SLDATE, IDATEZ,IUTCZ=',I10,2X,I4) - - RETURN - -C----------------------------------------------------------------------- - ENTRY SLDTCK(IUNTDT) - - REWIND IUNTDT - WRITE(6,21) IUNTDT - 21 FORMAT('...WRITING USRDAT TO UNIT',I3) - WRITE(IUNTDT,1) USRDAT - - RETURN - -C----------------------------------------------------------------------- - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: FIXSLM MODIFIES SEA-LAND MASK -C PRGMMR: S. J. LORD ORG: NP22 DATE: 1991-06-06 -C -C ABSTRACT: MODIFIES NCEP T126 GAUSSIAN GRID SEA-LAND MASK. CONVERTS -C SOME SMALL ISLANDS TO OCEAN POINTS. PROGRAM IS DEPENDENT -C ON MODEL RESOLUTION. -C -C PROGRAM HISTORY LOG: -C 1991-06-06 S. J. LORD -C 1992-04-08 S. J. LORD CONVERTED TO T126 FROM T80 -C -C USAGE: CALL FIXSLM(LONF,LATG2,RLON,RLAT,SLMASK) -C INPUT ARGUMENT LIST: -C LONF - NUMBER OF LONGITUDINAL POINTS, FIRST INDEX OF SLMASK -C LATG2 - NUMBER OF LATITUDINAL POINTS, SECOND INDEX OF SLMASK -C RLON - LONGITUDES -C RLAT - LATITUDES -C SLMASK - T162 SEA-LAND MASK ON GAUSSIAN GRID -C -C OUTPUT ARGUMENT LIST: -C SLMASK - MODIFIED T162 SEA-LAND MASK ON GAUSSIAN GRID -C -C OUTPUT FILES: -C UNIT 06 - STANDARD OUTPUT PRINT -C -C REMARKS: NONE. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE FIXSLM(LONF,LATG2,RLON,RLAT,SLMASK) - - PARAMETER (MAXSLM=35) - - SAVE - - DIMENSION RLAT(LATG2),RLON(LONF),SLMASK(LONF,LATG2),IPT(MAXSLM), - 1 JPT(MAXSLM) - - DATA NOCEAN/21/, - -C INDONESIAN ARCHIPELAGO,NEW CALEDONIA - - 1 IPT/133,135,129,177, - -C YUCATAN - - 2 290,291,292,289,290,291,289,290,291, - -C CUBA - - 3 299,300,301,302,303,303,304,305,14*0.0/, - -C INDONESIAN ARCHIPELAGO,NEW CALEDONIA - - 1 JPT/106,105,106,118, - -C YUCATAN - - 2 3*73,3*74,3*75, - -C CUBA - - 3 3*72,2*73,3*74,14*0.0/ - -C WRITE(6,7) -C 7 FORMAT('...CONVERTING LAND TO OCEAN, NPT,IPT,RLON,JPT,RLAT=') - DO NPT=1,NOCEAN - SLMASK(IPT(NPT),JPT(NPT))=0.0 -C WRITE(6,9) NPT,IPT(NPT),RLON(IPT(NPT)),JPT(NPT),RLAT(JPT(NPT)) -C 9 FORMAT(4X,2I5,F10.3,I5,F10.3) - ENDDO - - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: GAULAT CALCULATES GAUSSIAN GRID LATITUDES -C PRGMMR: S. J. LORD ORG: NP22 DATE: 1991-06-06 -C -C ABSTRACT: CALCULATES GAUSSIAN GRID LATITUDES -C -C PROGRAM HISTORY LOG: -C 1991-06-06 S. J. LORD - COPIED FROM KANAMITSU LIBRARY -C -C USAGE: CALL PGM-NAME(INARG1, INARG2, WRKARG, OUTARG1, ... ) -C INPUT ARGUMENT LIST: -C INARG1 - GENERIC DESCRIPTION, INCLUDING CONTENT, UNITS, -C INARG2 - TYPE. EXPLAIN FUNCTION IF CONTROL VARIABLE. -C -C OUTPUT ARGUMENT LIST: (INCLUDING WORK ARRAYS) -C WRKARG - GENERIC DESCRIPTION, ETC., AS ABOVE. 
-C OUTARG1 - EXPLAIN COMPLETELY IF ERROR RETURN -C ERRFLAG - EVEN IF MANY LINES ARE NEEDED -C -C INPUT FILES: (DELETE IF NO INPUT FILES IN SUBPROGRAM) -C DDNAME1 - GENERIC NAME & CONTENT -C -C OUTPUT FILES: (DELETE IF NO OUTPUT FILES IN SUBPROGRAM) -C DDNAME2 - GENERIC NAME & CONTENT AS ABOVE -C FT06F001 - INCLUDE IF ANY PRINTOUT -C -C REMARKS: LIST CAVEATS, OTHER HELPFUL HINTS OR INFORMATION -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE GAULAT(GAUL,K) - - IMPLICIT REAL(8) (A-H,O-Z) - DIMENSION A(500) - REAL GAUL(1) - - SAVE - - ESP=1.D-14 - C=(1.D0-(2.D0/3.14159265358979D0)**2)*0.25D0 - FK=K - KK=K/2 - CALL BSSLZ1(A,KK) - DO IS=1,KK - XZ=COS(A(IS)/SQRT((FK+0.5D0)**2+C)) - ITER=0 - 10 PKM2=1.D0 - PKM1=XZ - ITER=ITER+1 - IF(ITER.GT.10) GO TO 70 - DO N=2,K - FN=N - PK=((2.D0*FN-1.D0)*XZ*PKM1-(FN-1.D0)*PKM2)/FN - PKM2=PKM1 - PKM1=PK - ENDDO - PKM1=PKM2 - PKMRK=(FK*(PKM1-XZ*PK))/(1.D0-XZ**2) - SP=PK/PKMRK - XZ=XZ-SP - AVSP=ABS(SP) - IF(AVSP.GT.ESP) GO TO 10 - A(IS)=XZ - ENDDO - IF(K.EQ.KK*2) GO TO 50 - A(KK+1)=0.D0 - PK=2.D0/FK**2 - DO N=2,K,2 - FN=N - PK=PK*FN**2/(FN-1.D0)**2 - ENDDO - 50 CONTINUE - DO N=1,KK - L=K+1-N - A(L)=-A(N) - ENDDO - - RADI=180./(4.*ATAN(1.)) - GAUL(1:K)=ACOS(A(1:K))*RADI -C PRINT *,'GAUSSIAN LAT (DEG) FOR JMAX=',K -C PRINT *,(GAUL(N),N=1,K) - - RETURN - 70 WRITE(6,6000) - 6000 FORMAT(//5X,14HERROR IN GAUAW//) - CALL ABORT1(' GAULAT',6000) - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: BSSLZ1 CALCULATES BESSEL FUNCTIONS -C PRGMMR: S. J. LORD ORG: NP22 DATE: 1991-06-06 -C -C ABSTRACT: CALCULATES BESSEL FUNCTIONS -C -C PROGRAM HISTORY LOG: -C 1991-06-06 S. J. LORD - COPIED FROM KANAMITSU LIBRARY -C -C USAGE: CALL PGM-NAME(INARG1, INARG2, WRKARG, OUTARG1, ... ) -C INPUT ARGUMENT LIST: -C INARG1 - GENERIC DESCRIPTION, INCLUDING CONTENT, UNITS, -C INARG2 - TYPE. EXPLAIN FUNCTION IF CONTROL VARIABLE. -C -C OUTPUT ARGUMENT LIST: (INCLUDING WORK ARRAYS) -C WRKARG - GENERIC DESCRIPTION, ETC., AS ABOVE. 
-C OUTARG1 - EXPLAIN COMPLETELY IF ERROR RETURN -C ERRFLAG - EVEN IF MANY LINES ARE NEEDED -C -C INPUT FILES: (DELETE IF NO INPUT FILES IN SUBPROGRAM) -C DDNAME1 - GENERIC NAME & CONTENT -C -C OUTPUT FILES: (DELETE IF NO OUTPUT FILES IN SUBPROGRAM) -C DDNAME2 - GENERIC NAME & CONTENT AS ABOVE -C FT06F001 - INCLUDE IF ANY PRINTOUT -C -C REMARKS: LIST CAVEATS, OTHER HELPFUL HINTS OR INFORMATION -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE BSSLZ1(BES,N) - - IMPLICIT REAL(8) (A-H,O-Z) - DIMENSION BES(N) - DIMENSION BZ(50) - - DATA PI/3.14159265358979D0/ - DATA BZ / 2.4048255577D0, 5.5200781103D0, - $ 8.6537279129D0,11.7915344391D0,14.9309177086D0,18.0710639679D0, - $ 21.2116366299D0,24.3524715308D0,27.4934791320D0,30.6346064684D0, - $ 33.7758202136D0,36.9170983537D0,40.0584257646D0,43.1997917132D0, - $ 46.3411883717D0,49.4826098974D0,52.6240518411D0,55.7655107550D0, - $ 58.9069839261D0,62.0484691902D0,65.1899648002D0,68.3314693299D0, - $ 71.4729816036D0,74.6145006437D0,77.7560256304D0,80.8975558711D0, - $ 84.0390907769D0,87.1806298436D0,90.3221726372D0,93.4637187819D0, - $ 96.6052679510D0,99.7468198587D0,102.888374254D0,106.029930916D0, - $ 109.171489649D0,112.313050280D0,115.454612653D0,118.596176630D0, - $ 121.737742088D0,124.879308913D0,128.020877005D0,131.162446275D0, - $ 134.304016638D0,137.445588020D0,140.587160352D0,143.728733573D0, - $ 146.870307625D0,150.011882457D0,153.153458019D0,156.295034268D0/ - NN=N - IF(N.LE.50) GO TO 12 - BES(50)=BZ(50) - BES(51:N)=BES(50:N-1)+PI - NN=49 - 12 CONTINUE - BES(1:NN)=BZ(1:NN) - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: TRKSUB DETERMINES OBS. TROP. CYCLONE TRACKS -C PRGMMR: S. J. LORD ORG: NP22 DATE: 1991-06-06 -C -C ABSTRACT: CONTAINS VARIOUS ENTRY POINTS TO DETERMINE TROPICAL -C CYCLONE TRACKS, CALCULATE STORM RELATIVE COORDINATE, DETERMINES -C FIRST OCCURRENCE OF A PARTICULAR STORM. -C -C PROGRAM HISTORY LOG: -C 1991-06-06 S. J. LORD -C -C USAGE: CALL TRKSUB(IOVITL,IOPTZ,IDATTK,DAY0,DAYMN,DAYMX,DAYOFF, -C 1 STMDR0,STMSP0,STLAT0,STLON0,IERSET, -C 3 STLATP,STLONP,STDIRP,STSPDP,STDAYP, -C 4 STRMXP,STPCNP,STPENP,STVMXP,KSTPZ, -C 5 STDPTP,STMNTK) -C CALL SETTRK(IOVITL,IOPTZ,IDATTK,DAY0,DAYMN,DAYMX,DAYOFF, -C 1 STMDR0,STMSP0,STLAT0,STLON0,STMNTK,IERSET) -C INPUT ARGUMENT LIST: -C DAY0 - FRACTIONAL NUMBER OF DAYS SINCE 12/31/1899 -C DAYMX - FRACTIONAL NUMBER OF DAYS SINCE 12/31/1899 (MAX) -C DAYMN - FRACTIONAL NUMBER OF DAYS SINCE 12/31/1899 (MIN) -C -C OUTPUT ARGUMENT LIST: (INCLUDING WORK ARRAYS) -C WRKARG - GENERIC DESCRIPTION, ETC., AS ABOVE. 
-C OUTARG1 - EXPLAIN COMPLETELY IF ERROR RETURN -C ERRFLAG - EVEN IF MANY LINES ARE NEEDED -C -C INPUT FILES: (DELETE IF NO INPUT FILES IN SUBPROGRAM) -C DDNAME1 - GENERIC NAME & CONTENT -C -C OUTPUT FILES: -C UNIT 06 - STANDARD OUTPUT PRINT -C -C REMARKS: LIST CAVEATS, OTHER HELPFUL HINTS OR INFORMATION -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE TRKSUB(IOVITL,IOPTZ,IDATTK,DAY0,DAYMN,DAYMX,DAYOFF, - 1 STMDR0,STMSP0,STLAT0,STLON0,IERSET,STLATP, - 2 STLONP,STDIRP,STSPDP,STDAYP,STRMXP,STPCNP, - 3 STPENP,STVMXP,KSTPZ,STDPTP,STMNTK) - - PARAMETER (MAXSTM=70) - PARAMETER (NSTM=MAXSTM,NSTM1=NSTM+1) - PARAMETER (NPRVMX=61) - - LOGICAL NOMIN,NOMAX,EXTRPB,EXTRPF - CHARACTER STMNTK*(*),STDPTP*1 - - SAVE - - DIMENSION STDPTP(-NPRVMX:-1) - - DIMENSION RINC(5) - - CHARACTER STMNAM*9,STMID*3,RSMC*4 - - LOGICAL FSTFLG - - DIMENSION STMNAM(MAXSTM),STMLAT(MAXSTM),STMLON(MAXSTM), - 1 STMDIR(MAXSTM),STMSPD(MAXSTM),IDATE(MAXSTM), - 2 IUTC(MAXSTM),RMAX(MAXSTM),PENV(MAXSTM),PCEN(MAXSTM), - 3 PTOP(MAXSTM),RSMC(MAXSTM),RMW(MAXSTM),VMAX(MAXSTM), - 4 R15NW(MAXSTM),R15NE(MAXSTM),R15SE(MAXSTM),R15SW(MAXSTM), - 5 STMID(MAXSTM),FSTFLG(MAXSTM) - - PARAMETER (MAXTPC= 3) - - CHARACTER SHALO*1,MEDIUM*1,DEEP*1,STMTPC*1,EXE*1 - - DIMENSION STMTOP(0:MAXTPC) - - DIMENSION STMTPC(0:MAXTPC) - - EQUIVALENCE (STMTPC(0), EXE),(STMTPC(1),SHALO),(STMTPC(2),MEDIUM), - 1 (STMTPC(3),DEEP) - - DIMENSION TRKLTZ(0:NSTM1),TRKLNZ(0:NSTM1), - 1 TRKDRZ(0:NSTM1),TRKSPZ(0:NSTM1), - 2 TRKRMX(0:NSTM1),TRKPCN(0:NSTM1), - 3 TRKPEN(0:NSTM1),TRKVMX(0:NSTM1), - 4 TRKDPT(0:NSTM1) - - DIMENSION STMDAY(0:NSTM1),SRTDAY(NSTM),IDASRT(0:NSTM1), - 1 SRTLAT(NSTM),SRTLON(NSTM),SRTDIR(NSTM),SRTSPD(NSTM), - 2 ISRTDA(NSTM),ISRTUT(NSTM),SRTRMX(NSTM),SRTPCN(NSTM), - 3 SRTPEN(NSTM),SRTVMX(NSTM),SRTDPT(NSTM) - - DIMENSION STLATP(-NPRVMX:-1),STLONP(-NPRVMX:-1), - 1 STDIRP(-NPRVMX:-1),STSPDP(-NPRVMX:-1), - 1 STDAYP(-NPRVMX: 0),STRMXP(-NPRVMX:-1), - 1 STPCNP(-NPRVMX:-1),STPENP(-NPRVMX:-1), - 2 STVMXP(-NPRVMX:-1) - - EQUIVALENCE (TRKLTZ(1),STMLAT(1)),(TRKLNZ(1),STMLON(1)), - 1 (TRKDRZ(1),STMDIR(1)),(TRKSPZ(1),STMSPD(1)), - 2 (TRKRMX(1),RMAX (1)),(TRKPCN(1),PCEN (1)), - 3 (TRKPEN(1),PENV (1)),(TRKVMX(1),VMAX (1)), - 4 (TRKDPT(1),PTOP (1)) - - DATA SHALO/'S'/,MEDIUM/'M'/,DEEP/'D'/,EXE/'X'/, - 1 STMTOP/-99.0,700.,400.,200./ - -C FIVMIN IS FIVE MINUTES IN UNITS OF FRACTIONAL DAYS -C FACSPD IS CONVERSION FACTOR FOR R(DEG LAT)=V(M/S)*T(FRAC DAY)* - - DATA IPRNT/0/,FIVMIN/3.4722E-3/,FACSPD/0.77719/ - -C----------------------------------------------------------------------- - - ENTRY SETTRK(IOVITL,IOPTZ,IDATTK,DAY0,DAYMN,DAYMX,DAYOFF, - 1 STMDR0,STMSP0,STLAT0,STLON0,STMNTK,IERSET) - - IERSET=0 - IOPT=IOPTZ - IDTREQ=IDATTK - IF(IOPT .EQ. 5) THEN - STMID (1)=STMNTK(1:3) - ELSE IF(IOPT .EQ. 6) THEN - STMNAM(1)=STMNTK(1:9) - ELSE - WRITE(6,1) IOPT - 1 FORMAT(/'******ILLEGAL OPTION IN SETTRK=',I4) - IERSET=1 - RETURN - ENDIF - - WRITE(6,6) IOPT,STMNTK,DAY0,DAYMN,DAYMX,IDTREQ,IHRREQ - 6 FORMAT(/'...ENTERING SETTRK, WITH IOPT=',I2,'. 
LOOKING FOR ALL ', - 1 'FIXES FOR ',A,' WITH CENTRAL TIME=',F12.2,/4X,' MIN/MAX', - 2 ' TIMES=',2F12.2,' AND REQUESTED DATE/TIME=',2I10) - - CALL NEWVIT(IOVITL,IPRNT,IOPT,IERVIT,MAXSTM,KSTORM,IDTREQ,IHRREQ, - 1 IHRWIN,IDATE,IUTC,STMLAT,STMLON,STMDIR,STMSPD, - 2 PCEN,PENV,RMAX,VMAX,RMW,R15NE,R15SE,R15SW,R15NW, - 3 PTOP,FSTFLG,STMNAM,STMID,RSMC) - -C CONVERT FIX TIMES TO FLOATING POINT JULIAN DAY - - DO KST=1,KSTORM - CALL ZTIME(IDATE(KST),IUTC(KST),IYR,IMO,IDA,IHR,IMIN) - CALL W3DIFDAT((/IYR,IMO,IDA,0,0,0,0,0/),(/1899,12,31,0,0,0,0,0/), - $ 1,RINC) - JDY = NINT(RINC(1)) - CALL FLDAY(JDY,IHR,IMIN,STMDAY(KST)) - STMDAY(KST)=STMDAY(KST)+DAYOFF - -c WRITE(6,16) IDATE(KST),IUTC(KST),IYR,IMO,IDA,IHR,IMIN,JDY, -c 1 STMDAY(KST) -c 16 FORMAT('...STORM FIX TIMES ARE: IDATE,IUTC,IYR,IMO,IDA,IHR,IMIN,', -c 1 'JDY,STMDAY'/4X,8I8,F15.5) - - ENDDO - - CALL SORTRL(STMDAY(1:KSTORM),IDASRT(1:KSTORM),KSTORM) - -c WRITE(6,26) (STMDAY(KST),IDASRT(KST),KST=1,KSTORM) -c 26 FORMAT(/'...SORTED STORM DAYS AND INDEX ARE:'/(5X,F15.5,I6)) - -C PICK OUT TIMES AND LOCATIONS FROM SORTED LIST OF STORM DAYS - - NOMIN=.TRUE. - NOMAX=.TRUE. - EXTRPB=.FALSE. - EXTRPF=.FALSE. - KSRTMN=-1 - KSRTMX=-1 - - DO KSRT=1,KSTORM - - IF(STMDAY(KSRT) .GT. DAYMN .AND. NOMIN) THEN - NOMIN=.FALSE. - KSRTMN=KSRT-1 - ENDIF - - IF(STMDAY(KSRT) .GT. DAYMX .AND. NOMAX) THEN - NOMAX=.FALSE. - KSRTMX=KSRT - ENDIF - - ENDDO - - IF(KSRTMN .LE. 0) THEN - -C WE HAVENT'T BEEN ABLE TO FIND A STMDAY THAT IS LESS THAN 8 HOURS -C EARLIER THAN THE TIME WINDOW. EITHER THIS IS THE FIRST TIME -C THIS STORM HAS BEEN RUN OR THERE MAY BE AN ERROR. IN EITHER -C CASE, WE ALLOW EXTRAPOLATION OF THE OBSERVED MOTION BACK -C IN TIME, BUT SET AN ERROR FLAG. THE SAME METHOD IS -C USED FOR THE LAST RUN OF A PARTICULAR STORM. - - DT=STMDAY(1)-DAYMN - IF(DT .LE. 0.333333) THEN - WRITE(6,41) KSTORM,KSRT,DAYMN,(STMDAY(KST),KST=1,KSTORM) - 41 FORMAT(/'######CANNOT FIND STORM RECORDS LESS THAN 8 HOURS ', - 1 'BEFORE WINDOW MINIMUM.'/7X,'THIS IS THE FIRST RECORD ', - 2 'FOR THIS STORM OR THERE MAY BE AN ERROR. KSTORM,KSRT,', - 3 'DAYMN,STMDAY=',2I4,F10.3/(5X,10F12.3)) - IERSET=41 - ENDIF - - EXTRPB=.TRUE. - KSRTMN=0 - ISRT=IDASRT(1) - IDASRT(KSRTMN)=0 - STMDAY(KSRTMN)=DAYMN - TRKDRZ(KSRTMN)=STMDIR(ISRT) - TRKSPZ(KSRTMN)=STMSPD(ISRT) - CALL DS2UV(USTM,VSTM,STMDIR(ISRT),STMSPD(ISRT)) - TRKLTZ(KSRTMN)=STMLAT(ISRT)-VSTM*DT*FACSPD - TRKLNZ(KSRTMN)=STMLON(ISRT)-USTM*DT*FACSPD/COSD(STMLAT(ISRT)) - TRKRMX(KSRTMN)=RMAX(ISRT) - TRKPCN(KSRTMN)=PCEN(ISRT) - TRKPEN(KSRTMN)=PENV(ISRT) - TRKVMX(KSRTMN)=VMAX(ISRT) - TRKDPT(KSRTMN)=PTOP(ISRT) - WRITE(6,39) ISRT,KSRTMN,STMDAY(KSRTMN),TRKDRZ(KSRTMN), - 1 TRKSPZ(KSRTMN),USTM,VSTM,DT,TRKLTZ(KSRTMN), - 2 TRKLNZ(KSRTMN),STMLAT(ISRT),STMLON(ISRT) - 39 FORMAT(/'...EXTRAPOLATING FIX BACKWARDS IN TIME: ISRT,KSRTMN,', - 1 '(STMDAY,TRKDRZ,TRKSPZ(KSRTMN)),USTM,VSTM,DT,'/41X, - 2 '(TRKLTZ,TRKLNZ(KSRTMN)),(STMLAT,STMLON(ISRT))='/40X, - 3 2I3,6F12.3/43X,4F12.3) - ENDIF - - IF(KSRTMX .LE. 0) THEN - DT=DAYMX-STMDAY(KSTORM) - IF(DT .LE. 0.333333) THEN - WRITE(6,51) KSTORM,KSRT,DAYMX,(STMDAY(KST),KST=1,KSTORM) - 51 FORMAT(/'######CANNOT FIND STORM RECORDS MORE THAN 8 HOURS ', - 1 'AFTER WINDOW MAXIMUM.'/7X,'THIS IS THE LAST RECORD ', - 2 'FOR THIS STORM OR THERE MAY BE AN ERROR. KSTORM,KSRT,', - 3 'DAYMX,STMDAY=',2I4,F10.3/(5X,10F12.3)) - IERSET=51 - ENDIF - - EXTRPF=.TRUE. 
- KSRTMX=KSTORM+1 - ISRT=IDASRT(KSTORM) - IDASRT(KSRTMX)=KSTORM+1 - STMDAY(KSRTMX)=DAYMX - TRKDRZ(KSRTMX)=STMDIR(ISRT) - TRKSPZ(KSRTMX)=STMSPD(ISRT) - CALL DS2UV(USTM,VSTM,TRKDRZ(ISRT),TRKSPZ(ISRT)) - TRKLTZ(KSRTMX)=STMLAT(ISRT)+VSTM*DT*FACSPD - TRKLNZ(KSRTMX)=STMLON(ISRT)+USTM*DT*FACSPD/COSD(STMLAT(ISRT)) - TRKRMX(KSRTMX)=RMAX(ISRT) - TRKPCN(KSRTMX)=PCEN(ISRT) - TRKPEN(KSRTMX)=PENV(ISRT) - TRKVMX(KSRTMX)=VMAX(ISRT) - TRKDPT(KSRTMX)=PTOP(ISRT) - WRITE(6,49) ISRT,STMDAY(KSRTMX),TRKDRZ(KSRTMX),TRKSPZ(KSRTMX), - 1 USTM,VSTM,DT,TRKLTZ(KSRTMX),TRKLNZ(KSRTMX), - 2 STMLAT(ISRT),STMLON(ISRT) - 49 FORMAT(/'...EXTRAPOLATING FIX FORWARDS IN TIME: ISRT,(STMDAY,', - 1 'TRKDIR,TRKSPD(KSRTMX)),USTM,VSTM,DT,'/41X,'(TRKLTZ,', - 2 'TRKLNZ(KSRTMX)),(STMLAT,STMLON(ISRT))='/40X,I3,6F12.3/ - 3 43X,4F12.3) - - ENDIF - - KK=1 - KST0=-1 - TIMMIN=1.E10 - -C PUT ALL FIXES THAT DEFINE THE TIME WINDOW INTO ARRAYS SORTED -C BY TIME. FIRST, ELIMINATE RECORDS WITH DUPLICATE TIMES. -C OUR ARBITRARY CONVENTION IS TO KEEP THE LATEST RECORD. ANY -C FIX TIME WITHIN 5 MINUTES OF ITS PREDECESSOR IN THE SORTED -C LIST IS CONSIDERED DUPLICATE. - - DO KST=KSRTMN,KSRTMX - IF(KST .GT. KSRTMN) THEN - IF(STMDAY(KST)-SRTDAY(KK) .LT. FIVMIN) THEN - WRITE(6,53) KST,KK,STMDAY(KST),SRTDAY(KK) - 53 FORMAT(/'...TIME SORTED FIX RECORDS SHOW A DUPLICATE, KST,KK,', - 1 'STMDAY(KST),SRTDAY(KK)=',2I5,2F12.3) - ELSE - KK=KK+1 - ENDIF - ENDIF - -C STORE SORTED LAT/LON, DIRECTION, SPEED FOR FUTURE USE. - - SRTLAT(KK)=TRKLTZ(IDASRT(KST)) - SRTLON(KK)=TRKLNZ(IDASRT(KST)) - SRTDIR(KK)=TRKDRZ(IDASRT(KST)) - SRTSPD(KK)=TRKSPZ(IDASRT(KST)) - SRTDAY(KK)=STMDAY(KST) - SRTRMX(KK)=TRKRMX(IDASRT(KST)) - SRTPCN(KK)=TRKPCN(IDASRT(KST)) - SRTPEN(KK)=TRKPEN(IDASRT(KST)) - SRTVMX(KK)=TRKVMX(IDASRT(KST)) - SRTDPT(KK)=TRKDPT(IDASRT(KST)) - -c fixit?? - to avoid subscript zero warning on next two lines, I did -c the following .... -cdak ISRTDA(KK)=IDATE(IDASRT(KST)) -cdak ISRTUT(KK)=IUTC (IDASRT(KST)) - if(IDASRT(KST).ne.0) then - ISRTDA(KK)=IDATE(IDASRT(KST)) - ISRTUT(KK)=IUTC (IDASRT(KST)) - else - ISRTDA(KK)=0 - ISRTUT(KK)=0 - end if - - IF(ABS(SRTDAY(KK)-DAY0) .LT. TIMMIN) THEN - IF(ABS(SRTDAY(KK)-DAY0) .LT. FIVMIN) KST0=KK - KSTZ=KK - TIMMIN=ABS(SRTDAY(KK)-DAY0) - ENDIF - ENDDO - - KSTMX=KK - -C ZERO OUT EXTRAPOLATED DATE AND TIME AS A REMINDER - - IF(EXTRPF) THEN - ISRTDA(KSTMX)=0 - ISRTUT(KSTMX)=0 - ENDIF - - IF(EXTRPB) THEN - ISRTDA(1)=0 - ISRTUT(1)=0 - ENDIF - - IF(KSTZ .EQ. KSTMX .AND. .NOT. (EXTRPB .OR. EXTRPF)) THEN - WRITE(6,61) KSTZ,KSTMX,(SRTDAY(KST),KST=1,KSTMX) - 61 FORMAT(/'******THE REFERENCE STORM INDEX IS THE MAXIMUM ALLOWED ', - 1 'A PROBABLE ERROR HAS OCCURRED'/8X,'KSTZ,KSTMX,SRTDAY=', - 2 2I5/(5X,10F12.3)) - CALL ABORT1(' SETTRK',61) - ENDIF - - IF(KST0 .LE. 0) THEN - WRITE(6,72) DAY0,KST0,(SRTDAY(KST),KST=1,KSTMX) - 72 FORMAT(/'******THERE IS NO FIX AT THE ANALYSIS TIME, AN ', - 1 'INTERPOLATED POSITION WILL BE CALCULATED'/5X,'DAY0,', - 2 'KST0,SRTDAY=',F12.3,I6/(5X,10F12.3)) - IF(DAY0-SRTDAY(KSTZ) .GT. 
0.0) THEN - RATIO=(DAY0-SRTDAY(KSTZ))/(SRTDAY(KSTZ+1)-SRTDAY(KSTZ)) - STLAT0=SRTLAT(KSTZ)+(SRTLAT(KSTZ+1)-SRTLAT(KSTZ))*RATIO - STLON0=SRTLON(KSTZ)+(SRTLON(KSTZ+1)-SRTLON(KSTZ))*RATIO - STMDR0=SRTDIR(KSTZ)+(SRTDIR(KSTZ+1)-SRTDIR(KSTZ))*RATIO - STMSP0=SRTSPD(KSTZ)+(SRTSPD(KSTZ+1)-SRTSPD(KSTZ))*RATIO - STDAY0=DAY0 - ELSE - RATIO=(DAY0-SRTDAY(KSTZ-1))/(SRTDAY(KSTZ)-SRTDAY(KSTZ-1)) - STLAT0=SRTLAT(KSTZ-1)+(SRTLAT(KSTZ)-SRTLAT(KSTZ-1))*RATIO - STLON0=SRTLON(KSTZ-1)+(SRTLON(KSTZ)-SRTLON(KSTZ-1))*RATIO - STMDR0=SRTDIR(KSTZ-1)+(SRTDIR(KSTZ)-SRTDIR(KSTZ-1))*RATIO - STMSP0=SRTSPD(KSTZ-1)+(SRTSPD(KSTZ)-SRTSPD(KSTZ-1))*RATIO - STDAY0=DAY0 - ENDIF - - ELSE - STLAT0=SRTLAT(KST0) - STLON0=SRTLON(KST0) - STMDR0=SRTDIR(KST0) - STMSP0=SRTSPD(KST0) - STDAY0=SRTDAY(KST0) - ENDIF - - WRITE(6,77) (KSRT,ISRTDA(KSRT),ISRTUT(KSRT), - 1 SRTDAY(KSRT),SRTLAT(KSRT),SRTLON(KSRT), - 2 SRTDIR(KSRT),SRTSPD(KSRT), - 3 SRTPCN(KSRT),SRTPEN(KSRT),SRTRMX(KSRT), - 4 SRTVMX(KSRT),SRTDPT(KSRT),KSRT=1,KSTMX) - 77 FORMAT(/'...FINAL SORTED LIST IS:'/6X,'YYYYMMDD',2X,'HHMM',4X, - 1 'RJDAY',7X,'LAT',7X,'LON',6X,'DIR',7X,'SPEED',4X,' PCEN', - 2 26X,'PENV',6X,'RMAX',5X,'VMAX',4X,'PTOP'/(1X,I3,2X,I8,2X, - 3 I4,8F10.2,2(3X,F5.1))) - - WRITE(6,79) STDAY0,STLAT0,STLON0,STMDR0,STMSP0 - 79 FORMAT(/'...THE REFERENCE TIME, LATITUDE, LONGITUDE, DIRECTION ', - 1 'AND SPEED ARE:',5F12.3) - - WRITE(6,89) - 89 FORMAT(/'...END SETTRK') - - RETURN - -C----------------------------------------------------------------------- - - ENTRY PRVSTM(STLATP,STLONP,STDIRP,STSPDP,STDAYP, - 1 STRMXP,STPCNP,STPENP,STVMXP,STDPTP,KSTPZ) - -C THIS ENTRY IS CURRENTLY SET UP TO RETURN THE TWO PREVIOUS -C SETS OF STORM LAT/LON, DIR/SPD, TIME. FOR CASES IN WHICH -C INSUFFICIENT STORM RECORDS HAVE BEEN FOUND, THE SLOTS ARE -C FILLED WITH -99.0 OR A DASH - -C KSTPZ IS THE NUMBER OF PREVIOUS, NON-EXTRAPOLATED, STORM RECORDS - - KSTPZ=MIN0(MAX0(KSTZ-1,0),NPRVMX) - STLATP(-NPRVMX:-1)=-99.0 - STLONP(-NPRVMX:-1)=-99.0 - STDIRP(-NPRVMX:-1)=-99.0 - STSPDP(-NPRVMX:-1)=-99.0 - STDAYP(-NPRVMX:-1)=-99.0 - STRMXP(-NPRVMX:-1)=-99.0 - STPCNP(-NPRVMX:-1)=-99.0 - STPENP(-NPRVMX:-1)=-99.0 - STVMXP(-NPRVMX:-1)=-99.0 - STDPTP(-NPRVMX:-1)='-' - - DO KSTP=1,KSTPZ - STLATP(-KSTP)=SRTLAT(KSTZ-KSTP) - STLONP(-KSTP)=SRTLON(KSTZ-KSTP) - STDIRP(-KSTP)=SRTDIR(KSTZ-KSTP) - STSPDP(-KSTP)=SRTSPD(KSTZ-KSTP) - STDAYP(-KSTP)=SRTDAY(KSTZ-KSTP) - STRMXP(-KSTP)=SRTRMX(KSTZ-KSTP) - STPCNP(-KSTP)=SRTPCN(KSTZ-KSTP) - STPENP(-KSTP)=SRTPEN(KSTZ-KSTP) - STVMXP(-KSTP)=SRTVMX(KSTZ-KSTP) - -C RECODE PRESSURE STORM DEPTH INTO A CHARACTER - - KTPC=0 - DO KTOP=1,MAXTPC - IF(SRTDPT(KSTZ-KSTP) .EQ. STMTOP(KTOP)) KTPC=KTOP - ENDDO - STDPTP(-KSTP)=STMTPC(KTPC) - - ENDDO - IF(EXTRPB .AND. KSTZ-KSTPZ .LE. 1) KSTPZ=KSTPZ-1 - - IF(KSTPZ .EQ. 0) THEN - WRITE(6,97) - 97 FORMAT(/'...NO STORM RECORDS PRECEEDING THE REFERENCE TIME HAVE ', - 1 'BEEN FOUND BY PRVSTM.') - - ELSE - WRITE(6,98) KSTPZ,NPRVMX,STDAYP(-1) - 98 FORMAT(/'...PRVSTM HAS FOUND',I3,' STORM RECORDS PRECEEDING THE ', - 1 'REFERENCE TIME (MAX ALLOWED=',I2,').'/4X,'THE TIME ', - 2 'CORRESPONDING TO INDEX -1 IS',F12.3,'.') - ENDIF - -C WRITE(6,99) KSTZ,KSTPZ,(STLATP(KK),STLONP(KK),STDIRP(KK), -C 1 STSPDP(KK),STDAYP(KK),STRMXP(KK),STPCNP(KK), -C 2 STPENP(KK),STVMXP(KK),KK=-1,-NPRVMX,-1) -C 99 FORMAT(/'...FROM PRVSTM, KSTZ,KSTPZ,STLATP,STLONP,STDIRP,STSPDP,', -C 1 'STDAYP,STRMXP,STPCNP,STPENP,STVMXP=',2I3/(5X,9F10.2)) - RETURN - -C----------------------------------------------------------------------- - - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . 
-C SUBPROGRAM: NEWVIT READS TROPICAL CYCLONE VITAL STAT. FILE -C PRGMMR: S. J. LORD ORG: NP22 DATE: 1991-06-06 -C -C ABSTRACT: GENERAL FILE READER FOR TROPICAL CYCLONE VITAL STATISTICS -C FILE. CAN FIND ALL STORMS OF A PARTICULAR NAME OR ID, ALL -C STORMS ON A PARTICULAR DATE/TIME AND VARIOUS COMBINATIONS OF -C THE ABOVE. -C -C PROGRAM HISTORY LOG: -C 1991-06-06 S. J. LORD -C -C USAGE: CALL PGM-NAME(INARG1, INARG2, WRKARG, OUTARG1, ... ) -C INPUT ARGUMENT LIST: -C INARG1 - GENERIC DESCRIPTION, INCLUDING CONTENT, UNITS, -C INARG2 - TYPE. EXPLAIN FUNCTION IF CONTROL VARIABLE. -C -C OUTPUT ARGUMENT LIST: (INCLUDING WORK ARRAYS) -C WRKARG - GENERIC DESCRIPTION, ETC., AS ABOVE. -C OUTARG1 - EXPLAIN COMPLETELY IF ERROR RETURN -C ERRFLAG - EVEN IF MANY LINES ARE NEEDED -C -C INPUT FILES: (DELETE IF NO INPUT FILES IN SUBPROGRAM) -C DDNAME1 - GENERIC NAME & CONTENT -C -C OUTPUT FILES: (DELETE IF NO OUTPUT FILES IN SUBPROGRAM) -C DDNAME2 - GENERIC NAME & CONTENT AS ABOVE -C FT06F001 - INCLUDE IF ANY PRINTOUT -C -C REMARKS: LIST CAVEATS, OTHER HELPFUL HINTS OR INFORMATION -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE NEWVIT(IOVITL,IPRNT,IOPT,IERVIT,MAXSTM,KSTORM,IDTREQ, - 1 IHRREQ,IHRWIN,IDATE,IUTC,STMLAT,STMLON,STMDIR,STMSPD, - 2 PCEN,PENV,RMAX,VMAX,RMW,R15NE,R15SE,R15SW,R15NW, - 3 PTOP,FSTFLG,STMNAM,STMID,RSMC) - - SAVE - - DIMENSION RINC(5) - - CHARACTER STMNAM*9,STMID*3,RSMC*4 - - LOGICAL FSTFLG - - DIMENSION STMNAM(MAXSTM),STMLAT(MAXSTM),STMLON(MAXSTM), - 1 STMDIR(MAXSTM),STMSPD(MAXSTM),IDATE(MAXSTM), - 2 IUTC(MAXSTM),RMAX(MAXSTM),PENV(MAXSTM),PCEN(MAXSTM), - 3 PTOP(MAXSTM),RSMC(MAXSTM),RMW(MAXSTM),VMAX(MAXSTM), - 4 R15NW(MAXSTM),R15NE(MAXSTM),R15SE(MAXSTM),R15SW(MAXSTM), - 5 STMID(MAXSTM),FSTFLG(MAXSTM) - - PARAMETER (MAXCHR=95) - PARAMETER (MAXVIT=15) - PARAMETER (MAXTPC= 3) - - CHARACTER BUFIN*1,RSMCZ*4,STMIDZ*3,STMNMZ*9,FSTFLZ*1,STMDPZ*1, - 1 LATNS*1,LONEW*1,FMTVIT*6,BUFINZ*100,STMREQ*9,RELOCZ*1 - CHARACTER BUFY2K*1 - - DIMENSION IVTVAR(MAXVIT),VITVAR(MAXVIT),VITFAC(MAXVIT), - 1 ISTVAR(MAXVIT),IENVAR(MAXVIT),STMTOP(0:MAXTPC) - - DIMENSION BUFIN(MAXCHR),FMTVIT(MAXVIT) - DIMENSION BUFY2K(MAXCHR) - - EQUIVALENCE (BUFIN(1),RSMCZ),(BUFIN(5),RELOCZ),(BUFIN(6),STMIDZ), - 1 (BUFIN(10),STMNMZ),(BUFIN(19),FSTFLZ), - 2 (BUFIN(37),LATNS),(BUFIN(43),LONEW), - 3 (BUFIN(95),STMDPZ),(BUFIN(1),BUFINZ) - - EQUIVALENCE (IVTVAR(1),IDATEZ),(IVTVAR(2),IUTCZ) - - EQUIVALENCE (VITVAR( 3),STMLTZ),(VITVAR( 4),STMLNZ), - 1 (VITVAR( 5),STMDRZ),(VITVAR( 6),STMSPZ), - 2 (VITVAR( 7),PCENZ), (VITVAR( 8),PENVZ), - 3 (VITVAR( 9),RMAXZ), (VITVAR(10),VMAXZ), - 4 (VITVAR(11),RMWZ), (VITVAR(12),R15NEZ), - 5 (VITVAR(13),R15SEZ),(VITVAR(14),R15SWZ), - 6 (VITVAR(15),R15NWZ) - - DATA VITFAC/2*1.0,2*0.1,1.0,0.1,9*1.0/, - 1 FMTVIT/'(I8.8)','(I4.4)','(I3.3)','(I4.4)',2*'(I3.3)', - 2 3*'(I4.4)','(I2.2)','(I3.3)',4*'(I4.4)'/, - 3 ISTVAR/20,29,34,39,45,49,53,58,63,68,71,75,80,85,90/, - 4 IENVAR/27,32,36,42,47,51,56,61,66,69,73,78,83,88,93/, - 5 STMTOP/-99.0,700.,400.,200./ - -C FIVMIN IS FIVE MINUTES IN UNITS OF FRACTIONAL DAYS - - DATA FIVMIN/3.4722E-3/,IRDERM/20/,NUM/1/ - -C THIS SUBROUTINE READS A GLOBAL VITAL STATISTICS FILE FOR -C TROPICAL CYCLONES. 
THERE ARE A NUMBER OF OPTIONS (IOPT) -C UNDER WHICH THIS ROUTINE WILL OPERATE: -C 1) FIND ALL STORMS ON A SPECIFIED DATE/TIME (+WINDOW) -C 2) FIND A PARTICULAR STORM NAME ON A SPECIFIED DATE/TIME -C (+WINDOW) -C 3) FIND ALL OCCURRENCES OF A PARTICULAR STORM NAME -C 4) SAME AS OPTION 2, EXCEPT FOR A PARTICULAR STORM ID -C 5) SAME AS OPTION 3, EXCEPT FOR A PARTICULAR STORM ID -C 6) ALL OCCURRENCES OF A PARTICULAR STORM NAME, EVEN -C BEFORE IT HAD A NAME (FIND FIRST OCCURRENCE OF -C STORM NAME, SUBSTITUE STORM ID, REWIND, THEN -C EXECUTE OPTION 5 - -C STORM ID POSITON CONTAINS THE BASIN IDENTIFIER IN THE -C LAST CHARACTER. THESE ABBREVIATIONS ARE: -C NORTH ATLANTIC: L -C EAST PACIFIC: E -C CENTRAL PACIFIC: C -C WEST PACIFIC: W -C AUSTRALIAN: U -C SOUTH INDIAN: S -C SOUTH PACIFIC P -C N ARABIAN SEA A -C BAY OF BENGAL B -C SOUTH CHINA SEA O -C EAST CHINA SEA T - -C CHECK INPUT ARGUMENTS ACCORDING TO OPTION. ALSO DO OVERHEAD -C CHORES IF NECESSARY - - IERVIT=0 - STMREQ=' ' - IYRREQ=-9999 - - IF(IOPT .LE. 2 .OR. IOPT .EQ. 4) THEN - IF(IDTREQ .LE. 0) THEN - WRITE(6,11) IOPT,IDTREQ,IHRREQ,IHRWIN,MAXSTM,STMNAM(1),STMID(1) - 11 FORMAT(/'****** ILLEGAL DATE IN NEWVIT, IOPT,IDTREQ,IHRREQ,', - 1 'IHRWIN,MAXSTM,STMNAM,STMID='/9X,5I10,2X,A9,2X,A3) - IERVIT=10 - ENDIF - - IF(IHRREQ .LT. 0) THEN - WRITE(6,21) IOPT,IDTREQ,IHRREQ,IHRWIN,MAXSTM,STMNAM(1),STMID(1) - 21 FORMAT(/'****** ILLEGAL HOUR IN NEWVIT, IOPT,IDTREQ,IHRREQ,', - 1 'IHRWIN,MAXSTM,STMNAM,STMID='/9X,5I10,2X,A9,2X,A3) - IERVIT=20 - ENDIF - - IF(IHRWIN .LT. 0) THEN - WRITE(6,31) IOPT,IDTREQ,IHRREQ,IHRWIN,MAXSTM,STMNAM(1),STMID(1) - 31 FORMAT(/'****** ILLEGAL WINDOW IN NEWVIT, IOPT,IDTREQ,IHRREQ,', - 1 'IHRWIN,MAXSTM,STMNAM,STMID='/9X,5I10,2X,A9,2X,A3) - IERVIT=30 - -C SET UP PARAMETERS FOR TIME WINDOW - - ELSE IF(IHRWIN .GT. 0) THEN - CALL ZTIME(IDTREQ,IHRREQ,IYRWIN,IMO,IDA,IHR,IMIN) - CALL W3DIFDAT((/IYRWIN,IMO,IDA,0,0,0,0,0/),(/1899,12,31,0,0,0,0, - $ 0/),1,RINC) - JDY = NINT(RINC(1)) - CALL FLDAY(JDY,IHR,IMIN,DAY0) - -C NORMAL CASE - - WINDOW=REAL(IHRWIN)/24. - DAYPLS=DAY0+WINDOW+FIVMIN - DAYMNS=DAY0-WINDOW-FIVMIN - ENDIF - ENDIF - - IF(IOPT .EQ. 2 .OR. IOPT .EQ. 3 .OR. IOPT .EQ. 6) THEN - IF(STMNAM(1) .EQ. ' ') THEN - WRITE(6,41) IOPT,IDTREQ,IHRREQ,IHRWIN,MAXSTM,STMNAM(1),STMID(1) - 41 FORMAT(/'****** ILLEGAL STMNAM IN NEWVIT, IOPT,IDTREQ,IHRREQ,', - 1 'IHRWIN,MAXSTM,STMNAM,STMID='/9X,5I10,2X,A9,2X,A3) - IERVIT=40 - - ELSE - STMREQ=STMNAM(1) - ENDIF - - ELSE IF(IOPT .EQ. 4 .OR. IOPT .EQ. 5) THEN - IF(STMID(1) .EQ. ' ') THEN - WRITE(6,51) IOPT,IDTREQ,IHRREQ,IHRWIN,MAXSTM,STMNAM(1),STMID(1) - 51 FORMAT(/'****** ILLEGAL STMID IN NEWVIT, IOPT,IDTREQ,IHRREQ,', - 1 'IHRWIN,MAXSTM,STMNAM,STMID='/9X,5I10,2X,A9,2X,A3) - IERVIT=50 - - ELSE - STMREQ=STMID(1) - ENDIF - - ELSE IF(IOPT .NE. 1) THEN - WRITE(6,61) IOPT,IDTREQ,IHRREQ,IHRWIN,MAXSTM,STMNAM(1),STMID(1) - 61 FORMAT(/'****** ILLEGAL OPTION IN NEWVIT, IOPT,IDTREQ,IHRREQ,', - 1 'IHRWIN,MAXSTM,STMNAM,STMID='/9X,5I10,2X,A9,2X,A3) - IERVIT=60 - ENDIF - -C FOR OPTIONS 3, 5, 6 (ALL OCCURRENCES OPTIONS), SEARCH IS -C RESTRICTED TO A SPECIFIC YEAR when idtreq is positive - - IF(IOPT .EQ. 3 .OR. IOPT .EQ. 5 .OR. IOPT .EQ. 6) - 1 IYRREQ=IDTREQ/10000 - -C ****** ERROR EXIT ****** - - IF(IERVIT .GT. 
0) RETURN - -C INITIALIZE FILE AND COUNTERS - - 90 REWIND IOVITL - KREC=0 - KSTORM=0 - NERROR=0 - -C READ A RECORD INTO BUFFER - - 100 CONTINUE - - READ(IOVITL,101,ERR=990,END=200) (BUFIN(NCH),NCH=1,MAXCHR) - 101 FORMAT(95A1) - -C AT THIS POINT WE DO NOT KNOW IF A 2-DIGIT YEAR BEGINS IN COLUMN 20 -C OF THE RECORD (OLD NON-Y2K COMPLIANT FORM) OR IF A 4-DIGIT YEAR -C BEGINS IN COLUMN 20 (NEW Y2K COMPLIANT FORM) - TEST ON LOCATION OF -C LATITUDE N/S INDICATOR TO FIND OUT ... - - IF(BUFIN(35).EQ.'N' .OR. BUFIN(35).EQ.'S') THEN - -C ... THIS RECORD STILL CONTAINS THE OLD 2-DIGIT FORM OF THE YEAR -C ... THIS PROGRAM WILL CONVERT THE RECORD TO A 4-DIGIT YEAR USING THE -C "WINDOWING" TECHNIQUE SINCE SUBSEQUENT LOGIC EXPECTS THIS - - PRINT *, ' ' - PRINT *, '==> Read in RECORD from tcvitals file -- contains a', - $ ' 2-digit year "',BUFIN(20:21),'"' - PRINT *, ' ' - PRINT '(a,i0,a,a)', 'From unit ',iovitl,'; BUFIN-10: ',bufin - PRINT *, ' ' - BUFY2K(1:19) = BUFIN(1:19) - IF(BUFIN(20)//BUFIN(21).GT.'20') THEN - BUFY2K(20) = '1' - BUFY2K(21) = '9' - ELSE - BUFY2K(20) = '2' - BUFY2K(21) = '0' - ENDIF - BUFY2K(22:95) = BUFIN(20:93) - BUFIN = BUFY2K - PRINT *, ' ' - PRINT *, '==> 2-digit year converted to 4-digit year "', - $ BUFIN(20:23),'" via windowing technique' - PRINT *, ' ' - PRINT *, 'From unit ',iovitl,'; BUFIN-10: ',bufin - PRINT *, ' ' - - ELSE IF(BUFIN(37).EQ.'N' .OR. BUFIN(37).EQ.'S') THEN - -C ... THIS RECORD CONTAINS THE NEW 4-DIGIT FORM OF THE YEAR -C ... NO CONVERSION NECESSARY SINCE THIS SUBSEQUENT LOGIC EXPECTS THIS - - PRINT *, ' ' - PRINT *, '==> Read in RECORD from tcvitals file -- contains a', - $ ' 4-digit year "',BUFIN(20:23),'"' - PRINT *, ' ' - PRINT *, 'From unit ',iovitl,'; BUFIN-10: ',bufin - PRINT *, ' ' - PRINT *, '==> No conversion necessary' - PRINT *, ' ' - - ELSE - - PRINT *, ' ' - PRINT *, '***** Cannot determine if this record contains ', - $ 'a 2-digit year or a 4-digit year - skip it and try reading ', - $ 'the next record' - PRINT *, ' ' - GO TO 100 - - END IF - - KREC=KREC+1 - -C DECODE DATE AND TIME - - DO IV=1,2 - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVAR(IV),IERDEC,FMTVIT(IV), - 1 BUFINZ) -c WRITE(6,109) IV,ISTVAR(IV),IENVAR(IV),IVTVAR(IV),IERDEC, -c 1 FMTVIT(IV) -c 109 FORMAT(/'...DECODING VARIABLE #',I2,' ISTART,IEND,IVALUE,IER,', -c 1 'FMT=',2I4,I10,I3,2X,A10) - ENDDO - -C FILTER OUT RECORDS THAT ARE NOT GATHERED BY CURRENT OPTION - -C FIRST: DATE/TIME/WINDOW FILTER - - IF(IOPT .LE. 2 .OR. IOPT .EQ. 4) THEN - -C EXACT DATE/UTC ARE SPECIFIED - - IF(IHRWIN .EQ. 0) THEN -C WRITE(6,117) IDATEZ,IUTCZ -C 117 FORMAT(/'...NO WINDOW OPTION: IDATEZ,IUTCZ=',2I10) - IF(IDTREQ .NE. IDATEZ) GO TO 100 - IF(IHRREQ .NE. IUTCZ ) GO TO 100 - - ELSE - CALL ZTIME(IDATEZ,IUTCZ,IYR,IMO,IDA,IHR,IMIN) - CALL W3DIFDAT((/IYR,IMO,IDA,0,0,0,0,0/),(/1899,12,31,0,0,0,0,0/) - $ ,1,RINC) - JDY = NINT(RINC(1)) - CALL FLDAY(JDY,IHR,IMIN,DAYZ) - -C WRITE(6,119) IYR,IMO,IDA,IHR,IMIN,JDY,DAYZ,DAYMNS,DAYPLS,IYRMNS -C 119 FORMAT('...DEBUGGING WINDOW TIME SELECTION: IYR,IMO,IDA,IHR,', -C 1 'IMIN,JDY,DAYZ,DAYMNS,DAYPLS,IYRMNS='/15X,6I5,3F12.4,I5) - -C YEAR WINDOW, THEN FRACTIONAL JULIAN DAY WINDOW - - IF(IYR .NE. IYRWIN) GO TO 100 - IF(DAYZ .LT. DAYMNS .OR. DAYZ .GT. DAYPLS) GO TO 100 - ENDIF - ENDIF - -C SECOND: STORM NAME FILTER - - IF(IOPT .EQ. 2 .OR. IOPT .EQ. 3 .OR. IOPT .EQ. 6) THEN - IF(IPRNT .GT. 0) WRITE(6,123) STMNMZ,STMREQ - 123 FORMAT('...STORM NAME FILTER, STMNMZ,STMREQ=',A9,2X,A9) - IF(STMNMZ .NE. STMREQ) GO TO 100 - IF(IOPT .EQ. 3 .OR. IOPT .EQ. 6) then - if(iyrreq .gt. 0 .and. 
IDATEZ/10000 .NE. IYRREQ) go to 100 - endif - -C FOR OPTION 6, BRANCH BACK TO LOOK FOR STORM ID INSTEAD OF -C STORM NAME - - IF(IOPT .EQ. 6) THEN - IOPT=5 - STMREQ=STMIDZ - GO TO 90 - ENDIF - - ENDIF - -C THIRD: STORM ID FILTER - - IF(IOPT .EQ. 4 .AND. STMIDZ .NE. STMREQ) GO TO 100 - IF(IOPT .EQ. 5 .AND. (STMIDZ .NE. STMREQ .OR. (iyrreq .gt. 0 - 1 .and. IDATEZ/10000 .NE. IYRREQ))) GO TO 100 - -C EUREKA - - IF(IPRNT .GT. 0) WRITE(6,137) STMREQ,KREC - 137 FORMAT('...REQUESTED STORM FOUND, NAME/ID=',A9,' AT RECORD #',I6) - - DO IV=3,MAXVIT - CALL DECVAR(ISTVAR(IV),IENVAR(IV),IVTVAR(IV),IERDEC,FMTVIT(IV), - 1 BUFINZ) - VITVAR(IV)=REAL(IVTVAR(IV))*VITFAC(IV) - ENDDO - -C DEPTH OF CYCLONIC CIRCULATION - - IF(STMDPZ .EQ. 'S') THEN - PTOPZ=STMTOP(1) - ELSE IF(STMDPZ .EQ. 'M') THEN - PTOPZ=STMTOP(2) - ELSE IF(STMDPZ .EQ. 'D') THEN - PTOPZ=STMTOP(3) - ELSE IF(STMDPZ .EQ. 'X') THEN - PTOPZ=-99.0 -C WRITE(6,141) STMDPZ -C 141 FORMAT('******DEPTH OF CYCLONIC CIRCULATION HAS MISSING CODE=',A, -C 1 '.') - ELSE - WRITE(6,143) STMDPZ - 143 FORMAT('******ERROR DECODING DEPTH OF CYCLONIC CIRCULATION, ', - 1 'STMDPZ=',A1,'. ERROR RECOVERY NEEDED.') - ENDIF - -C ***************************************************** -C ***************************************************** -C **** IMPORTANT NOTES: **** -C **** **** -C **** ALL STORM LONGITUDES CONVERTED TO **** -C **** 0-360 DEGREES, POSITIVE EASTWARD !!! **** -C **** **** -C **** ALL STORM SPEEDS ARE IN M/SEC **** -C **** **** -C **** ALL DISTANCE DATA ARE IN KM **** -C **** **** -C **** ALL PRESSURE DATA ARE IN HPA (MB) **** -C ***************************************************** -C ***************************************************** - -C SIGN OF LATITUDE AND CONVERT LONGITUDE - - IF(LATNS .EQ. 'S') THEN - STMLTZ=-STMLTZ - ELSE IF(LATNS .NE. 'N') THEN - WRITE(6,153) STMLTZ,STMLNZ,LATNS - 153 FORMAT('******ERROR DECODING LATNS, ERROR RECOVERY NEEDED. ', - 1 'STMLTZ,STMLNZ,LATNS=',2F12.2,2X,A1) - GO TO 100 - ENDIF - - IF(LONEW .EQ. 'W') THEN - STMLNZ=360.-STMLNZ - ELSE IF(LONEW .NE. 'E') THEN - WRITE(6,157) STMLTZ,STMLNZ,LATNS - 157 FORMAT('******ERROR DECODING LONEW, ERROR RECOVERY NEEDED. ', - 1 'STMLTZ,STMLNZ,LATNS=',2F12.2,2X,A1) - ENDIF - - IF(IPRNT .EQ. 1) - 1 WRITE(6,161) IDATEZ,IUTCZ,STMLTZ,STMLNZ,STMDRZ,STMSPZ,PENVZ, - 2 PCENZ,RMAXZ,VMAXZ,RMWZ,R15NEZ,R15SEZ,R15SWZ,R15NWZ - 161 FORMAT('...ALL STORM DATA CALCULATED: IDATEZ,IUTCZ,STMLTZ,', - 1 'STMLNZ,STMDRZ,STMSPZ,PENVZ,PCENZ,RMAXZ,VMAXZ,RMWZ,', - 2 'R15NEZ,R15SEZ,R15SWZ,R15NWZ='/5X,2I10,13F8.2) - - IF(KSTORM .LT. MAXSTM) THEN - KSTORM=KSTORM+1 - IDATE(KSTORM)=IDATEZ - IUTC(KSTORM)=IUTCZ - PTOP(KSTORM)=PTOPZ - STMLAT(KSTORM)=STMLTZ - STMLON(KSTORM)=STMLNZ - STMDIR(KSTORM)=STMDRZ - STMSPD(KSTORM)=STMSPZ - STMNAM(KSTORM)=STMNMZ - STMID (KSTORM)=STMIDZ - RSMC (KSTORM)=RSMCZ - RMAX(KSTORM)=RMAXZ - PENV(KSTORM)=PENVZ - PCEN(KSTORM)=PCENZ - VMAX(KSTORM)=VMAXZ - RMW(KSTORM)=RMWZ - R15NE(KSTORM)=R15NEZ - R15SE(KSTORM)=R15SEZ - R15SW(KSTORM)=R15SWZ - R15NW(KSTORM)=R15NWZ - -C SET THE FIRST OCCURRENCE FLAG IF PRESENT - - IF(FSTFLZ .EQ. ':') THEN - FSTFLG(KSTORM)=.TRUE. - ELSE - FSTFLG(KSTORM)=.FALSE. - ENDIF - - GO TO 100 - - ELSE - GO TO 300 - ENDIF - - 200 CONTINUE - - IF(KSTORM .GT. 0) THEN - -C NORMAL RETURN HAVING FOUND REQUESTED STORM (S) AT DATE/TIME/WINDOW - - IF(IPRNT .EQ. 1) WRITE(6,201) STMREQ,IDTREQ,IHRREQ,KSTORM,KREC - 201 FORMAT(/'...FOUND STORM NAME/ID ',A12,' AT DATE, TIME=',I9,'/', - 1 I5,' UTC IN VITALS FILE.'/4X,I5,' RECORDS FOUND. 
', - 2 'TOTAL NUMBER OF RECORDS READ=',I7) - RETURN - -C UNABLE TO FIND REQUESTED STORM AT DATE/TIME/WINDOW - - ELSE - IF(IOPT .EQ. 1) STMREQ='ALLSTORMS' - WRITE(6,207) IOPT,STMREQ,STMNMZ - 207 FORMAT(/'**** OPTION=',I3,' CANNOT FIND STORM NAME/ID=',A9, - 1 '... LAST STORM FOUND=',A9) - - WRITE(6,209) IDATEZ,IDTREQ,IUTCZ,IHRREQ - 209 FORMAT('**** CANNOT FIND REQUESTED DATE/TIME, (FOUND, ', - 1 'REQUESTED) (DATE/TIME)=',4I10/) - IERVIT=210 - RETURN - - ENDIF - - 300 WRITE(6,301) KSTORM - 301 FORMAT(/'******KSTORM EXCEEDS AVAILABLE SPACE, KSTORM=',I5) - RETURN - - 990 WRITE(6,991) BUFIN - 991 FORMAT('******ERROR READING STORM RECORD. BUFIN IS:'/' ******',A, - 1 '******') - NERROR=NERROR+1 - IF(NERROR .LE. IRDERM) GO TO 100 - IERVIT=990 - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: DECVAR DECODES VARIALES -C PRGMMR: D. A. KEYSER ORG: NP22 DATE: 2004-06-08 -C -C ABSTRACT: DECODES A PARTICULAR INTEGER VARIABLE FROM AN INPUT -C CHARACTER- BASED RECORD IN THE TROPICAL CYCLONE VITAL STATISTICS -C FILE. THIS IS DONE THROUGH AN INTERNAL READ. -C -C PROGRAM HISTORY LOG: -C 1991-06-06 S. J. LORD -C 2004-06-08 D. A. KEYSER - WHEN INTEGER VALUES ARE DECODED FROM -C CHARACTER-BASED RECORD VIA INTERNAL READ IN THIS SUBR., -C IF BYTE IN UNITS DIGIT LOCATION IS ERRONEOUSLY CODED AS -C BLANK (" "), IT IS REPLACED WITH A "5" IN ORDER TO -C PREVENT INVALID VALUE FROM BEING RETURNED (I.E., IF -C "022 " WAS READ, IT WAS DECODED AS "22", IT IS NOW -C DECODED AS "225" - THIS HAPPENED FOR VALUE OF RADIUS OF -C LAST CLOSED ISOBAR FOR JTWC RECORDS FROM 13 JULY 2000 -C THROUGH FNMOC FIX ON 26 MAY 2004 - THE VALUE WAS REPLACED -C BY CLIMATOLOGY BECAUSE IT FAILED A GROSS CHECK, HAD THIS -C CHANGE BEEN IN PLACE THE DECODED VALUE WOULD HAVE BEEN -C W/I 0.5 KM OF THE ACTUAL VALUE) -C -C USAGE: CALL DECVAR(ISTART,IEND,IVALUE,IERDEC,FMT,BUFF) -C INPUT ARGUMENT LIST: -C ISTART - INTEGER BYTE IN BUFF FROM WHICH TO BEGIN INTERNAL READ -C IEND - INTEGER BYTE IN BUFF FROM WHICH TO END INTERNAL READ -C FMT - CHARACTER*(*) FORMAT TO USE FOR INTERNAL READ -C BUFF - CHARACTER*(*) TROPICAL CYCLONE RECORD -C -C OUTPUT ARGUMENT LIST: -C IVALUE - INTEGER VALUE DECODED FROM BUFF -C IERDEC - ERROR RETURN CODE (= 0 - SUCCESSFUL DECODE, -C =10 - DECODE ERROR) -C -C OUTPUT FILES: -C UNIT 06 - STANDARD OUTPUT PRINT -C -C REMARKS: IF IERDEC = 10, IVALUE IS RETURNED AS -9, -99, -999 -C OR -9999999 DEPENDING UPON THE VALUE OF FMT. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE DECVAR(ISTART,IEND,IVALUE,IERDEC,FMT,BUFF) - - PARAMETER (NCHLIN=130) - - CHARACTER FMT*(*),BUFF*(*),BUFF_save*130,OUTLIN*1 - - SAVE - - DIMENSION OUTLIN(NCHLIN) - DIMENSION MISSNG(2:8) - - DATA MISSNG/-9,-99,-999,-9999,-99999,-999999,-9999999/ - -C WRITE(6,1) FMT,ISTART,IEND,BUFF -C 1 FORMAT(/'...FMT,ISTART,IEND=',A10,2I5/' ...BUFF=',A,'...') - - IF(BUFF(IEND:IEND).EQ.' ') THEN - BUFF_save = BUFF - BUFF(IEND:IEND) = '5' - WRITE(6,888) IEND - 888 FORMAT(/' ++++++DECVAR: WARNING -- BLANK (" ") CHARACTER READ IN', - 1 ' UNITS DIGIT IN BYTE',I4,' OF RECORD - CHANGE TO "5" ', - 2 'AND CONTINUE DECODE'/) - OUTLIN=' ' - OUTLIN(IEND:IEND)='5' - WRITE(6,'(130A1)') OUTLIN - WRITE(6,'(A130/)') BUFF_save(1:130) - ENDIF - - READ(BUFF(ISTART:IEND),FMT,ERR=10) IVALUE - - IERDEC=0 - - RETURN - - 10 CONTINUE - - OUTLIN=' ' - OUTLIN(ISTART:IEND)='*' - - IVALUE = -9999999 - K = IEND - ISTART + 1 - IF(K.GT.1 .AND. 
K.LT.9) IVALUE = MISSNG(K) - - WRITE(6,31) OUTLIN - WRITE(6,32) BUFF(1:130),IVALUE - 31 FORMAT(/' ******DECVAR: ERROR DECODING, BUFF='/130A1) - 32 FORMAT(A130/7X,'VALUE RETURNED AS ',I9/) - - IERDEC=10 - - RETURN - - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: TIMSUB PERFORMS TIME CHORES -C PRGMMR: D. A. KEYSER ORG: NP22 DATE: 1998-06-05 -C -C ABSTRACT: VARIOUS ENTRIES CONVERT 8 DIGIT YYYYMMDD INTO YEAR, MONTH -C AND DAY, AND FRACTIONAL JULIAN DAY FROM INTEGER JULIAN DAY, HOUR -C AND MINUTE. -C -C PROGRAM HISTORY LOG: -C 1991-06-06 S. J. LORD -C 1998-06-05 D. A. KEYSER - Y2K/F90 COMPLIANCE -C -C USAGE: CALL TIMSUB(IDATE,IUTC,IYR,IMO,IDA,IHR,IMIN,JDY,DAY) -C CALL FLDAY(JDY,IHR,IMIN,DAY) -C INPUT ARGUMENT LIST: -C IDATE - DATE IN FORM YYYYMMDD -C JDY - NUMBER OF DAYS SINCE 12/31/1899 -C -C OUTPUT ARGUMENT LIST: -C IYR - YEAR IN FORM YYYY -C IMO - MONTH OF YEAR -C IDA - DAY OF MONTH -C IHR - HOUR OF DAY -C IMIN - MINUTE OF HOUR -C DAY - FRACTIONAL NUMBER OF DAYS SINCE 12/31/1899 -C -C REMARKS: NONE. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE TIMSUB(IDATE,IUTC,IYR,IMO,IDA,IHR,IMIN,JDY,DAY) - -C----------------------------------------------------------------------- - - ENTRY ZTIME(IDATE,IUTC,IYR,IMO,IDA,IHR,IMIN) - -C PARSE 8 DIGIT YYYYMMDD INTO YEAR MONTH AND DAY - - IYR = IDATE/10000 - IMO =(IDATE-IYR*10000)/100 - IDA = IDATE-IYR*10000-IMO*100 - IHR =IUTC/100 - IMIN=IUTC-IHR*100 - RETURN - -C----------------------------------------------------------------------- -C THIS ENTRY CALCULATES THE FRACTIONAL JULIAN DAY FROM INTEGERS -C JULIAN DAY, HOUR AND MINUTE (ACUALLY, JDY HERE IS NO. OF DAYS -C SINCE 12/31/1899) - - ENTRY FLDAY(JDY,IHR,IMIN,DAY) - DAY=REAL(JDY)+(REAL(IHR)*60.+REAL(IMIN))/1440. - RETURN - -C----------------------------------------------------------------------- - - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: YTIME GETS INTEGER YYYY, YYYYMMDD, HHMM -C PRGMMR: D. A. KEYSER ORG: NP22 DATE: 1998-10-29 -C -C ABSTRACT: CALCULATES 8-DIGIT INTEGER YYYYMMDD, 4-DIGIT INTEGER YYYY, -C AND 6-DIGIT INTEGER HHMMSS FROM FRACTIONAL NUMBER OF DAYS SINCE -C 12/31/1899 -C -C PROGRAM HISTORY LOG: -C 1991-06-06 S. J. LORD -C 1998-10-29 D. A. KEYSER - Y2K/F90 COMPLIANCE -C -C USAGE: CALL YTIME(IYR,DAYZ,IDATE,JUTC) -C INPUT ARGUMENT LIST: -C DAYZ - FRACTIONAL NUMBER OF DAYS SINCE 12/31/1899 -C -C OUTPUT ARGUMENT LIST: -C IYR - YEAR (YYYY) -C IDATEZ - DATE IN FORM YYYYMMDD -C JUTC - DATE IN FORM HHMMSS -C -C REMARKS: NONE. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE YTIME(IYR,DAYZ,IDATE,JUTC) - DIMENSION JDAT(8) - - CALL W3MOVDAT((/DAYZ,0.,0.,0.,0./),(/1899,12,31,0,0,0,0,0/),JDAT) - IYR = JDAT(1) - IMO = JDAT(2) - IDA = JDAT(3) - IHR = JDAT(5) - IMN = JDAT(6) - ISC = JDAT(7) - - IDATE=IDA+(100*IMO)+(10000*IYR) - JUTC =ISC+100*IMN+10000*IHR - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: SORTRL SORTS REAL NUMBERS -C PRGMMR: S. J. LORD ORG: NP22 DATE: 1991-06-04 -C -C ABSTRACT: SORTS REAL NUMBERS. OUTPUT ARRAY IS THE INDEX OF -C THE INPUT VALUES THAT ARE SORTED. -C -C PROGRAM HISTORY LOG: -C 1991-06-04 S. J. LORD (MODIFIED FROM NCAR CODE) -C -C USAGE: CALL SORTRL(A,LA,NL) -C INPUT ARGUMENT LIST: -C A - ARRAY OF ELEMENTS TO BE SORTED. -C NL - NUMBER OF ELEMENTS TO BE SORTED. -C -C OUTPUT ARGUMENT LIST: -C LA - INTEGER ARRAY CONTAINING THE INDEX OF THE SORTED -C - ELEMENTS. SORTING IS FROM SMALL TO LARGE. E.G. 
-C - LA(1) CONTAINS THE INDEX OF THE SMALLEST ELEMENT IN -C - ARRAY. LA(NL) CONTAINS THE INDEX OF THE LARGEST. -C -C -C REMARKS: NONE -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE SORTRL(A,LA,NL) - -C ENTRY SORTRL(A,LA,NL) SORT UP REAL NUMBERS -C ** REVISED (6/13/84) FOR THE USE IN VAX-11 -C ARGUMENTS ... -C A INPUT ARRAY OF NL ELEMENTS TO BE SORTED OR RE-ORDERED -C LA OUTPUT ARRAY OF NL ELEMENTS IN WHICH THE ORIGINAL LOCATION -C OF THE SORTED ELEMENTS OF A ARE SAVED, OR -C INPUT ARRAY TO SPECIFY THE REORDERING OF ARRAY A BY SORTED -C NL THE NUMBER OF ELEMENTS TO BE TREATED - - SAVE - - DIMENSION A(NL),LA(NL),LS1(64),LS2(64) - DATA NSX/64/ - -C SET THE ORIGINAL ORDER IN LA - - DO L=1,NL - LA(L)=L - ENDDO - -C SEPARATE NEGATIVES FROM POSITIVES - - L = 0 - M = NL + 1 - 12 L = L + 1 - IF(L.GE.M) GO TO 19 - IF(A(L)) 12,15,15 - 15 M = M - 1 - IF(L.GE.M) GO TO 19 - IF(A(M)) 18,15,15 - 18 AZ = A(M) - A(M) = A(L) - A(L) = AZ - LZ = LA(M) - LA(M) = LA(L) - LA(L) = LZ - GO TO 12 - 19 L = L - 1 - -C NOTE THAT MIN AND MAX FOR INTERVAL (1,NL) HAVE NOT BEEN DETERMINED - - LS1(1) = 0 - L2 = NL + 1 - NS = 1 - -C STEP UP - - 20 LS1(NS) = LS1(NS) + 1 - LS2(NS) = L - NS = NS + 1 - IF(NS.GT.NSX) GO TO 80 - L1 = L + 1 - LS1(NS) = L1 - L2 = L2 - 1 - GO TO 40 - -C STEP DOWN - - 30 NS=NS-1 - IF (NS.LE.0) GO TO 90 - L1 = LS1(NS) - L2 = LS2(NS) - 40 IF(L2.LE.L1) GO TO 30 - -C FIND MAX AND MIN OF THE INTERVAL (L1,L2) - - IF (A(L1)-A(L2) .LE. 0) GO TO 52 - AN = A(L2) - LN = L2 - AX = A(L1) - LX = L1 - GO TO 54 - 52 AN = A(L1) - LN = L1 - AX = A(L2) - LX = L2 - 54 L1A = L1 + 1 - L2A = L2 - 1 - IF(L1A.GT.L2A) GO TO 60 - - DO L=L1A,L2A - IF (A(L)-AX .GT. 0) GO TO 56 - IF (A(L)-AN .GE. 0) GO TO 58 - AN = A(L) - LN = L - GO TO 58 - 56 AX = A(L) - LX = L - 58 CONTINUE - ENDDO - -C IF ALL ELEMENTS ARE EQUAL (AN=AX), STEP DOWN - - 60 IF (AN .EQ. AX) GO TO 30 - -C PLACE MIN AT L1, AND MAX AT L2 -C IF EITHER LN=L2 OR LX=L1, FIRST EXCHANGE L1 AND L2 - - IF(LN.EQ.L2.OR.LX.EQ.L1) GO TO 62 - GO TO 64 - 62 AZ=A(L1) - A(L1)=A(L2) - A(L2)=AZ - LZ=LA(L1) - LA(L1)=LA(L2) - LA(L2)=LZ - -C MIN TO L1, IF LN IS NOT AT EITHER END - - 64 IF(LN.EQ.L1.OR.LN.EQ.L2) GO TO 66 - A(LN)=A(L1) - A(L1)=AN - LZ=LA(LN) - LA(LN)=LA(L1) - LA(L1)=LZ - -C MAX TO L2, IF LX IS NOT AT EITHER END - - 66 IF(LX.EQ.L2.OR.LX.EQ.L1) GO TO 68 - A(LX)=A(L2) - A(L2)=AX - LZ=LA(LX) - LA(LX)=LA(L2) - LA(L2)=LZ - -C IF ONLY THREE ELEMENTS IN (L1,L2), STEP DOWN. - - 68 IF(L1A.GE.L2A) GO TO 30 - -C SET A CRITERION TO SPLIT THE INTERVAL (L1A,L2A) -C AC IS AN APPROXIMATE ARITHMETIC AVERAGE OF AX AND AN, -C PROVIDED THAT AX IS GREATER THAN AN. (IT IS THE CASE, HERE) -C ** IF A IS DISTRIBUTED EXPONENTIALLY, GEOMETRIC MEAN MAY -C BE MORE EFFICIENT - - AC = (AX+AN)/2 - -C MIN AT L1 AND MAX AT L2 ARE OUTSIDE THE INTERVAL - - L = L1 - M = L2 - 72 L = L + 1 - IF(L.GE.M) GO TO 78 -cc 73 CONTINUE - IF (A(L)-AC .LE. 0) GO TO 72 - 75 M = M - 1 - IF(L.GE.M) GO TO 78 -cc 76 CONTINUE - IF (A(M)-AC .GT. 0) GO TO 75 - AZ = A(M) - A(M) = A(L) - A(L) = AZ - LZ = LA(M) - LA(M) = LA(L) - LA(L) = LZ - GO TO 72 - -C SINCE 75 IS ENTERED ONLY IF 73 IS FALSE, 75 IS NOT TENTATIVE -C BUT 72 IS TENTATIVE, AND MUST BE CORRECTED IF NO FALSE 76 OCCURS - - 78 L = L - 1 - GO TO 20 - 80 WRITE(6,85) NSX - 85 FORMAT(/' === SORTING INCOMPLETE. SPLIT EXCEEDED',I3,' ==='/) - 90 RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: DS2UV CONVERTS DIRECTION/SPEED TO U/V MOTION -C PRGMMR: S. J. 
LORD ORG: NP22 DATE: 1991-06-06 -C -C ABSTRACT: CONVERTS DIRECTION AND SPEED TO ZONAL AND MERIDIONAL -C MOTION. -C -C PROGRAM HISTORY LOG: -C 1991-06-06 S. J. LORD -C -C USAGE: CALL PGM-NAME(INARG1, INARG2, WRKARG, OUTARG1, ... ) -C INPUT ARGUMENT LIST: -C INARG1 - GENERIC DESCRIPTION, INCLUDING CONTENT, UNITS, -C INARG2 - TYPE. EXPLAIN FUNCTION IF CONTROL VARIABLE. -C -C OUTPUT ARGUMENT LIST: (INCLUDING WORK ARRAYS) -C WRKARG - GENERIC DESCRIPTION, ETC., AS ABOVE. -C OUTARG1 - EXPLAIN COMPLETELY IF ERROR RETURN -C ERRFLAG - EVEN IF MANY LINES ARE NEEDED -C -C INPUT FILES: (DELETE IF NO INPUT FILES IN SUBPROGRAM) -C DDNAME1 - GENERIC NAME & CONTENT -C -C OUTPUT FILES: (DELETE IF NO OUTPUT FILES IN SUBPROGRAM) -C DDNAME2 - GENERIC NAME & CONTENT AS ABOVE -C FT06F001 - INCLUDE IF ANY PRINTOUT -C -C REMARKS: LIST CAVEATS, OTHER HELPFUL HINTS OR INFORMATION -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE DS2UV(UZ,VZ,DIRZ,SPDZ) - -C THIS SUBROUTINE PRODUCES U, V CARTESIAN WINDS FROM DIRECTION,SPEED -C ****** IMPORTANT NOTE: DIRECTION IS DIRECTION WIND IS -C BLOWING, THE OPPOSITE OF METEOROLOGICAL CONVENTION *** - - UZ=SPDZ*SIND(DIRZ) - VZ=SPDZ*COSD(DIRZ) - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: ATAN2D ARC TAN FUNCTION FROM DEGREES INPUT -C PRGMMR: S. J. LORD ORG: NP22 DATE: 1991-06-06 -C -C ABSTRACT: ARC TAN FUNCTION FROM DEGREES INPUT. -C -C PROGRAM HISTORY LOG: -C 1991-06-06 S. J. LORD -C -C USAGE: CALL PGM-NAME(INARG1, INARG2, WRKARG, OUTARG1, ... ) -C INPUT ARGUMENT LIST: -C INARG1 - GENERIC DESCRIPTION, INCLUDING CONTENT, UNITS, -C INARG2 - TYPE. EXPLAIN FUNCTION IF CONTROL VARIABLE. -C -C OUTPUT ARGUMENT LIST: (INCLUDING WORK ARRAYS) -C WRKARG - GENERIC DESCRIPTION, ETC., AS ABOVE. -C OUTARG1 - EXPLAIN COMPLETELY IF ERROR RETURN -C ERRFLAG - EVEN IF MANY LINES ARE NEEDED -C -C INPUT FILES: (DELETE IF NO INPUT FILES IN SUBPROGRAM) -C DDNAME1 - GENERIC NAME & CONTENT -C -C OUTPUT FILES: (DELETE IF NO OUTPUT FILES IN SUBPROGRAM) -C DDNAME2 - GENERIC NAME & CONTENT AS ABOVE -C FT06F001 - INCLUDE IF ANY PRINTOUT -C -C REMARKS: LIST CAVEATS, OTHER HELPFUL HINTS OR INFORMATION -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - FUNCTION ATAN2D(ARG1,ARG2) - -C DEGRAD CONVERTS DEGREES TO RADIANS - - DATA DEGRAD/0.017453/ - IF(ARG1 .EQ. 0.0 .AND. ARG2 .EQ. 0.0) THEN - ATAN2D=0.0 - ELSE - ATAN2D=ATAN2(ARG1,ARG2)/DEGRAD - ENDIF - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: SIND SINE FUNCTION FROM DEGREES INPUT -C PRGMMR: S. J. LORD ORG: NP22 DATE: 1991-06-06 -C -C ABSTRACT: SINE FUNCTION FROM DEGREES INPUT. -C -C PROGRAM HISTORY LOG: -C 1991-06-06 S. J. LORD -C -C USAGE: CALL PGM-NAME(INARG1, INARG2, WRKARG, OUTARG1, ... ) -C INPUT ARGUMENT LIST: -C INARG1 - GENERIC DESCRIPTION, INCLUDING CONTENT, UNITS, -C INARG2 - TYPE. EXPLAIN FUNCTION IF CONTROL VARIABLE. -C -C OUTPUT ARGUMENT LIST: (INCLUDING WORK ARRAYS) -C WRKARG - GENERIC DESCRIPTION, ETC., AS ABOVE. 
-C OUTARG1 - EXPLAIN COMPLETELY IF ERROR RETURN -C ERRFLAG - EVEN IF MANY LINES ARE NEEDED -C -C INPUT FILES: (DELETE IF NO INPUT FILES IN SUBPROGRAM) -C DDNAME1 - GENERIC NAME & CONTENT -C -C OUTPUT FILES: (DELETE IF NO OUTPUT FILES IN SUBPROGRAM) -C DDNAME2 - GENERIC NAME & CONTENT AS ABOVE -C FT06F001 - INCLUDE IF ANY PRINTOUT -C -C REMARKS: LIST CAVEATS, OTHER HELPFUL HINTS OR INFORMATION -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - FUNCTION SIND(ARG) - -C DEGRAD CONVERTS DEGREES TO RADIANS - - DATA DEGRAD/0.017453/ - SIND=SIN(ARG*DEGRAD) - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: COSD COSINE FUNCTION FROM DEGREES INPUT -C PRGMMR: S. J. LORD ORG: NP22 DATE: 1991-06-06 -C -C ABSTRACT: RETURNS COSINE FUNCTION FROM DEGREES INPUT -C -C PROGRAM HISTORY LOG: -C 1991-06-06 S. J. LORD -C -C USAGE: CALL PGM-NAME(INARG1, INARG2, WRKARG, OUTARG1, ... ) -C INPUT ARGUMENT LIST: -C INARG1 - GENERIC DESCRIPTION, INCLUDING CONTENT, UNITS, -C INARG2 - TYPE. EXPLAIN FUNCTION IF CONTROL VARIABLE. -C -C OUTPUT ARGUMENT LIST: (INCLUDING WORK ARRAYS) -C WRKARG - GENERIC DESCRIPTION, ETC., AS ABOVE. -C OUTARG1 - EXPLAIN COMPLETELY IF ERROR RETURN -C ERRFLAG - EVEN IF MANY LINES ARE NEEDED -C -C INPUT FILES: (DELETE IF NO INPUT FILES IN SUBPROGRAM) -C DDNAME1 - GENERIC NAME & CONTENT -C -C OUTPUT FILES: (DELETE IF NO OUTPUT FILES IN SUBPROGRAM) -C DDNAME2 - GENERIC NAME & CONTENT AS ABOVE -C FT06F001 - INCLUDE IF ANY PRINTOUT -C -C REMARKS: LIST CAVEATS, OTHER HELPFUL HINTS OR INFORMATION -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - FUNCTION COSD(ARG) - -C DEGRAD CONVERTS DEGREES TO RADIANS - - DATA DEGRAD/0.017453/ - COSD=COS(ARG*DEGRAD) - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: DISTSP DISTANCE ON GREAT CIRCLE -C PRGMMR: S. J. LORD ORG: NP22 DATE: 1991-06-06 -C -C ABSTRACT: CALCULATES DISTANCE ON GREAT CIRCLE BETWEEN TWO LAT/LON -C POINTS. -C -C PROGRAM HISTORY LOG: -C 1991-06-06 S. J. LORD -C -C USAGE: DXY=DISTSP(DLAT1,DLON1,DLAT2,DLON2) -C INPUT ARGUMENT LIST: -C DLAT1 - LATITUDE OF POINT 1 (-90<=LAT<=90) -C DLON1 - LONGITUDE OF POINT 1 (-180 TO 180 OR 0 TO 360) -C DLAT2 - LATITUDE OF POINT 2 (-90<=LAT<=90) -C DLON1 - LONGITUDE OF POINT 2 -C -C -C REMARKS: DISTANCE IS IN METERS -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - FUNCTION DISTSP(DLAT1,DLON1,DLAT2,DLON2) - DATA REARTH/6.37E6/ - - XXD=COSD(DLON1-DLON2)*COSD(DLAT1)*COSD(DLAT2)+ - 1 SIND(DLAT1)*SIND(DLAT2) - - XXM=AMIN1(1.0,AMAX1(-1.0,XXD)) - - DISTSP=ACOS(XXM)*REARTH - RETURN - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: AVGSUB CALCULATES AVERAGES -C PRGMMR: S. J. LORD ORG: NP22 DATE: 1991-06-06 -C -C ABSTRACT: CALCULATES AVERAGES WEIGHTED AND UNWEIGHTED FOR ALL -C INPUT NUMBERS OR JUST POSITIVE ONES. -C -C PROGRAM HISTORY LOG: -C 1991-06-06 S. J. LORD -C -C USAGE: CALL PGM-NAME(INARG1, INARG2, WRKARG, OUTARG1, ... ) -C INPUT ARGUMENT LIST: -C INARG1 - GENERIC DESCRIPTION, INCLUDING CONTENT, UNITS, -C INARG2 - TYPE. EXPLAIN FUNCTION IF CONTROL VARIABLE. -C -C OUTPUT ARGUMENT LIST: (INCLUDING WORK ARRAYS) -C WRKARG - GENERIC DESCRIPTION, ETC., AS ABOVE. 
-C OUTARG1 - EXPLAIN COMPLETELY IF ERROR RETURN -C ERRFLAG - EVEN IF MANY LINES ARE NEEDED -C -C INPUT FILES: (DELETE IF NO INPUT FILES IN SUBPROGRAM) -C DDNAME1 - GENERIC NAME & CONTENT -C -C OUTPUT FILES: (DELETE IF NO OUTPUT FILES IN SUBPROGRAM) -C DDNAME2 - GENERIC NAME & CONTENT AS ABOVE -C FT06F001 - INCLUDE IF ANY PRINTOUT -C -C REMARKS: LIST CAVEATS, OTHER HELPFUL HINTS OR INFORMATION -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE AVGSUB(XX,WT,LX,AVX) - - DIMENSION XX(LX),WT(LX) - - AVX=0.0 - N=0 - DO L=1,LX - AVX=AVX+XX(L) - N=N+1 - ENDDO - AVX=AVX/REAL(N) - RETURN - -C----------------------------------------------------------------------- - - ENTRY WTAVRG(XX,WT,LX,AVX) - - AVX=0.0 - W=0.0 - DO L=1,LX - AVX=AVX+XX(L)*WT(L) - W=W+WT(L) - ENDDO - AVX=AVX/W - RETURN - -C----------------------------------------------------------------------- - - ENTRY WTAVGP(XX,WT,LX,AVX) - - AVX=0.0 - W=0.0 - DO L=1,LX - IF(XX(L) .GE. 0.0) THEN - AVX=AVX+XX(L)*WT(L) - W=W+WT(L) - ENDIF - ENDDO - IF(W .NE. 0.0) THEN - AVX=AVX/W - ELSE - AVX=XX(1) - ENDIF - RETURN - -C----------------------------------------------------------------------- - - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: ABORT1 ERROR EXIT ROUTINE -C PRGMMR: S. J. LORD ORG: NP22 DATE: 1991-06-05 -C -C ABSTRACT: ERROR TERMINATION ROUTINE THAT LISTS ROUTINE WHERE -C ERROR OCCURRED AND THE NEAREST STATEMENT NUMBER. -C -C PROGRAM HISTORY LOG: -C 1991-06-05 S. J. LORD -C -C USAGE: CALL ABORT1(ME(KENTRY,ISTMT) -C INPUT ARGUMENT LIST: -C KENTRY - CHARACTER VARIABLE (*7) GIVING PROGRAM OR SUBROUTINE -C - WHERE ERROR OCCURRED. -C ISTMT - STATEMENT NUMBER NEAR WHERE ERROR OCCURRED. -C -C OUTPUT FILES: -C UNIT 06 - STANDARD OUTPUT PRINT -C -C REMARKS: THIS ROUTINE IS CALLED WHENEVER AN INTERNAL PROBLEM -C TO THE CODE IS FOUND. EXAMPLES ARE CALLING PARAMETERS THAT -C WILL OVERFLOW ARRAY BOUNDARIES AND OBVIOUS INCONSISTENCIES -C IN NUMBERS GENERATED BY THE CODE. -C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE ABORT1(KENTRY,ISTMT) - CHARACTER*7 KENTRY - WRITE(6,10) KENTRY,ISTMT - 10 FORMAT(//21X,'*********************************************'/ - 1 21X,'*********************************************'/ - 2 21X,'**** PROGRAM FAILED DUE TO FATAL ERROR ****'/ - 3 21X,'**** IN ROUTINE ',A,' NEAR ****'/ - 4 21X,'**** STATEMENT NUMBER',I5,'. ****'/ - 5 21X,'*********************************************'/ - 6 21X,'*********************************************') - CALL W3TAGE('SYNDAT_QCTROPCY') - call ERREXIT (20) - END - -C$$$ SUBPROGRAM DOCUMENTATION BLOCK -C . . . . -C SUBPROGRAM: OFILE0 OPENS ALL DATA FILES LISTED IN TEXT FILE -C PRGMMR: S. J. LORD ORG: NP22 DATE: 1991-06-07 -C -C ABSTRACT: OPENS ALL OF THE DATA FILES READ FROM A LIST IN A TEXT -C FILE. -C -C PROGRAM HISTORY LOG: -C 1991-06-07 S. J. LORD -C -C USAGE: CALL OFILE0(IUNTOP,NFILMX,NFTOT,FILNAM) -C INPUT ARGUMENT LIST: -C IUNTOP - UNIT NUMBER OF TEXT FILE ASSOCIATING UNIT NUMBERS -C - WITH FILE NAMES -C FILNAM - FILE NAMES (UPON INPUT ONLY ELEMENT 0 STORED - -C - THE FILE NAME ASSOCIATED WITH UNIT IUNTOP) -C NFILMX - THE MAXIMUM NUMBER OF FILES THAT CAN BE OPENED IN -C - THIS SUBROUTINE -C -C OUTPUT ARGUMENT LIST: -C NFTOT - NUMBER OF DATA FILES OPENED IN THIS SUBROUTINE -C -C INPUT FILES: -C UNIT "IUNTOP" -C - TEXT FILE ASSOCIATING UNIT NUMBERS WITH FILE NAMES -C MANY - READ FROM LIST IN UNIT IUNTOP -C -C OUTPUT FILES: -C UNIT 06 - STANDARD OUTPUT PRINT -C -C REMARKS: NONE. 
-C -C ATTRIBUTES: -C MACHINE: IBM-SP -C LANGUAGE: FORTRAN 90 -C -C$$$ - SUBROUTINE OFILE0(IUNTOP,NFILMX,NFTOT,FILNAM) - - PARAMETER (IDGMAX=7) - - SAVE - - CHARACTER FILNAM*(*),CFORM*11,CSTAT*7,CACCES*10,MACHIN*10, - 1 CFZ*1,CSTZ*1,CACZ*1,CPOS*10 - - DIMENSION IUNIT(NFILMX),CFORM(NFILMX),CSTAT(NFILMX), - 1 CACCES(NFILMX),CPOS(NFILMX) - DIMENSION FILNAM(0:NFILMX) - - INTEGER(4) IARGC,NDEF - - NF=0 - -C DEFAULT FILENAME IS SPECIFIED BY THE CALLING PROGRAM. -C RUNNING THE PROGRAM WITH ARGUMENTS ALLOWS -C YOU TO SPECIFY THE FILENAM AS FOLLOWS: - - NDEF=IARGC() - - IF(NDEF .LT. 0) CALL GETARG(1_4,FILNAM(0)) - - LENG0=INDEX(FILNAM(0),' ')-1 - WRITE(6,5) NDEF,FILNAM(0)(1:LENG0) - 5 FORMAT(/'...SETTING UP TO READ I/O FILENAMES AND OPEN PARMS.', - 1 ' NDEF,FILNAM(0)=',I2,1X,'...',A,'...') - - OPEN(UNIT=IUNTOP,FORM='FORMATTED',STATUS='OLD',ACCESS= - 1 'SEQUENTIAL',FILE=FILNAM(0)(1:leng0),ERR=95,IOSTAT=IOS) - - READ(IUNTOP,11,ERR=90) MACHIN - 11 FORMAT(A) - WRITE(6,13) MACHIN - 13 FORMAT('...READY TO READ FILES TO OPEN ON MACHINE ',A) - - DO IFILE=1,NFILMX - NF=NF+1 - READ(IUNTOP,21,END=50,ERR=90,IOSTAT=IOS) IUNIT(NF), - 1 CFZ,CSTZ,CACZ,FILNAM(NF) - 21 FORMAT(I2,3(1X,A1),1X,A) - - LENGTH=INDEX(FILNAM(NF),' ')-1 - WRITE(6,23) NF,IUNIT(NF),CFZ,CSTZ,CACZ,FILNAM(NF)(1:LENGTH) - 23 FORMAT('...FOR FILE #',I3,', READING IUNIT, ABBREVIATIONS CFZ', - 1 ',CSTZ,CACZ='/4X,I3,3(1X,A,1X),5x,'...FILENAME=',A,'...') - -c Interpret the abbreviations - - if(CFZ .eq. 'f' .or. CFZ .eq. 'F') then - cform(nf)='FORMATTED' - else if(CFZ .eq. 'u' .or. CFZ .eq. 'U') then - cform(nf)='UNFORMATTED' - else - write(6,25) CFZ - 25 format('******option ',a,' for format is not allowed. Abort') - call abort1(' OFILE0',25) - endif - - if(CSTZ .eq. 'o' .or. CSTZ .eq. 'O') then - cstat(nf)='OLD' - else if(CSTZ .eq. 'n' .or. CSTZ .eq. 'N') then - cstat(nf)='NEW' - else if(CSTZ .eq. 'k' .or. CSTZ .eq. 'K') then - cstat(nf)='UNKNOWN' - else if(CSTZ .eq. 's' .or. CSTZ .eq. 'S') then - cstat(nf)='SCRATCH' - else - write(6,27) CSTZ - 27 format('******option ',a,' for status is not allowed. Abort') - call abort1(' OFILE0',27) - endif - - cpos(nf)=' ' - if(CACZ .eq. 'd' .or. CACZ .eq. 'D') then - cacces(nf)='DIRECT' - else if(CACZ .eq. 'q' .or. CACZ .eq. 'Q') then - cacces(nf)='SEQUENTIAL' - else if(CACZ .eq. 'a' .or. CACZ .eq. 'A') then - cacces(nf)='APPEND' - else if(CACZ .eq. 's' .or. CACZ .eq. 'S') then - cacces(nf)='SEQUENTIAL' - cpos(nf)='APPEND' - else if(CACZ .eq. 't' .or. CACZ .eq. 'T') then - cacces(nf)='DIRECT' - cpos(nf)='APPEND' - else - write(6,29) CACZ - 29 format('******option ',a,' for access is not allowed. Abort') - call abort1(' OFILE0',29) - endif - - IF(CACCES(NF) .NE. 'DIRECT') THEN - if(cpos(nf) .eq. 
' ') then - if (cstat(nf).eq.'OLD') then - OPEN(UNIT=IUNIT(NF),FORM=cform(nf),STATUS='OLD', - 1 ACCESS=cacces(nf),FILE=FILNAM(NF)(1:LENGTH), - 2 ERR=95,IOSTAT=IOS) - elseif (cstat(nf).eq.'NEW') then - OPEN(UNIT=IUNIT(NF),FORM=cform(nf),STATUS='NEW', - 1 ACCESS=cacces(nf),FILE=FILNAM(NF)(1:LENGTH), - 2 ERR=95,IOSTAT=IOS) - elseif (cstat(nf).eq.'UNKNOWN') then - OPEN(UNIT=IUNIT(NF),FORM=cform(nf),STATUS='UNKNOWN', - 1 ACCESS=cacces(nf),FILE=FILNAM(NF)(1:LENGTH), - 2 ERR=95,IOSTAT=IOS) - else - OPEN(UNIT=IUNIT(NF),FORM=cform(nf),STATUS=cstat(nf), - 1 ACCESS=cacces(nf), - 2 ERR=95,IOSTAT=IOS) - endif - else - if (cstat(nf).eq.'OLD') then - open(unit=iunit(nf),form=cform(nf),status='OLD', - 1 access=cacces(nf),position=cpos(nf), - 2 file=filnam(nf)(1:length),err=95,iostat=ios) - elseif (cstat(nf).eq.'NEW') then - open(unit=iunit(nf),form=cform(nf),status='NEW', - 1 access=cacces(nf),position=cpos(nf), - 2 file=filnam(nf)(1:length),err=95,iostat=ios) - elseif (cstat(nf).eq.'UNKNOWN') then - open(unit=iunit(nf),form=cform(nf),status='UNKNOWN', - 1 access=cacces(nf),position=cpos(nf), - 2 file=filnam(nf)(1:length),err=95,iostat=ios) - else - open(unit=iunit(nf),form=cform(nf),status=cstat(nf), - 1 access=cacces(nf),position=cpos(nf), - 2 err=95,iostat=ios) - endif - endif - ELSE - read(filnam(nf)(length+2:length+2+idgmax-1),37) lrec - 37 format(i7) - write(6,39) lrec - 39 format('...Direct access record length:',i7,'...') - if(cpos(nf) .eq. ' ') then - if (cstat(nf).eq.'OLD') then - OPEN(UNIT=IUNIT(NF),FORM=CFORM(NF),STATUS='OLD', - 1 ACCESS=CACCES(NF),FILE=FILNAM(NF)(1:LENGTH), - 2 ERR=95,IOSTAT=IOS,RECL=lrec) - elseif (cstat(nf).eq.'NEW') then - OPEN(UNIT=IUNIT(NF),FORM=CFORM(NF),STATUS='NEW', - 1 ACCESS=CACCES(NF),FILE=FILNAM(NF)(1:LENGTH), - 2 ERR=95,IOSTAT=IOS,RECL=lrec) - elseif (cstat(nf).eq.'UNKNOWN') then - OPEN(UNIT=IUNIT(NF),FORM=CFORM(NF),STATUS='UNKNOWN', - 1 ACCESS=CACCES(NF),FILE=FILNAM(NF)(1:LENGTH), - 2 ERR=95,IOSTAT=IOS,RECL=lrec) - else - OPEN(UNIT=IUNIT(NF),FORM=CFORM(NF),STATUS=CSTAT(NF), - 1 ACCESS=CACCES(NF), - 2 ERR=95,IOSTAT=IOS,RECL=lrec) - endif - else - if (cstat(nf).eq.'OLD') then - open(unit=iunit(nf),form=cform(nf),status='OLD', - 1 access=cacces(nf),file=filnam(nf)(1:length), - 2 position=cpos(nf),err=95,iostat=ios,recl=lrec) - elseif (cstat(nf).eq.'NEW') then - open(unit=iunit(nf),form=cform(nf),status='NEW', - 1 access=cacces(nf),file=filnam(nf)(1:length), - 2 position=cpos(nf),err=95,iostat=ios,recl=lrec) - elseif (cstat(nf).eq.'UNKNOWN') then - open(unit=iunit(nf),form=cform(nf),status='UNKNOWN', - 1 access=cacces(nf),file=filnam(nf)(1:length), - 2 position=cpos(nf),err=95,iostat=ios,recl=lrec) - else - open(unit=iunit(nf),form=cform(nf),status=cstat(nf), - 1 access=cacces(nf), - 2 position=cpos(nf),err=95,iostat=ios,recl=lrec) - endif - endif - ENDIF - ENDDO - - WRITE(6,391) NFILMX - 391 FORMAT('******NUMBER OF FILES TO BE OPENED MEETS OR EXCEEDS ', - 1 'MAXIMUM SET BY PROGRAM (=',I3) - CALL ABORT1(' OFILE0',50) - - 50 CONTINUE - -C WE HAVE DEFINED AND OPENED ALL FILES - - NFTOT=NF-1 - WRITE(6,51) NFTOT,MACHIN - 51 FORMAT(/'...SUCCESSFULLY OPENED ',I3,' FILES ON ',A) - RETURN - - 90 CONTINUE - WRITE(6,91) FILNAM(0)(1:leng0),ios - 91 FORMAT('******ERROR READING OPEN FILE=',A,' error=',i4) - CALL ABORT1(' OFILE0',91) - - 95 CONTINUE - WRITE(6,96) NF,IOS - 96 FORMAT('******ERROR UPON OPENING FILE, NF,IOS=',2I5) - CALL ABORT1(' OFILE0',96) - - END diff --git a/sorc/tave.fd/makefile b/sorc/tave.fd/makefile deleted file mode 100755 index 3ccaf4b87b..0000000000 --- 
a/sorc/tave.fd/makefile +++ /dev/null @@ -1,25 +0,0 @@ -SHELL= /bin/sh -ISIZE = 4 -RSIZE = 8 -COMP= ifort -##INC = /contrib/nceplibs/nwprod/lib/incmod/g2_d -##LIBS= -L/contrib/nceplibs/nwprod/lib -lw3emc_d -lw3nco_d -lg2_d -lbacio_4 -ljasper -lpng -lz -LDFLAGS= -# DEBUG= -check all -debug all -traceback -FFLAGS= -O2 -g -traceback -I $(INC) -i$(ISIZE) -r$(RSIZE) -# FFLAGS= -O3 -I $(INC) -i$(ISIZE) -r$(RSIZE) - -tave: tave.f - @echo " " - @echo " Compiling the interpolation program....." - $(COMP) $(FFLAGS) $(LDFLAGS) tave.f $(LIBS) -o tave.x - @echo " " - -CMD = tave.x - -clean: - -rm -f *.o *.mod - -install: - mv $(CMD) ../../exec/$(CMD) - diff --git a/sorc/tave.fd/tave.f b/sorc/tave.fd/tave.f deleted file mode 100755 index bbf5263463..0000000000 --- a/sorc/tave.fd/tave.f +++ /dev/null @@ -1,1083 +0,0 @@ - program tave -c -c ABSTRACT: This program averages the temperatures from an input -c grib file and produces an output grib file containing the mean -c temperature in the 300-500 mb layer. For each model and each -c lead time, there will need to be data from 300 to 500 mb in -c 50 mb increments, such that all 5 of these layers then get -c averaged together. -c -c Written by Tim Marchok - - USE params - USE grib_mod - - implicit none - - type(gribfield) :: holdgfld - integer, parameter :: lugb=11,lulv=16,lugi=31,lout=51 - integer, parameter :: nlevsout=1,nlevsin=5 - integer kpds(200),kgds(200) - integer iriret,iogret,kf,iggret,igdret,iidret,gribver,g2_jpdtn - integer iha,iho,iva,irfa,iodret,ifcsthour,iia,iparm - integer ilevs(nlevsin) - real, allocatable :: xinptmp(:,:),xouttmp(:) - logical(1), allocatable :: valid_pt(:),readflag(:) - real xoutlev - - namelist/timein/ifcsthour,iparm,gribver,g2_jpdtn -c - data ilevs /300, 350, 400, 450, 500/ - xoutlev = 401. -c - read (5,NML=timein,END=201) - 201 continue - print *,' ' - print *,'*---------------------------------------------*' - print *,' ' - print *,' +++ Top of tave +++ ' - print *,' ' - print *,'After tave namelist read, input forecast hour= ' - & ,ifcsthour - print *,' input GRIB parm= ',iparm - print *,' GRIB version= ',gribver - print *,' GRIB2 JPDTN= g2_jpdtn= ' - & ,g2_jpdtn - -c ilevs = -999 -c call read_input_levels (lulv,nlevsin,ilevs,iriret) -c -c if (iriret /= 0) then -c print *,' ' -c print *,'!!! RETURN CODE FROM read_input_levels /= 0' -c print *,'!!! RETURN CODE = iriret = ',iriret -c print *,'!!! EXITING....' -c print *,' ' -c goto 899 -c endif - - call open_grib_files (lugb,lugi,lout,gribver,iogret) - if (iogret /= 0) then - print '(/,a35,a5,i4,/)','!!! ERROR: in tave open_grib_files,' - & ,' rc= ',iogret - goto 899 - endif - call getgridinfo (lugb,lugi,kf,kpds,kgds,holdgfld,ifcsthour,iparm - & ,gribver,g2_jpdtn,iggret) - - allocate (xinptmp(kf,nlevsin),stat=iha) - allocate (xouttmp(kf),stat=iho) - allocate (valid_pt(kf),stat=iva) - allocate (readflag(nlevsin),stat=irfa) - if (iha /= 0 .or. iho /= 0 .or. iva /= 0 .or. irfa /= 0) then - print *,' ' - print *,'!!! ERROR in tave allocating arrays.' - print *,'!!! ERROR allocating the xinptmp, readflag, or the' - print *,'!!! valid_pt array, iha= ',iha,' iva= ',iva - print *,'!!! 
irfa= ',irfa,' iho= ',iho - print *,' ' - goto 899 - endif - - call getdata (lugb,lugi,kf,valid_pt,nlevsin,ilevs - & ,readflag,xinptmp,ifcsthour,iparm,gribver - & ,g2_jpdtn,igdret) - - call average_data (kf,valid_pt,nlevsin,ilevs,readflag - & ,xinptmp,xouttmp,iidret) - - call output_data (lout,kf,kpds,kgds,holdgfld,xouttmp,valid_pt - & ,xoutlev,nlevsout,gribver,ifcsthour,iodret) - - deallocate (xinptmp) - deallocate (xouttmp) - deallocate (valid_pt) - deallocate (readflag) - - 899 continue -c - stop - end -c -c--------------------------------------------------------------------- -c -c--------------------------------------------------------------------- - subroutine read_input_levels (lulv,nlevsin,ilevs,iriret) -c -c ABSTRACT: This subroutine reads in a text file that contains -c the number of input pressure levels for a given model. The -c format of the file goes like this, from upper levels to -c lower, for example: -c -c 1 200 -c 2 400 -c 3 500 -c 4 700 -c 5 850 -c 6 925 -c 7 1000 -c -c - implicit none - - integer lulv,nlevsin,iriret,inplev,ict,lvix - integer ilevs(nlevsin) -c - iriret=0 - ict = 0 - do while (.true.) - - print *,'Top of while loop in tave read_input_levels' - - read (lulv,85,end=130) lvix,inplev - - if (inplev > 0 .and. inplev <= 1000) then - ict = ict + 1 - ilevs(ict) = inplev - else - print *,' ' - print *,'!!! ERROR: Input level not between 0 and 1000' - print *,'!!! in tave. inplev= ',inplev - print *,'!!! STOPPING EXECUTION' - STOP 91 - endif - - print *,'tave readloop, ict= ',ict,' inplev= ',inplev - - enddo - - 85 format (i4,1x,i4) - 130 continue - - nlevsin = ict - - print *,' ' - print *,'Total number of tave levels read in = ',nlevsin -c - return - end - -c--------------------------------------------------------------------- -c -c--------------------------------------------------------------------- - subroutine getgridinfo (lugb,lugi,kf,kpds,kgds,holdgfld,ifcsthour - & ,iparm,gribver,g2_jpdtn,iggret) -c -c ABSTRACT: The purpose of this subroutine is just to get the max -c values of i and j and the dx and dy grid spacing intervals for the -c grid to be used in the rest of the program. So just read the -c grib file to get the lon and lat data. Also, get the info for -c the data grids boundaries. This boundary information will be -c used later in the tracking algorithm, and is accessed via Module -c grid_bounds. -c -C INPUT: -C lugb The Fortran unit number for the GRIB data file -C lugi The Fortran unit number for the GRIB index file -c ifcsthour input forecast hour to search for -c iparm input grib parm to search for -c gribver integer (1 or 2) to indicate if using GRIB1 / GRIB2 -c g2_jpdtn If GRIB2 data being read, this is the value for JPDTN -c that is input to getgb2. 
-C -C OUTPUT: -c kf Number of gridpoints on the grid -c kpds pds array for a GRIB1 record -c kgds gds array for a GRIB1 record -c holdgfld info for a GRIB2 record -c -C iggret The return code from this subroutine -c - USE params - USE grib_mod - - implicit none -c - CHARACTER(len=8) :: ctemp - CHARACTER(len=80) :: ftemplate - type(gribfield) :: gfld,prevfld,holdgfld - integer,dimension(200) :: jids,jpdt,jgdt - logical(1), allocatable :: lb(:) - integer, parameter :: jf=4000000 - integer jpds(200),jgds(200) - integer kpds(200),kgds(200) - integer :: listsec1(13) - integer ila,ifa,iret,ifcsthour,imax,jmax,jskp,jdisc - integer lugb,lugi,kf,j,k,iggret,iparm,gribver,g2_jpdtn - integer jpdtn,jgdtn,npoints,icount,ipack,krec - integer :: listsec0(2)=(/0,2/) - integer :: igds(5)=(/0,0,0,0,0/),previgds(5) - integer :: idrstmpl(200) - integer :: currlen=1000000 - logical :: unpack=.true. - logical :: open_grb=.false. - real, allocatable :: f(:) - real dx,dy -c - iggret = 0 - - allocate (lb(jf),stat=ila) - allocate (f(jf),stat=ifa) - if (ila /= 0 .or. ifa /= 0) then - print *,' ' - print *,'!!! ERROR in tave.' - print *,'!!! ERROR in getgridinfo allocating either lb or f' - print *,'!!! ila = ',ila,' ifa= ',ifa - iggret = 97 - return - endif - - if (gribver == 2) then - - ! Search for a record from a GRIB2 file - - ! - ! --- Initialize Variables --- - ! - - gfld%idsect => NULL() - gfld%local => NULL() - gfld%list_opt => NULL() - gfld%igdtmpl => NULL() - gfld%ipdtmpl => NULL() - gfld%coord_list => NULL() - gfld%idrtmpl => NULL() - gfld%bmap => NULL() - gfld%fld => NULL() - - jdisc=0 ! Meteorological products - jids=-9999 - jpdtn=g2_jpdtn ! 0 = analysis or forecast; 1 = ens fcst - jgdtn=0 ! lat/lon grid - jgdt=-9999 - jpdt=-9999 - - npoints=0 - icount=0 - jskp=0 - -c Search for Temperature by production template 4.0 - - JPDT(1:15)=(/ -9999,-9999,-9999,-9999,-9999,-9999,-9999,-9999 - & ,-9999,-9999,-9999,-9999,-9999,-9999,-9999/) - - call getgb2(lugb,lugi,jskp,jdisc,jids,jpdtn,jpdt,jgdtn,jgdt - & ,unpack,krec,gfld,iret) - if ( iret.ne.0) then - print *,' ' - print *,' ERROR: getgb2 error in getgridinfo = ',iret - endif - -c Determine packing information from GRIB2 file -c The default packing is 40 JPEG 2000 - - ipack = 40 - - print *,' gfld%idrtnum = ', gfld%idrtnum - - ! Set DRT info ( packing info ) - if ( gfld%idrtnum.eq.0 ) then ! Simple packing - ipack = 0 - elseif ( gfld%idrtnum.eq.2 ) then ! Complex packing - ipack = 2 - elseif ( gfld%idrtnum.eq.3 ) then ! Complex & spatial packing - ipack = 31 - elseif ( gfld%idrtnum.eq.40.or.gfld%idrtnum.eq.15 ) then - ! JPEG 2000 packing - ipack = 40 - elseif ( gfld%idrtnum.eq.41 ) then ! PNG packing - ipack = 41 - endif - - print *,'After check of idrtnum, ipack= ',ipack - - print *,'Number of gridpts= gfld%ngrdpts= ',gfld%ngrdpts - print *,'Number of elements= gfld%igdtlen= ',gfld%igdtlen - print *,'PDT num= gfld%ipdtnum= ',gfld%ipdtnum - print *,'GDT num= gfld%igdtnum= ',gfld%igdtnum - - imax = gfld%igdtmpl(8) - jmax = gfld%igdtmpl(9) - dx = float(gfld%igdtmpl(17))/1.e6 - dy = float(gfld%igdtmpl(17))/1.e6 - kf = gfld%ngrdpts - - holdgfld = gfld - - else - - ! Search for a record from a GRIB1 file - - jpds = -1 - jgds = -1 - - j=0 - - jpds(5) = iparm ! Get a temperature record - jpds(6) = 100 ! Get a record on a standard pressure level - jpds(14) = ifcsthour - - call getgb(lugb,lugi,jf,j,jpds,jgds, - & kf,k,kpds,kgds,lb,f,iret) - - if (iret.ne.0) then - print *,' ' - print *,'!!! ERROR in tave getgridinfo calling getgb' - print *,'!!! 
Return code from getgb = iret = ',iret - iggret = iret - return - else - iggret=0 - imax = kgds(2) - jmax = kgds(3) - dx = float(kgds(9))/1000. - dy = float(kgds(10))/1000. - endif - - endif - - print *,' ' - print *,'In getgridinfo, grid dimensions follow:' - print *,'imax= ',imax,' jmax= ',jmax - print *,' dx= ',dx,' dy= ',dy - print *,'number of gridpoints = ',kf - - deallocate (lb); deallocate(f) - - return - end - -c--------------------------------------------------------------------- -c -c--------------------------------------------------------------------- - subroutine getdata (lugb,lugi,kf,valid_pt,nlevsin,ilevs - & ,readflag,xinptmp,ifcsthour,iparm,gribver - & ,g2_jpdtn,igdret) -c -c ABSTRACT: This subroutine reads the input GRIB file for the -c tracked parameters. - - USE params - USE grib_mod - - implicit none -c - type(gribfield) :: gfld,prevfld - CHARACTER(len=8) :: ctemp,pabbrev - CHARACTER(len=80) :: ftemplate - integer,dimension(200) :: jids,jpdt,jgdt - integer, parameter :: jf=4000000 - integer ilevs(nlevsin) - integer jpds(200),jgds(200),kpds(200),kgds(200) - integer lugb,lugi,kf,nlevsin,igdret,iparm,jskp,jdisc - integer jpdtn,jgdtn,npoints,icount,ipack,krec - integer i,j,k,ict,np,lev,ifcsthour,iret,gribver,g2_jpdtn - integer pdt_4p0_vert_level,pdt_4p0_vtime,mm - integer :: listsec0(2)=(/0,2/) - integer :: listsec1(13) - integer :: igds(5)=(/0,0,0,0,0/),previgds(5) - integer :: idrstmpl(200) - integer :: currlen=1000000 - logical :: unpack=.true. - logical :: open_grb=.false. - logical(1) valid_pt(kf),lb(kf),readflag(nlevsin) - real f(kf),xinptmp(kf,nlevsin),xtemp(kf) - real dmin,dmax,firstval,lastval -c - igdret=0 - ict = 0 - - print *,'At top of getdata, ifcsthour= ',ifcsthour - - level_loop: do lev = 1,nlevsin - - print *,' ' - print *,'------------------------------------------------' - print *,'In tave getdata read loop, lev= ',lev,' level= ' - & ,ilevs(lev) - - if (gribver == 2) then - - ! - ! --- Initialize Variables --- - ! - - gfld%idsect => NULL() - gfld%local => NULL() - gfld%list_opt => NULL() - gfld%igdtmpl => NULL() - gfld%ipdtmpl => NULL() - gfld%coord_list => NULL() - gfld%idrtmpl => NULL() - gfld%bmap => NULL() - gfld%fld => NULL() - - jdisc=0 ! Meteorological products - jids=-9999 - jpdtn=g2_jpdtn ! 0 = analysis or forecast; 1 = ens fcst - jgdtn=0 ! lat/lon grid - jgdt=-9999 - jpdt=-9999 - - npoints=0 - icount=0 - jskp=0 - -c Search for input parameter by production template 4.0. This -c tave program is used primarily for temperature, but still we -c will leave that as a variable and not-hard wire it in case we -c choose to average something else in the future. - - if (iparm == 11) then - - ! Set defaults for JPDT, then override in array - ! assignments below... - - JPDT(1:15)=(/ -9999,-9999,-9999,-9999,-9999,-9999,-9999 - & ,-9999,-9999,-9999,-9999,-9999,-9999,-9999,-9999/) - JPDT(1) = 0 ! Param category from Table 4.1 - JPDT(2) = 0 ! Param number from Table 4.2 - JPDT(9) = ifcsthour - JPDT(10) = 100 ! Isobaric surface requested (Table 4.5) - JPDT(12) = ilevs(lev) * 100 ! value of specific level - - print *,'In getdata, just set JPDT inputs....' 
- - endif - - print *,'before getgb2 call, value of unpack = ',unpack - - do mm = 1,15 - print *,'tave getdata mm= ',mm,' JPDT(mm)= ',JPDT(mm) - enddo - - call getgb2(lugb,lugi,jskp,jdisc,jids,jpdtn,jpdt,jgdtn,jgdt - & ,unpack,krec,gfld,iret) - - print *,'iret from getgb2 in getdata = ',iret - - print *,'after getgb2 call, value of unpacked = ' - & ,gfld%unpacked - - print *,'after getgb2 call, gfld%ndpts = ',gfld%ndpts - print *,'after getgb2 call, gfld%ibmap = ',gfld%ibmap - - if ( iret == 0) then - -c Determine packing information from GRIB2 file -c The default packing is 40 JPEG 2000 - - ipack = 40 - - print *,' gfld%idrtnum = ', gfld%idrtnum - - ! Set DRT info ( packing info ) - if ( gfld%idrtnum.eq.0 ) then ! Simple packing - ipack = 0 - elseif ( gfld%idrtnum.eq.2 ) then ! Complex packing - ipack = 2 - elseif ( gfld%idrtnum.eq.3 ) then ! Complex & spatial - & ! packing - ipack = 31 - elseif ( gfld%idrtnum.eq.40.or.gfld%idrtnum.eq.15 ) then - ! JPEG 2000 packing - ipack = 40 - elseif ( gfld%idrtnum.eq.41 ) then ! PNG packing - ipack = 41 - endif - - print *,'After check of idrtnum, ipack= ',ipack - - print *,'Number of gridpts= gfld%ngrdpts= ',gfld%ngrdpts - print *,'Number of elements= gfld%igdtlen= ',gfld%igdtlen - print *,'GDT num= gfld%igdtnum= ',gfld%igdtnum - - kf = gfld%ndpts ! Number of gridpoints returned from read - - do np = 1,kf - xinptmp(np,lev) = gfld%fld(np) - xtemp(np) = gfld%fld(np) - if (gfld%ibmap == 0) then - valid_pt(np) = gfld%bmap(np) - else - valid_pt(np) = .true. - endif - enddo - - readflag(lev) = .TRUE. -c call bitmapchk(kf,gfld%bmap,gfld%fld,dmin,dmax) - call bitmapchk(kf,valid_pt,xtemp,dmin,dmax) - - if (ict == 0) then -c do np = 1,kf -c valid_pt(np) = gfld%bmap(np) -c enddo - ict = ict + 1 - endif - - firstval=gfld%fld(1) - lastval=gfld%fld(kf) - - print *,' ' - print *,' SECTION 0: discipl= ',gfld%discipline - & ,' gribver= ',gfld%version - - print *,' ' - print *,' SECTION 1: ' - - do j = 1,gfld%idsectlen - print *,' sect1, j= ',j,' gfld%idsect(j)= ' - & ,gfld%idsect(j) - enddo - - if ( associated(gfld%local).AND.gfld%locallen.gt.0) then - print *,' ' - print *,' SECTION 2: ',gfld%locallen,' bytes' - else - print *,' ' - print *,' SECTION 2 DOES NOT EXIST IN THIS RECORD' - endif - - print *,' ' - print *,' SECTION 3: griddef= ',gfld%griddef - print *,' ngrdpts= ',gfld%ngrdpts - print *,' numoct_opt= ',gfld%numoct_opt - print *,' interp_opt= ',gfld%interp_opt - print *,' igdtnum= ',gfld%igdtnum - print *,' igdtlen= ',gfld%igdtlen - - print *,' ' - print '(a17,i3,a2)',' GRID TEMPLATE 3.',gfld%igdtnum,': ' - do j=1,gfld%igdtlen - print *,' j= ',j,' gfld%igdtmpl(j)= ',gfld%igdtmpl(j) - enddo - - print *,' ' - print *,' PDT num (gfld%ipdtnum) = ',gfld%ipdtnum - print *,' ' - print '(a20,i3,a2)',' PRODUCT TEMPLATE 4.',gfld%ipdtnum,': ' - do j=1,gfld%ipdtlen - print *,' sect 4 j= ',j,' gfld%ipdtmpl(j)= ' - & ,gfld%ipdtmpl(j) - enddo - -c Print out values for data representation type - - print *,' ' - print '(a21,i3,a2)',' DATA REP TEMPLATE 5.',gfld%idrtnum - & ,': ' - do j=1,gfld%idrtlen - print *,' sect 5 j= ',j,' gfld%idrtmpl(j)= ' - & ,gfld%idrtmpl(j) - enddo - - pdt_4p0_vtime = gfld%ipdtmpl(9) - pdt_4p0_vert_level = gfld%ipdtmpl(12) - -c Get parameter abbrev for record that was retrieved - - pabbrev=param_get_abbrev(gfld%discipline,gfld%ipdtmpl(1) - & ,gfld%ipdtmpl(2)) - - print *,' ' - write (6,131) - 131 format (' rec# param level byy bmm bdd bhh ' - & ,'fhr npts firstval lastval minval ' - & ,' maxval') - print '(i5,3x,a8,2x,6i5,2x,i8,4g12.4)' - & 
,krec,pabbrev,pdt_4p0_vert_level/100,gfld%idsect(6) - & ,gfld%idsect(7),gfld%idsect(8),gfld%idsect(9) - & ,pdt_4p0_vtime,gfld%ndpts,firstval,lastval,dmin,dmax - -c do np = 1,kf -c xinptmp(np,lev) = gfld%fld(np) -c enddo - - else - - print *,' ' - print *,'!!! ERROR: GRIB2 TAVE READ IN GETDATA FAILED FOR ' - & ,'LEVEL LEV= ',LEV - print *,' ' - - readflag(lev) = .FALSE. - - do np = 1,kf - xinptmp(np,lev) = -99999.0 - enddo - - endif - - else - - ! Reading a GRIB1 file.... - - jpds = -1 - jgds = -1 - j=0 - - jpds(5) = iparm ! parameter id for temperature - jpds(6) = 100 ! level id to indicate a pressure level - jpds(7) = ilevs(lev) ! actual level of the layer - jpds(14) = ifcsthour ! lead time to search for - - call getgb (lugb,lugi,jf,j,jpds,jgds, - & kf,k,kpds,kgds,lb,f,iret) - - print *,' ' - print *,'After tave getgb call, j= ',j,' k= ',k,' level= ' - & ,ilevs(lev),' iret= ',iret - - if (iret == 0) then - - readflag(lev) = .TRUE. - call bitmapchk(kf,lb,f,dmin,dmax) - - if (ict == 0) then - do np = 1,kf - valid_pt(np) = lb(np) - enddo - ict = ict + 1 - endif - - write (6,31) - 31 format (' rec# parm# levt lev byy bmm bdd bhh fhr ' - & ,'npts minval maxval') - print '(i4,2x,8i5,i8,2g12.4)', - & k,(kpds(i),i=5,11),kpds(14),kf,dmin,dmax - - do np = 1,kf - xinptmp(np,lev) = f(np) - enddo - - else - - print *,' ' - print *,'!!! ERROR: TAVE READ FAILED FOR LEVEL LEV= ',LEV - print *,' ' - - readflag(lev) = .FALSE. - - do np = 1,kf - xinptmp(np,lev) = -99999.0 - enddo - - endif - - endif - - enddo level_loop -c - return - end -c -c----------------------------------------------------------------------- -c -c----------------------------------------------------------------------- - subroutine average_data (kf,valid_pt,nlevsin,ilevs,readflag - & ,xinptmp,xouttmp,iidret) -c -c ABSTRACT: This routine averages data between 300 and 500 mb to get -c a mean temperature at 400 mb. The input data should be at 50 mb -c resolution, giving 5 input levels in total. - - implicit none - - logical(1) valid_pt(kf),readflag(nlevsin) - integer ilevs(nlevsin) - integer nlevsin,kf,k,n,iidret - real xinptmp(kf,nlevsin),xouttmp(kf) - real xinlevs_p(nlevsin),xinlevs_lnp(nlevsin) - real xsum -c - iidret=0 - print *,'*----------------------------------------------*' - print *,' Top of average data routine' - print *,'*----------------------------------------------*' - print *,' ' - - do n = 1,kf - xsum = 0.0 -c print *,' ' - do k = 1,nlevsin - xsum = xsum + xinptmp(n,k) -c print *,'n= ',n,' k= ',k,' xsum= ',xsum - enddo - xouttmp(n) = xsum / float(nlevsin) -c print *,'n= ',n,' mean= ',xouttmp(n) - enddo -c - return - end -c -c---------------------------------------------------------------------- -c -c---------------------------------------------------------------------- - subroutine output_data (lout,kf,kpds,kgds,holdgfld,xouttmp - & ,valid_pt,xoutlev,nlevsout,gribver,ifcsthour,iodret) -c -c ABSTRACT: This routine writes out the output data on the -c specified output pressure levels. 
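output_data writes the averaged record to Fortran unit 51, which it opens as a file literally named fort.51 via baopenw; the companion routine open_grib_files further below applies the same fort.NN convention to the input GRIB file (unit 11) and its index (unit 31), and the main program reads its ifcsthour/iparm/gribver/g2_jpdtn options from the timein namelist on standard input. A minimal, hypothetical shell driver sketched from those conventions (the executable path and input file names are illustrative, not taken from the workflow):

# hypothetical driver for the removed tave.x utility; file names are illustrative
ln -sf pgrb_input.grib  fort.11   # GRIB data file  (lugb = 11)
ln -sf pgrb_input.index fort.31   # GRIB index file (lugi = 31)
./tave.x <<EOF
&timein ifcsthour=24, iparm=11, gribver=1, g2_jpdtn=0 /
EOF
# the 300-500 mb layer-mean temperature record is written to fort.51
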
- - USE params - USE grib_mod - - implicit none - - CHARACTER(len=1),pointer,dimension(:) :: cgrib -c CHARACTER(len=1),pointer,allocatable :: cgrib(:) - type(gribfield) :: holdgfld - logical(1) valid_pt(kf),bmap(kf) - integer lout,kf,lugb,lugi,iodret,nlevsout,igoret,ipret,lev - integer gribver,ierr,ipack,lengrib,npoints,newlen,idrsnum - integer numcoord,ica,n,j,ifcsthour - integer :: idrstmpl(200) - integer :: currlen=1000000 - integer :: listsec0(2)=(/0,2/) - integer :: igds(5)=(/0,0,0,0,0/),previgds(5) - integer kpds(200),kgds(200) - integer(4), parameter::idefnum=1 - integer(4) ideflist(idefnum),ibmap - real xouttmp(kf),xoutlev,coordlist -c - iodret=0 - call baopenw (lout,"fort.51",igoret) - print *,'baopenw: igoret= ',igoret - - if (igoret /= 0) then - print *,' ' - print *,'!!! ERROR in sub output_data opening' - print *,'!!! **OUTPUT** grib file. baopenw return codes:' - print *,'!!! grib file 1 return code = igoret = ',igoret - STOP 95 - return - endif - - if (gribver == 2) then - - ! Write data out as a GRIB2 message.... - - allocate(cgrib(currlen),stat=ica) - if (ica /= 0) then - print *,' ' - print *,'ERROR in output_data allocating cgrib' - print *,'ica= ',ica - iodret=95 - return - endif - - - ! Ensure that cgrib array is large enough - - if (holdgfld%ifldnum == 1 ) then ! start new GRIB2 message - npoints=holdgfld%ngrdpts - else - npoints=npoints+holdgfld%ngrdpts - endif - newlen=npoints*4 - if ( newlen.gt.currlen ) then -ccc if (allocated(cgrib)) deallocate(cgrib) - if (associated(cgrib)) deallocate(cgrib) - allocate(cgrib(newlen),stat=ierr) -c call realloc (cgrib,currlen,newlen,ierr) - if (ierr == 0) then - print *,' ' - print *,'re-allocate for large grib msg: ' - print *,' currlen= ',currlen - print *,' newlen= ',newlen - currlen=newlen - else - print *,'ERROR returned from 2nd allocate cgrib = ',ierr - stop 95 - endif - endif - - ! Create new GRIB Message - listsec0(1)=holdgfld%discipline - listsec0(2)=holdgfld%version - - print *,'output, holdgfld%idsectlen= ',holdgfld%idsectlen - do j = 1,holdgfld%idsectlen - print *,' sect1, j= ',j,' holdgfld%idsect(j)= ' - & ,holdgfld%idsect(j) - enddo - - call gribcreate(cgrib,currlen,listsec0,holdgfld%idsect,ierr) - if (ierr.ne.0) then - write(6,*) ' ERROR creating new GRIB2 field (gribcreate)= ' - & ,ierr - stop 95 - endif - - previgds=igds - igds(1)=holdgfld%griddef - igds(2)=holdgfld%ngrdpts - igds(3)=holdgfld%numoct_opt - igds(4)=holdgfld%interp_opt - igds(5)=holdgfld%igdtnum - - if (igds(3) == 0) then - ideflist = 0 - endif - - call addgrid (cgrib,currlen,igds,holdgfld%igdtmpl - & ,holdgfld%igdtlen,ideflist,idefnum,ierr) - - if (ierr.ne.0) then - write(6,*) ' ERROR from addgrid adding GRIB2 grid = ',ierr - stop 95 - endif - - - holdgfld%ipdtmpl(12) = int(xoutlev) * 100 - - ipack = 40 - idrsnum = ipack - idrstmpl = 0 - - idrstmpl(2)= holdgfld%idrtmpl(2) - idrstmpl(3)= holdgfld%idrtmpl(3) - idrstmpl(6)= 0 - idrstmpl(7)= 255 - - numcoord=0 - coordlist=0.0 ! Only needed for hybrid vertical coordinate, - ! not here, so set it to 0.0 - - ! 0 - A bit map applies to this product and is specified in - ! this section - ! 255 - A bit map does not apply to this product - ibmap=255 ! 
Bitmap indicator (see Code Table 6.0) - - print *,' ' - print *,'output, holdgfld%ipdtlen= ',holdgfld%ipdtlen - do n = 1,holdgfld%ipdtlen - print *,'output, n= ',n,' holdgfld%ipdtmpl= ' - & ,holdgfld%ipdtmpl(n) - enddo - - print *,'output, kf= ',kf - -c if (ifcsthour < 6) then -c do n = 1,kf -cc print *,'output, n= ',n,' xouttmp(n)= ',xouttmp(n) -c write (92,151) n,xouttmp(n) -c 151 format (1x,'n= ',i6,' xouttmp(n)= ',f10.4) -c enddo -c endif - - call addfield (cgrib,currlen,holdgfld%ipdtnum,holdgfld%ipdtmpl - & ,holdgfld%ipdtlen,coordlist - & ,numcoord - & ,idrsnum,idrstmpl,200 - & ,xouttmp,kf,ibmap,bmap,ierr) - - if (ierr /= 0) then - write(6,*) ' ERROR from addfield adding GRIB2 data = ',ierr - stop 95 - endif - -! Finalize GRIB message after all grids -! and fields have been added. It adds the End Section ( "7777" ) - - call gribend(cgrib,currlen,lengrib,ierr) - call wryte(lout,lengrib,cgrib) - - if (ierr == 0) then - print *,' ' - print *,'+++ GRIB2 write successful. ' - print *,' Len of message = currlen= ',currlen - print *,' Len of entire GRIB2 message = lengrib= ',lengrib - else - print *,' ERROR from gribend writing GRIB2 msg = ',ierr - stop 95 - endif - - else - - ! Write data out as a GRIB1 message.... - - kpds(6) = 100 - - do lev = 1,nlevsout - - kpds(7) = int(xoutlev) - - print *,'tave: just before call to putgb, kf= ',kf - - print *,'output, kf= ',kf -c do n = 1,kf -c print *,'output, n= ',n,' xouttmp(n)= ',xouttmp(n) -c enddo - - if (ifcsthour < 6) then - do n = 1,kf -c print *,'output, n= ',n,' xouttmp(n)= ',xouttmp(n) - write (91,161) n,xouttmp(n) - 161 format (1x,'n= ',i6,' xouttmp(n)= ',f10.4) - enddo - endif - - call putgb (lout,kf,kpds,kgds,valid_pt,xouttmp,ipret) - print *,'tave: just after call to putgb, kf= ',kf - if (ipret == 0) then - print *,' ' - print *,'+++ IPRET = 0 after call to putgb' - print *,' ' - else - print *,' ' - print *,'!!!!!! ERROR in tave' - print *,'!!!!!! ERROR: IPRET NE 0 AFTER CALL TO PUTGB !!!' - print *,'!!!!!! Level index= ',lev - print *,'!!!!!! 
pressure= ',xoutlev - print *,' ' - endif - - write(*,980) kpds(1),kpds(2) - write(*,981) kpds(3),kpds(4) - write(*,982) kpds(5),kpds(6) - write(*,983) kpds(7),kpds(8) - write(*,984) kpds(9),kpds(10) - write(*,985) kpds(11),kpds(12) - write(*,986) kpds(13),kpds(14) - write(*,987) kpds(15),kpds(16) - write(*,988) kpds(17),kpds(18) - write(*,989) kpds(19),kpds(20) - write(*,990) kpds(21),kpds(22) - write(*,991) kpds(23),kpds(24) - write(*,992) kpds(25) - write(*,880) kgds(1),kgds(2) - write(*,881) kgds(3),kgds(4) - write(*,882) kgds(5),kgds(6) - write(*,883) kgds(7),kgds(8) - write(*,884) kgds(9),kgds(10) - write(*,885) kgds(11),kgds(12) - write(*,886) kgds(13),kgds(14) - write(*,887) kgds(15),kgds(16) - write(*,888) kgds(17),kgds(18) - write(*,889) kgds(19),kgds(20) - write(*,890) kgds(21),kgds(22) - - enddo - - 980 format(' kpds(1) = ',i7,' kpds(2) = ',i7) - 981 format(' kpds(3) = ',i7,' kpds(4) = ',i7) - 982 format(' kpds(5) = ',i7,' kpds(6) = ',i7) - 983 format(' kpds(7) = ',i7,' kpds(8) = ',i7) - 984 format(' kpds(9) = ',i7,' kpds(10) = ',i7) - 985 format(' kpds(11) = ',i7,' kpds(12) = ',i7) - 986 format(' kpds(13) = ',i7,' kpds(14) = ',i7) - 987 format(' kpds(15) = ',i7,' kpds(16) = ',i7) - 988 format(' kpds(17) = ',i7,' kpds(18) = ',i7) - 989 format(' kpds(19) = ',i7,' kpds(20) = ',i7) - 990 format(' kpds(21) = ',i7,' kpds(22) = ',i7) - 991 format(' kpds(23) = ',i7,' kpds(24) = ',i7) - 992 format(' kpds(25) = ',i7) - 880 format(' kgds(1) = ',i7,' kgds(2) = ',i7) - 881 format(' kgds(3) = ',i7,' kgds(4) = ',i7) - 882 format(' kgds(5) = ',i7,' kgds(6) = ',i7) - 883 format(' kgds(7) = ',i7,' kgds(8) = ',i7) - 884 format(' kgds(9) = ',i7,' kgds(10) = ',i7) - 885 format(' kgds(11) = ',i7,' kgds(12) = ',i7) - 886 format(' kgds(13) = ',i7,' kgds(14) = ',i7) - 887 format(' kgds(15) = ',i7,' kgds(16) = ',i7) - 888 format(' kgds(17) = ',i7,' kgds(18) = ',i7) - 889 format(' kgds(19) = ',i7,' kgds(20) = ',i7) - 890 format(' kgds(20) = ',i7,' kgds(22) = ',i7) - - endif -c - return - end -c -c----------------------------------------------------------------------- -c -c----------------------------------------------------------------------- - subroutine open_grib_files (lugb,lugi,lout,gribver,iret) - -C ABSTRACT: This subroutine must be called before any attempt is -C made to read from the input GRIB files. The GRIB and index files -C are opened with a call to baopenr. This call to baopenr was not -C needed in the cray version of this program (the files could be -C opened with a simple Cray assign statement), but the GRIB-reading -C utilities on the SP do require calls to this subroutine (it has -C something to do with the GRIB I/O being done in C on the SP, and -C the C I/O package needs an explicit open statement). -C -C INPUT: -C lugb The Fortran unit number for the GRIB data file -C lugi The Fortran unit number for the GRIB index file -C lout The Fortran unit number for the output grib file -c gribver integer (1 or 2) to indicate if using GRIB1 / GRIB2 -C -C OUTPUT: -C iret The return code from this subroutine - - implicit none - - character fnameg*7,fnamei*7,fnameo*7 - integer iret,gribver,lugb,lugi,lout,igoret,iioret,iooret - - iret=0 - fnameg(1:5) = "fort." - fnamei(1:5) = "fort." - fnameo(1:5) = "fort." 
- write(fnameg(6:7),'(I2)') lugb - write(fnamei(6:7),'(I2)') lugi - write(fnameo(6:7),'(I2)') lout - call baopenr (lugb,fnameg,igoret) - call baopenr (lugi,fnamei,iioret) - call baopenw (lout,fnameo,iooret) - - print *,' ' - print *,'tave baopen: igoret= ',igoret,' iioret= ',iioret - & ,' iooret= ',iooret - - if (igoret /= 0 .or. iioret /= 0 .or. iooret /= 0) then - print *,' ' - print *,'!!! ERROR in tave' - print *,'!!! ERROR in sub open_grib_files opening grib file' - print *,'!!! or grib index file. baopen return codes:' - print *,'!!! grib file return code = igoret = ',igoret - print *,'!!! index file return code = iioret = ',iioret - print *,'!!! output file return code = iooret = ',iooret - iret = 93 - return - endif - - return - end -c -c------------------------------------------------------------------- -c -c------------------------------------------------------------------- - subroutine bitmapchk (n,ld,d,dmin,dmax) -c -c This subroutine checks the bitmap for non-existent data values. -c Since the data from the regional models have been interpolated -c from either a polar stereographic or lambert conformal grid -c onto a lat/lon grid, there will be some gridpoints around the -c edges of this lat/lon grid that have no data; these grid -c points have been bitmapped out by Mark Iredell's interpolater. -c To provide another means of checking for invalid data points -c later in the program, set these bitmapped data values to a -c value of -999.0. The min and max of this array are also -c returned if a user wants to check for reasonable values. -c - logical(1) ld - dimension ld(n),d(n) -c - dmin=1.E15 - dmax=-1.E15 -c - do i=1,n - if (ld(i)) then - dmin=min(dmin,d(i)) - dmax=max(dmax,d(i)) - else - d(i) = -999.0 - endif - enddo -c - return - end diff --git a/sorc/tocsbufr.fd/makefile_module b/sorc/tocsbufr.fd/makefile_module deleted file mode 100755 index 06f5ba7092..0000000000 --- a/sorc/tocsbufr.fd/makefile_module +++ /dev/null @@ -1,82 +0,0 @@ -SHELL=/bin/sh -# -# This makefile was produced by /usr/bin/fmgen at 11:21:07 AM on 10/28/94 -# If it is invoked by the command line -# make -f makefile -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable named a.out. -# -# If it is invoked by the command line -# make -f makefile a.out.prof -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable which profiles -# named a.out.prof. -# -# To remove all the objects but leave the executables use the command line -# make -f makefile clean -# -# To remove everything but the source files use the command line -# make -f makefile clobber -# -# To remove the source files created by /usr/bin/fmgen and this makefile -# use the command line -# make -f makefile void -# -# The parameters SRCS and OBJS should not need to be changed. If, however, -# you need to add a new module add the name of the source module to the -# SRCS parameter and add the name of the resulting object file to the OBJS -# parameter. The new modules are not limited to fortran, but may be C, YACC, -# LEX, or CAL. An explicit rule will need to be added for PASCAL modules. 
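Because this file is named makefile_module rather than makefile, the invocations described in the comment block above would be issued with -f makefile_module. A brief sketch of the documented targets, assuming the environment variables the makefile expects (myFC, myFCFLAGS and the NCEP library path variables) are already set by the build system:

make -f makefile_module            # compile tocsbufr.f and link ../../exec/tocsbufr
make -f makefile_module clean      # remove the object files only
make -f makefile_module clobber    # also remove the executable(s)
make -f makefile_module void       # also remove the source files
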
-# -SRCS= tocsbufr.f - -OBJS= tocsbufr.o - -# Tunable parameters -# -# FC Name of the fortran compiling system to use -# LDFLAGS Flags to the loader -# LIBS List of libraries -# CMD Name of the executable -# PROFLIB Library needed for profiling -# -FC = $(myFC) -LDFLAGS = $(myFCFLAGS) -LIBS = $(W3EMC_LIB4) \ - $(W3NCO_LIB4) \ - $(BUFR_LIB4) \ - $(BACIO_LIB4) \ - $(SP_LIB4) \ - $(SIGIO_LIB) -CMD = ../../exec/tocsbufr -PROFLIB = -lprof - -# To perform the default compilation, use the first line -# To compile with flowtracing turned on, use the second line -# To compile giving profile additonal information, use the third line -# WARNING: SIMULTANEOUSLY PROFILING AND FLOWTRACING IS NOT RECOMMENDED -FFLAGS = $(FFLAGSM) -#FFLAGS = -F -#FFLAGS = -Wf"-ez" - -# Lines from here on down should not need to be changed. They are the -# actual rules which make uses to build a.out. -# -all: $(CMD) - -$(CMD): $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(LIBS) - -# Make the profiled version of the command and call it a.out.prof -# -$(CMD).prof: $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(PROFLIB) $(LIBS) - -clean: - -rm -f $(OBJS) - -clobber: clean - -rm -f $(CMD) $(CMD).prof - -void: clobber - -rm -f $(SRCS) makefile diff --git a/sorc/tocsbufr.fd/tocsbufr.f b/sorc/tocsbufr.fd/tocsbufr.f deleted file mode 100755 index 0f1914cd1a..0000000000 --- a/sorc/tocsbufr.fd/tocsbufr.f +++ /dev/null @@ -1,272 +0,0 @@ - PROGRAM TOCSBUFR -C$$$ MAIN PROGRAM DOCUMENTATION BLOCK -C . . . . -C MAIN PROGRAM: TOCSBUFR -C PRGMMR: GILBERT ORG: NP11 DATE: 2004-02-23 -C -C ABSTRACT: Reads each BUFR message from a standard fortran blocked (f77) -C file and adds a TOC -C Flag Field separator block and WMO Header in front of each BUFR -C field, and writes them out to a new file. The output file -C is in the format required for TOC's FTP Input Service, which -C can be used to disseminate the BUFR messages. -C This service is described at http://weather.gov/tg/ftpingest.html. -C -C TOCSBUFR contains two options that are selected using -C a namelist on unit 5 ( see INPUT FILES below ): -C 1) The specified WMO HEADER can be added to each BUFR -C message in the file OR once at the beginning of the -C file. -C 2) The BUFR messages can be used "as is", or if they -C in NCEP format they can be "standardized" for external -C users. -C -C PROGRAM HISTORY LOG: -C 2001-03-01 Gilbert modified from WMOGRIB -C 2004-02-23 Gilbert modified from WMOBUFR to write out BUFR -C messages in the NTC/FTP Input Service format -C instead of the old STATFILE format. -C 2005-04-07 Gilbert This version was created from original program -C TOCBUFR. A new more thorough "standardizing" -C routine is being used to create WMO standard -C BUFR messages for AWIPS. -C 2009-06-16 J. Ator The program was modified in response to BUFRLIB -C changes, including a change to the WRITSA call -C sequence. Also added a call to MAXOUT to stop -C BUFR messages larger than 10k bytes from being -C truncated when standardizing. The program can -C now standardize BUFR messages as large as the -C MAXOUT limit without any loss of data. -C 2012-12-06 J. Ator modified for WCOSS -C -C USAGE: -C INPUT FILES: -C 5 - STANDARD INPUT - NAMELIST /INPUT/. -C BULHED = "TTAAII" part of WMO Header (CHAR*6) -C KWBX = "CCCC" orig center part of WMO Header (CHAR*4) -C NCEP2STD = .true. - will convert NCEP format -C BUFR messages to standard WMO -C format. -C = .false. - No conversion done to BUFR -C messages. -C SEPARATE = .true. 
- Add Flag Field Separator and WMO -C Header to each BUFR message in -C file. -C = .false. - Add Flag Field Separator and WMO -C Header once at beginning of -C output file. -C MAXFILESIZE = Max size of output file in bytes. -C Used only when SEPARATE = .false. -C 11 - INPUT BUFR FILE -C -C OUTPUT FILES: (INCLUDING SCRATCH FILES) -C 6 - STANDARD FORTRAN PRINT FILE -C 51 - AWIPS BUFR FILE WITH WMO HEADERS ADDED -C -C SUBPROGRAMS CALLED: (LIST ALL CALLED FROM ANYWHERE IN CODES) -C UNIQUE: - makwmo mkfldsep -C LIBRARY: -C W3LIB - W3TAGB W3UTCDAT -C W3TAGE -C -C EXIT STATES: -C COND = 0 - SUCCESSFUL RUN -C 19 - ERROR READING COMMAND LINE ARGS FOR WMOHEADER -C 20 - Error opening output BUFR transmission file -C 30 - NO BUFR MESSSAGES FOUND -C -C REMARKS: This utility was written for the ETA BUFR sounding -C collectives, and assumes all BUFR messages in the input -C file require the same WMO Header. -C -C ATTRIBUTES: -C LANGUAGE: FORTRAN 90 -C MACHINE: WCOSS -C -C$$$ -C - PARAMETER (MXSIZE=500000,MXSIZED4=MXSIZE/4) - INTEGER,PARAMETER :: INBUFR=11,OUTBUFR=51,TMPBUFR=91,iopt=2 -C - INTEGER,dimension(8):: ITIME=(/0,0,0,-500,0,0,0,0/) - INTEGER,dimension(MXSIZED4):: MBAY - INTEGER NBUL - INTEGER iday,hour - INTEGER :: MAXFILESIZE=1000000 -C - CHARACTER * 80 fileo - CHARACTER * 11 envvar - CHARACTER * 8 SUBSET - CHARACTER * 6 :: BULHED="CHEK12" - CHARACTER * 1 BUFR(MXSIZE) - CHARACTER * 4 :: ctemp,KWBX="OUTT" - CHARACTER * 1 CSEP(80) - integer,parameter :: lenhead=21 - CHARACTER * 1 WMOHDR(lenhead) - character*1,allocatable :: filebuf(:) - LOGICAL :: NCEP2STD=.false.,SEPARATE=.true. -C - EQUIVALENCE (BUFR(1), MBAY(1)) -C - NAMELIST /INPUT/ BULHED,KWBX,NCEP2STD,SEPARATE,MAXFILESIZE -C - CALL W3TAGB('TOCSBUFR',2012,0341,0083,'NP12') -C -C Read input values from namelist -C - READ(5,INPUT) - - PRINT * - PRINT *,'- Adding WMO Header: ',BULHED,' ',KWBX - IF (NCEP2STD) then - print *,'- Convert BUFR messages from NCEP format to standard', - & ' BUFR Format.' - else - print *,'- No conversion of BUFR messages will be done.' - endif - IF (SEPARATE) then - print *,'- Add Flag Field Separator and WMO Header to each ', - & 'BUFR message in file.' - else - print *,'- Add Flag Field Separator and WMO Header once at', - & ' beginning of file.' - allocate(filebuf(MAXFILESIZE)) - endif - PRINT * - -C -C Read output BUFR file name from FORT -C environment variable, and open file. -C - envvar='FORT ' - write(envvar(5:6),fmt='(I2)') outbufr - call get_environment_variable(envvar,fileo) - call baopenw(outbufr,fileo,iret1) - if ( iret1 .ne. 0 ) then - write(6,fmt='(" Error opening BUFR file: ",A80)') fileo - write(6,fmt='(" baopenw error = ",I5)') iret1 - stop 20 - endif -C -C Open input NCEP formatted BUFR file, if NCEP2STD = .true. -C - if (NCEP2STD) then - call OPENBF(INBUFR,'IN',INBUFR) - CALL MAXOUT(0) - call OPENBF(TMPBUFR,'NUL',INBUFR) - CALL STDMSG('Y') - endif - -C -C Get system date and time -C - call w3utcdat(itime) -C -C loop through input control records. -C - NBUL = 0 - nrec = 0 - itot = 0 - foreachbufrmessage: do - - if (NCEP2STD) then - if ( IREADMG (INBUFR,SUBSET,JDATE) .ne. 0 ) exit - if ( NMSUB(INBUFR) .gt. 0 ) then - nrec = nrec + 1 - CALL OPENMG (TMPBUFR,SUBSET,JDATE) - DO WHILE ( ICOPYSB(INBUFR,TMPBUFR) .eq. 0 ) - CONTINUE - END DO - CALL WRITSA( (-1)*TMPBUFR, MXSIZED4, MBAY, LMBAY) - else - cycle - endif - else - read(INBUFR,iostat=ios) BUFR -C print *,'Error reading message from input BUFR file.', -C & ' iostat = ',ios - if ( ios .le. 
0 ) then - exit - endif - nrec = nrec + 1 - endif -C -C Extract BUFR edition number - ied = iupbs01(MBAY,'BEN') -C Calculate length of BUFR message - if (ied.le.1) then - call getlens(MBAY,5,len0,len1,len2,len3,len4,len5) - ILEN = len0+len1+len2+len3+len4+len5 - else - ILEN = iupbs01(MBAY,'LENM') - endif -C Check ending 7777 to see if we have a complete BUFR message - ctemp=BUFR(ILEN-3)//BUFR(ILEN-2)//BUFR(ILEN-1)//BUFR(ILEN) - if ( ctemp.ne.'7777') then - print *,' INVALID BUFR MESSAGE FOUND...SKIPPING ' - exit - endif -C -C MAKE WMO HEADER -C - iday=ITIME(3) - hour=ITIME(5) - CALL MAKWMO (BULHED,iday,hour,KWBX,WMOHDR) -C - NBUL = NBUL + 1 -C - IF (SEPARATE) THEN -C -C ADD Flag Field Separator AND WMO HEADERS -C TO BUFR MESSAGE. WRITE BUFR MESSAGE IN FILE -C - call mkfldsep(csep,iopt,insize,ilen+lenhead,lenout) - call wryte(outbufr,lenout,csep) - call wryte(outbufr,lenhead,WMOHDR) - call wryte(outbufr,ilen,bufr) - ELSE -C -C APPEND NEW BUFR MESSAGE TO filebuf ARRAY -C - if ((itot+ilen).lt.(MAXFILESIZE-101)) then - filebuf(itot+1:itot+ilen)=BUFR(1:ilen) - itot=itot+ilen - else - print *,' Internal Buffer of ',MAXFILESIZE,' bytes is ', - & 'full. Increase MAXFILESIZE in NAMELIST.' - exit - endif - ENDIF -C - enddo foreachbufrmessage -C - IF (.not.SEPARATE) THEN -C -C ADD Flag Field Separator AND WMO HEADERS -C TO BUFR MESSAGE. WRITE BUFR MESSAGE IN FILE -C - call mkfldsep(csep,iopt,insize,itot+lenhead,lenout) - call wryte(outbufr,lenout,csep) - call wryte(outbufr,lenhead,WMOHDR) - call wryte(outbufr,itot,filebuf) - deallocate(filebuf) - ENDIF -C -C* CLOSING SECTION -C - IF (NBUL .EQ. 0 ) THEN - WRITE (6,FMT='('' SOMETHING WRONG WITH INPUT BUFR FILE...'', - & ''NOTHING WAS PROCESSED'')') - CALL W3TAGE('TOCSBUFR') - call errexit(30) - ELSE - CALL BACLOSE (OUTBUFR,iret) - WRITE (6,FMT='(//,'' ******** RECAP OF THIS EXECUTION '', - & ''********'',/,5X,''READ '',I6,'' BUFR MESSAGES'', - & /,5X,''WROTE '',I6,'' BULLETINS OUT FOR TRANSMISSION'', - & //)') NREC, NBUL - ENDIF -C - CALL W3TAGE('TOCSBUFR') - STOP - END diff --git a/sorc/ufs_model.fd_gsl/FV3/ccpp/suites/suite_FV3_GFS_v17_p8_thompson.xml b/sorc/ufs_model.fd_gsl/FV3/ccpp/suites/suite_FV3_GFS_v17_p8_thompson.xml new file mode 100644 index 0000000000..db415a6cf4 --- /dev/null +++ b/sorc/ufs_model.fd_gsl/FV3/ccpp/suites/suite_FV3_GFS_v17_p8_thompson.xml @@ -0,0 +1,95 @@ + + + + + + + GFS_time_vary_pre + GFS_rrtmg_setup + GFS_rad_time_vary + GFS_phys_time_vary + + + + + GFS_suite_interstitial_rad_reset + GFS_rrtmg_pre + GFS_radiation_surface + rad_sw_pre + rrtmg_sw + rrtmg_sw_post + rrtmg_lw_pre + rrtmg_lw + rrtmg_lw_post + GFS_rrtmg_post + + + + + GFS_suite_interstitial_phys_reset + GFS_suite_stateout_reset + get_prs_fv3 + GFS_suite_interstitial_1 + GFS_surface_generic_pre + GFS_surface_composites_pre + dcyc2t3 + GFS_surface_composites_inter + GFS_suite_interstitial_2 + + + + sfc_diff + GFS_surface_loop_control_part1 + sfc_nst_pre + sfc_nst + sfc_nst_post + noahmpdrv + sfc_sice + GFS_surface_loop_control_part2 + + + + GFS_surface_composites_post + sfc_diag + sfc_diag_post + GFS_surface_generic_post + GFS_PBL_generic_pre + satmedmfvdifq + GFS_PBL_generic_post + GFS_GWD_generic_pre + unified_ugwp + unified_ugwp_post + GFS_GWD_generic_post + GFS_suite_stateout_update + ozphys_2015 + h2ophys + get_phi_fv3 + GFS_suite_interstitial_3 + GFS_DCNV_generic_pre + samfdeepcnv + GFS_DCNV_generic_post + GFS_SCNV_generic_pre + samfshalcnv + GFS_SCNV_generic_post + GFS_suite_interstitial_4 + cnvc90 + GFS_MP_generic_pre + mp_thompson_pre + + + mp_thompson + 
+ + mp_thompson_post + GFS_MP_generic_post + maximum_hourly_diagnostics + + + + + GFS_stochastics + phys_tend + + + + diff --git a/sorc/ufs_model.fd_gsl/FV3/upp/parm/postcntrl_gfs.xml b/sorc/ufs_model.fd_gsl/FV3/upp/parm/postcntrl_gfs.xml index 63a80c0771..c0827caf16 100755 --- a/sorc/ufs_model.fd_gsl/FV3/upp/parm/postcntrl_gfs.xml +++ b/sorc/ufs_model.fd_gsl/FV3/upp/parm/postcntrl_gfs.xml @@ -314,8 +314,8 @@ HLCY_ON_SPEC_HGT_LVL_ABOVE_GRND - 3000. - 0. + 3000. 1000. + 0. 0. 4.0 @@ -445,7 +445,7 @@ - VIS_ON_SURFACE + GSD_VIS_ON_SURFACE 6.0 @@ -1199,6 +1199,71 @@ 3.0 + + ACM_GRAUPEL_ON_SURFACE + 6.0 + + + + BUCKET_GRAUPEL_ON_SURFACE + 6.0 + + + + ACM_FRAIN_ON_SURFACE + 4.0 + + + + BUCKET_FRAIN_ON_SURFACE + 4.0 + + + + ACM_SNOWFALL_ON_SURFACE + 4.0 + + + + BUCKET_SNOWFALL_ON_SURFACE + 4.0 + + + + AER_OPT_GFS_at550 + 9.0 + + + + DUST_AER_OPT_GFS_at550 + 9.0 + + + + SEASALT_AER_OPT_GFS_at550 + 9.0 + + + + SULFATE_AER_OPT_GFS_at550 + 9.0 + + + + ORGANIC_CARBON_AER_OPT_GFS_at550 + 9.0 + + + + BLACK_CARBON_AER_OPT_GFS_at550 + 9.0 + + + + SDEN_ON_SURFACE + 6.0 + + diff --git a/sorc/ufs_model.fd_gsl/FV3/upp/parm/postcntrl_gfs_two.xml b/sorc/ufs_model.fd_gsl/FV3/upp/parm/postcntrl_gfs_two.xml index 9350b14bcd..a38c0ed00b 100755 --- a/sorc/ufs_model.fd_gsl/FV3/upp/parm/postcntrl_gfs_two.xml +++ b/sorc/ufs_model.fd_gsl/FV3/upp/parm/postcntrl_gfs_two.xml @@ -314,8 +314,8 @@ HLCY_ON_SPEC_HGT_LVL_ABOVE_GRND - 3000. - 0. + 3000. 1000. + 0. 0. 4.0 @@ -445,7 +445,7 @@ - VIS_ON_SURFACE + GSD_VIS_ON_SURFACE 6.0 @@ -1199,6 +1199,71 @@ 3.0 + + ACM_GRAUPEL_ON_SURFACE + 6.0 + + + + BUCKET_GRAUPEL_ON_SURFACE + 6.0 + + + + ACM_FRAIN_ON_SURFACE + 4.0 + + + + BUCKET_FRAIN_ON_SURFACE + 4.0 + + + + ACM_SNOWFALL_ON_SURFACE + 4.0 + + + + BUCKET_SNOWFALL_ON_SURFACE + 4.0 + + + + AER_OPT_GFS_at550 + 9.0 + + + + DUST_AER_OPT_GFS_at550 + 9.0 + + + + SEASALT_AER_OPT_GFS_at550 + 9.0 + + + + SULFATE_AER_OPT_GFS_at550 + 9.0 + + + + ORGANIC_CARBON_AER_OPT_GFS_at550 + 9.0 + + + + BLACK_CARBON_AER_OPT_GFS_at550 + 9.0 + + + + SDEN_ON_SURFACE + 6.0 + + @@ -1255,6 +1320,11 @@ 4.0 + + FDNSSTMP_ON_SURFACE + 4.0 + + TSOIL_ON_DEPTH_BEL_LAND_SFC 2 2 2 2 diff --git a/sorc/ufs_model.fd_gsl/FV3/upp/parm/postxconfig-NT-GFS-TWO.txt b/sorc/ufs_model.fd_gsl/FV3/upp/parm/postxconfig-NT-GFS-TWO.txt index a74acdcc00..a87b89c9b5 100644 --- a/sorc/ufs_model.fd_gsl/FV3/upp/parm/postxconfig-NT-GFS-TWO.txt +++ b/sorc/ufs_model.fd_gsl/FV3/upp/parm/postxconfig-NT-GFS-TWO.txt @@ -1,5 +1,5 @@ 2 -111 +112 204 GFSPRS 0 @@ -7840,6 +7840,43 @@ surface ? ? ? +549 +FDNSSTMP_ON_SURFACE +? +1 +tmpl4_0 +FDNSSTMP +? +? +surface +0 +? +0 +? +? +0 +? +0 +? +? +? +0 +0.0 +0 +0.0 +? +0 +0.0 +0 +0.0 +1 +4.0 +0 +0 +0 +? +? +? 116 TSOIL_ON_DEPTH_BEL_LAND_SFC ? diff --git a/sorc/ufs_utils.fd_gsl/util/gdas_init/copy_coldstart_files.sh b/sorc/ufs_utils.fd_gsl/util/gdas_init/copy_coldstart_files.sh new file mode 100755 index 0000000000..0b290a963d --- /dev/null +++ b/sorc/ufs_utils.fd_gsl/util/gdas_init/copy_coldstart_files.sh @@ -0,0 +1,79 @@ +#!/bin/bash + +# Copy files from the working directory to the +# output directory. 
+ +copy_data() +{ + + set -x + + MEM=$1 + + #JKHSAVEDIR_MODEL_DATA=$SUBDIR/model_data/atmos/input + SAVEDIR_MODEL_DATA=${ICSDIR}/${yy}${mm}${dd}${hh}/${CDUMP}/${CASE}/INPUT + mkdir -p $SAVEDIR_MODEL_DATA + cp gfs_ctrl.nc $SAVEDIR_MODEL_DATA + touch $SAVEDIR_MODEL_DATA/chgres_done ## JKH + + TMPDIR=$SUBDIR/model_data/atmos + mkdir -p $TMPDIR + ln -fs $SAVEDIR_MODEL_DATA $TMPDIR/input + + for tile in 'tile1' 'tile2' 'tile3' 'tile4' 'tile5' 'tile6' + do + cp out.atm.${tile}.nc ${SAVEDIR_MODEL_DATA}/gfs_data.${tile}.nc + cp out.sfc.${tile}.nc ${SAVEDIR_MODEL_DATA}/sfc_data.${tile}.nc + done + + if [ ${MEM} == 'gdas' ]; then + SAVEDIR_ANALYSIS=$SUBDIR/analysis/atmos + mkdir -p $SAVEDIR_ANALYSIS + cp ${INPUT_DATA_DIR}/*abias* $SAVEDIR_ANALYSIS/ + cp ${INPUT_DATA_DIR}/*radstat $SAVEDIR_ANALYSIS/ + fi +} + +set -x + +MEMBER=$1 +OUTDIR=$2 +yy=$3 +mm=$4 +dd=$5 +hh=$6 +INPUT_DATA_DIR=$7 +ICS_DIR=$8 + +if [ ${MEMBER} == 'hires' ]; then + MEMBER='gdas' +fi + +set +x +echo 'COPY DATA TO OUTPUT DIRECTORY' +set -x + +if [ ${MEMBER} == 'gdas' ] || [ ${MEMBER} == 'gfs' ]; then + SUBDIR=$OUTDIR/${MEMBER}.${yy}${mm}${dd}/${hh} + rm -fr $SUBDIR + copy_data ${MEMBER} +elif [ ${MEMBER} == 'enkf' ]; then # v16 retro data only. + MEMBER=1 + while [ $MEMBER -le 80 ]; do + if [ $MEMBER -lt 10 ]; then + MEMBER_CH="00${MEMBER}" + else + MEMBER_CH="0${MEMBER}" + fi + SUBDIR=$OUTDIR/enkfgdas.${yy}${mm}${dd}/${hh}/mem${MEMBER_CH} + rm -fr $SUBDIR + copy_data ${MEMBER} + MEMBER=$(( $MEMBER + 1 )) + done +else + SUBDIR=$OUTDIR/enkfgdas.${yy}${mm}${dd}/${hh}/mem${MEMBER} + rm -fr $SUBDIR + copy_data ${MEMBER} +fi + +exit 0 diff --git a/sorc/ufs_utils.fd_gsl/util/gdas_init/run_pre-v14.chgres.sh b/sorc/ufs_utils.fd_gsl/util/gdas_init/run_pre-v14.chgres.sh new file mode 100755 index 0000000000..67fe1b3d83 --- /dev/null +++ b/sorc/ufs_utils.fd_gsl/util/gdas_init/run_pre-v14.chgres.sh @@ -0,0 +1,79 @@ +#!/bin/bash + +#---------------------------------------------------------------- +# Run chgres using pre-v14 gfs data (sigio/sfcio format +# from the spectral gfs). +#---------------------------------------------------------------- + +set -x + +MEMBER=$1 + +FIX_FV3=$UFS_DIR/fix +FIX_ORO=${FIX_FV3}/orog +FIX_AM=${FIX_FV3}/am + +WORKDIR=${WORKDIR:-$OUTDIR/work.${MEMBER}} + +if [ "${MEMBER}" = "gdas" ] || [ "${MEMBER}" = "gfs" ]; then + CTAR=${CRES_HIRES} + INPUT_DATA_DIR="${EXTRACT_DIR}/${MEMBER}.${yy}${mm}${dd}/${hh}" + if [ "${MEMBER}" = "gdas" ]; then + ATMFILE="gdas1.t${hh}z.sanl" + SFCFILE="gdas1.t${hh}z.sfcanl" + else + ATMFILE="gfs.t${hh}z.sanl" + SFCFILE="gfs.t${hh}z.sfcanl" + fi +else + CTAR=${CRES_ENKF} + INPUT_DATA_DIR="${EXTRACT_DIR}/enkf.${yy}${mm}${dd}/${hh}/mem${MEMBER}" + ATMFILE="siganl_${yy}${mm}${dd}${hh}_mem${MEMBER}" + SFCFILE="sfcanl_${yy}${mm}${dd}${hh}_mem${MEMBER}" +fi + +rm -fr $WORKDIR +mkdir -p $WORKDIR +cd $WORKDIR + +source $GDAS_INIT_DIR/set_fixed_files.sh + +cat << EOF > fort.41 + +&config + fix_dir_target_grid="${FIX_ORO}/${ORO_DIR}/fix_sfc" + mosaic_file_target_grid="${FIX_ORO}/${ORO_DIR}/${CTAR}_mosaic.nc" + orog_dir_target_grid="${FIX_ORO}/${ORO_DIR}" + orog_files_target_grid="${ORO_NAME}.tile1.nc","${ORO_NAME}.tile2.nc","${ORO_NAME}.tile3.nc","${ORO_NAME}.tile4.nc","${ORO_NAME}.tile5.nc","${ORO_NAME}.tile6.nc" + data_dir_input_grid="${INPUT_DATA_DIR}" + atm_files_input_grid="$ATMFILE" + sfc_files_input_grid="$SFCFILE" + vcoord_file_target_grid="${FIX_AM}/global_hyblev.l${LEVS}.txt" + cycle_mon=$mm + cycle_day=$dd + cycle_hour=$hh + convert_atm=.true. + convert_sfc=.true. + convert_nst=.false. 
+ input_type="gfs_sigio" + tracers_input="spfh","o3mr","clwmr" + tracers="sphum","o3mr","liq_wat" + thomp_mp_climo_file="${FIX_AM}/Thompson_MP_MONTHLY_CLIMO.nc" +/ +EOF + +$APRUN $EXEC_DIR/chgres_cube +rc=$? + +if [ $rc != 0 ]; then + exit $rc +fi + +$GDAS_INIT_DIR/copy_coldstart_files.sh $MEMBER $OUTDIR $yy $mm $dd $hh $INPUT_DATA_DIR $ICSDIR + +rm -fr $WORKDIR + +set +x +echo CHGRES COMPLETED FOR MEMBER $MEMBER + +exit 0 diff --git a/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v14.chgres.sh b/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v14.chgres.sh new file mode 100755 index 0000000000..9b2ac08efb --- /dev/null +++ b/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v14.chgres.sh @@ -0,0 +1,76 @@ +#!/bin/bash + +#---------------------------------------------------------------- +# Run chgres using gfs v14 data as input. +#---------------------------------------------------------------- + +set -x + +MEMBER=$1 + +FIX_FV3=$UFS_DIR/fix +FIX_ORO=${FIX_FV3}/orog +FIX_AM=${FIX_FV3}/am + +WORKDIR=${WORKDIR:-$OUTDIR/work.${MEMBER}} + +if [ "${MEMBER}" = "gdas" ] || [ "${MEMBER}" = "gfs" ]; then + CTAR=${CRES_HIRES} + INPUT_DATA_DIR="${EXTRACT_DIR}/${MEMBER}.${yy}${mm}${dd}/${hh}" + ATMFILE="${MEMBER}.t${hh}z.atmanl.nemsio" + SFCFILE="${MEMBER}.t${hh}z.sfcanl.nemsio" + NSTFILE="${MEMBER}.t${hh}z.nstanl.nemsio" +else + CTAR=${CRES_ENKF} + INPUT_DATA_DIR="${EXTRACT_DIR}/enkf.${yy}${mm}${dd}/${hh}/mem${MEMBER}" + ATMFILE="gdas.t${hh}z.ratmanl.mem${MEMBER}.nemsio" + SFCFILE="gdas.t${hh}z.sfcanl.mem${MEMBER}.nemsio" + NSTFILE="gdas.t${hh}z.nstanl.mem${MEMBER}.nemsio" +fi + +rm -fr $WORKDIR +mkdir -p $WORKDIR +cd $WORKDIR + +source $GDAS_INIT_DIR/set_fixed_files.sh + +cat << EOF > fort.41 + +&config + fix_dir_target_grid="${FIX_ORO}/${ORO_DIR}/fix_sfc" + mosaic_file_target_grid="${FIX_ORO}/${ORO_DIR}/${CTAR}_mosaic.nc" + orog_dir_target_grid="${FIX_ORO}/${ORO_DIR}" + orog_files_target_grid="${ORO_NAME}.tile1.nc","${ORO_NAME}.tile2.nc","${ORO_NAME}.tile3.nc","${ORO_NAME}.tile4.nc","${ORO_NAME}.tile5.nc","${ORO_NAME}.tile6.nc" + data_dir_input_grid="${INPUT_DATA_DIR}" + atm_files_input_grid="$ATMFILE" + sfc_files_input_grid="$SFCFILE" + nst_files_input_grid="$NSTFILE" + vcoord_file_target_grid="${FIX_AM}/global_hyblev.l${LEVS}.txt" + cycle_mon=$mm + cycle_day=$dd + cycle_hour=$hh + convert_atm=.true. + convert_sfc=.true. + convert_nst=.true. + input_type="gfs_gaussian_nemsio" + tracers="sphum","liq_wat","o3mr" + tracers_input="spfh","clwmr","o3mr" + thomp_mp_climo_file="${FIX_AM}/Thompson_MP_MONTHLY_CLIMO.nc" +/ +EOF + +$APRUN $EXEC_DIR/chgres_cube +rc=$? 
+ +if [ $rc != 0 ]; then + exit $rc +fi + +$GDAS_INIT_DIR/copy_coldstart_files.sh $MEMBER $OUTDIR $yy $mm $dd $hh $INPUT_DATA_DIR $ICSDIR + +rm -fr $WORKDIR + +set +x +echo CHGRES COMPLETED FOR MEMBER $MEMBER + +exit 0 diff --git a/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v15.chgres.gfs.sh b/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v15.chgres.gfs.sh index 5a8d5a6c51..6a9ef4ec7a 100755 --- a/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v15.chgres.gfs.sh +++ b/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v15.chgres.gfs.sh @@ -8,14 +8,13 @@ set -x FIX_FV3=$UFS_DIR/fix -FIX_ORO=${FIX_FV3}/fix_fv3_gmted2010 -FIX_AM=${FIX_FV3}/fix_am +FIX_ORO=${FIX_FV3}/orog +FIX_AM=${FIX_FV3}/am WORKDIR=${WORKDIR:-$OUTDIR/work.gfs} CTAR=${CRES_HIRES} INPUT_DATA_DIR="${EXTRACT_DIR}/gfs.${yy}${mm}${dd}/${hh}" -OUTDIR=$OUTDIR/gfs.${yy}${mm}${dd}/${hh}/atmos ATMFILE="gfs.t${hh}z.atmanl.nemsio" SFCFILE="gfs.t${hh}z.sfcanl.nemsio" @@ -23,17 +22,15 @@ rm -fr $WORKDIR mkdir -p $WORKDIR cd $WORKDIR -rm -fr $OUTDIR -mkdir -p $OUTDIR -mkdir -p $OUTDIR/INPUT +source $GDAS_INIT_DIR/set_fixed_files.sh cat << EOF > fort.41 &config - fix_dir_target_grid="${FIX_ORO}/${CTAR}/fix_sfc" - mosaic_file_target_grid="${FIX_ORO}/${CTAR}/${CTAR}_mosaic.nc" - orog_dir_target_grid="${FIX_ORO}/${CTAR}" - orog_files_target_grid="${CTAR}_oro_data.tile1.nc","${CTAR}_oro_data.tile2.nc","${CTAR}_oro_data.tile3.nc","${CTAR}_oro_data.tile4.nc","${CTAR}_oro_data.tile5.nc","${CTAR}_oro_data.tile6.nc" + fix_dir_target_grid="${FIX_ORO}/${ORO_DIR}/fix_sfc" + mosaic_file_target_grid="${FIX_ORO}/${ORO_DIR}/${CTAR}_mosaic.nc" + orog_dir_target_grid="${FIX_ORO}/${ORO_DIR}" + orog_files_target_grid="${ORO_NAME}.tile1.nc","${ORO_NAME}.tile2.nc","${ORO_NAME}.tile3.nc","${ORO_NAME}.tile4.nc","${ORO_NAME}.tile5.nc","${ORO_NAME}.tile6.nc" data_dir_input_grid="${INPUT_DATA_DIR}" atm_files_input_grid="$ATMFILE" sfc_files_input_grid="$SFCFILE" @@ -51,22 +48,14 @@ cat << EOF > fort.41 / EOF -$APRUN $UFS_DIR/exec/chgres_cube +$APRUN $EXEC_DIR/chgres_cube rc=$? 
if [ $rc != 0 ]; then exit $rc fi -mv gfs_ctrl.nc ${OUTDIR}/INPUT - -for tile in 'tile1' 'tile2' 'tile3' 'tile4' 'tile5' 'tile6' -do - mv out.atm.${tile}.nc ${OUTDIR}/INPUT/gfs_data.${tile}.nc - mv out.sfc.${tile}.nc ${OUTDIR}/INPUT/sfc_data.${tile}.nc -done - -touch $OUTDIR/gfs.t${hh}z.loginc.txt +$GDAS_INIT_DIR/copy_coldstart_files.sh gfs $OUTDIR $yy $mm $dd $hh $INPUT_DATA_DIR $ICSDIR rm -fr $WORKDIR diff --git a/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v15.chgres.sh b/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v15.chgres.sh index 0668fad48d..a5f0eeb41d 100755 --- a/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v15.chgres.sh +++ b/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v15.chgres.sh @@ -9,8 +9,8 @@ set -x MEMBER=$1 FIX_FV3=$UFS_DIR/fix -FIX_ORO=${FIX_FV3}/fix_fv3_gmted2010 -FIX_AM=${FIX_FV3}/fix_am +FIX_ORO=${FIX_FV3}/orog +FIX_AM=${FIX_FV3}/am date10=`$NDATE -6 $yy$mm$dd$hh` yy_d=$(echo $date10 | cut -c1-4) @@ -27,30 +27,26 @@ if [ ${MEMBER} == 'gdas' ]; then CTAR=${CRES_HIRES} INPUT_DATA_DIR="${EXTRACT_DIR}/gdas.${yy_d}${mm_d}${dd_d}/${hh_d}/RESTART" RADSTAT_DATA_DIR="${EXTRACT_DIR}/gdas.${yy}${mm}${dd}/${hh}" - OUTDIR=$OUTDIR/gdas.${yy}${mm}${dd}/${hh}/atmos else CINP=${CINP:-"C384"} CTAR=${CRES_ENKF} INPUT_DATA_DIR="${EXTRACT_DIR}/enkfgdas.${yy_d}${mm_d}${dd_d}/${hh_d}/mem${MEMBER}/RESTART" RADSTAT_DATA_DIR="${EXTRACT_DIR}/enkfgdas.${yy}${mm}${dd}/${hh}/mem${MEMBER}" - OUTDIR=$OUTDIR/enkfgdas.${yy}${mm}${dd}/${hh}/atmos/mem${MEMBER} fi rm -fr $WORKDIR mkdir -p $WORKDIR cd $WORKDIR -rm -fr $OUTDIR -mkdir -p $OUTDIR -mkdir -p $OUTDIR/INPUT +source $GDAS_INIT_DIR/set_fixed_files.sh cat << EOF > fort.41 &config - fix_dir_target_grid="${FIX_ORO}/${CTAR}/fix_sfc" - mosaic_file_target_grid="${FIX_ORO}/${CTAR}/${CTAR}_mosaic.nc" - orog_dir_target_grid="${FIX_ORO}/${CTAR}" - orog_files_target_grid="${CTAR}_oro_data.tile1.nc","${CTAR}_oro_data.tile2.nc","${CTAR}_oro_data.tile3.nc","${CTAR}_oro_data.tile4.nc","${CTAR}_oro_data.tile5.nc","${CTAR}_oro_data.tile6.nc" + fix_dir_target_grid="${FIX_ORO}/${ORO_DIR}/fix_sfc" + mosaic_file_target_grid="${FIX_ORO}/${ORO_DIR}/${CTAR}_mosaic.nc" + orog_dir_target_grid="${FIX_ORO}/${ORO_DIR}" + orog_files_target_grid="${ORO_NAME}.tile1.nc","${ORO_NAME}.tile2.nc","${ORO_NAME}.tile3.nc","${ORO_NAME}.tile4.nc","${ORO_NAME}.tile5.nc","${ORO_NAME}.tile6.nc" mosaic_file_input_grid="${FIX_ORO}/${CINP}/${CINP}_mosaic.nc" orog_dir_input_grid="${FIX_ORO}/${CINP}" orog_files_input_grid="${CINP}_oro_data.tile1.nc","${CINP}_oro_data.tile2.nc","${CINP}_oro_data.tile3.nc","${CINP}_oro_data.tile4.nc","${CINP}_oro_data.tile5.nc","${CINP}_oro_data.tile6.nc" @@ -71,27 +67,14 @@ cat << EOF > fort.41 / EOF -$APRUN $UFS_DIR/exec/chgres_cube +$APRUN $EXEC_DIR/chgres_cube rc=$? 
if [ $rc != 0 ]; then exit $rc fi -mv gfs_ctrl.nc ${OUTDIR}/INPUT - -for tile in 'tile1' 'tile2' 'tile3' 'tile4' 'tile5' 'tile6' -do - mv out.atm.${tile}.nc ${OUTDIR}/INPUT/gfs_data.${tile}.nc - mv out.sfc.${tile}.nc ${OUTDIR}/INPUT/sfc_data.${tile}.nc -done - -if [ ${MEMBER} == 'gdas' ]; then - cp ${RADSTAT_DATA_DIR}/* $OUTDIR - touch $OUTDIR/gdas.t${hh}z.loginc.txt -else - touch $OUTDIR/enkfgdas.t${hh}z.loginc.txt -fi +$GDAS_INIT_DIR/copy_coldstart_files.sh $MEMBER $OUTDIR $yy $mm $dd $hh $RADSTAT_DATA_DIR $ICSDIR rm -fr $WORKDIR diff --git a/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v16.chgres.sh b/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v16.chgres.sh index 99f5049e18..34312b3210 100755 --- a/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v16.chgres.sh +++ b/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v16.chgres.sh @@ -1,18 +1,5 @@ #!/bin/bash -copy_data() -{ - -mkdir -p $SAVEDIR -cp gfs_ctrl.nc $SAVEDIR - -for tile in 'tile1' 'tile2' 'tile3' 'tile4' 'tile5' 'tile6' -do - cp out.atm.${tile}.nc ${SAVEDIR}/gfs_data.${tile}.nc - cp out.sfc.${tile}.nc ${SAVEDIR}/sfc_data.${tile}.nc -done -} - #--------------------------------------------------------------------------- # Run chgres using v16 netcdf history data as input. These history # files are part of the OPS v16 gfs/gdas/enkf tarballs, and the @@ -26,8 +13,8 @@ set -x MEMBER=$1 FIX_FV3=$UFS_DIR/fix -FIX_ORO=${FIX_FV3}/fix_fv3_gmted2010 -FIX_AM=${FIX_FV3}/fix_am +FIX_ORO=${FIX_FV3}/orog +FIX_AM=${FIX_FV3}/am WORKDIR=${WORKDIR:-$OUTDIR/work.${MEMBER}} @@ -59,13 +46,15 @@ rm -fr $WORKDIR mkdir -p $WORKDIR cd $WORKDIR +source $GDAS_INIT_DIR/set_fixed_files.sh + cat << EOF > fort.41 &config - fix_dir_target_grid="${FIX_ORO}/${CTAR}/fix_sfc" - mosaic_file_target_grid="${FIX_ORO}/${CTAR}/${CTAR}_mosaic.nc" - orog_dir_target_grid="${FIX_ORO}/${CTAR}" - orog_files_target_grid="${CTAR}_oro_data.tile1.nc","${CTAR}_oro_data.tile2.nc","${CTAR}_oro_data.tile3.nc","${CTAR}_oro_data.tile4.nc","${CTAR}_oro_data.tile5.nc","${CTAR}_oro_data.tile6.nc" + fix_dir_target_grid="${FIX_ORO}/${ORO_DIR}/fix_sfc" + mosaic_file_target_grid="${FIX_ORO}/${ORO_DIR}/${CTAR}_mosaic.nc" + orog_dir_target_grid="${FIX_ORO}/${ORO_DIR}" + orog_files_target_grid="${ORO_NAME}.tile1.nc","${ORO_NAME}.tile2.nc","${ORO_NAME}.tile3.nc","${ORO_NAME}.tile4.nc","${ORO_NAME}.tile5.nc","${ORO_NAME}.tile6.nc" data_dir_input_grid="${INPUT_DATA_DIR}" atm_files_input_grid="${ATMFILE}" sfc_files_input_grid="${SFCFILE}" @@ -83,32 +72,14 @@ cat << EOF > fort.41 / EOF -$APRUN $UFS_DIR/exec/chgres_cube +$APRUN $EXEC_DIR/chgres_cube rc=$? if [ $rc != 0 ]; then exit $rc fi -if [ ${MEMBER} == 'gdas' ] || [ ${MEMBER} == 'gfs' ]; then - #SAVEDIR=$OUTDIR/${MEMBER}.${yy}${mm}${dd}/${hh}/atmos/INPUT - SAVEDIR=${ICSDIR}/${yy}${mm}${dd}${hh}/${CDUMP}/${CASE}/INPUT - copy_data - touch $SAVEDIR/../${MEMBER}.t${hh}z.loginc.txt - touch $SAVEDIR/chgres_done ## JKH - if [ ${MEMBER} == 'gdas' ]; then - cp ${INPUT_DATA_DIR}/*abias* $SAVEDIR/.. - cp ${INPUT_DATA_DIR}/*radstat $SAVEDIR/.. 
- fi - ## JKH - TMPDIR=$OUTDIR/${MEMBER}.${yy}${mm}${dd}/${hh}/atmos - mkdir -p $TMPDIR - ln -fs $SAVEDIR $TMPDIR -else - SAVEDIR=$OUTDIR/enkfgdas.${yy}${mm}${dd}/${hh}/atmos/mem${MEMBER}/INPUT - copy_data - touch $SAVEDIR/../enkfgdas.t${hh}z.loginc.txt -fi +$GDAS_INIT_DIR/copy_coldstart_files.sh $MEMBER $OUTDIR $yy $mm $dd $hh $INPUT_DATA_DIR $ICSDIR rm -fr $WORKDIR diff --git a/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v16retro.chgres.sh b/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v16retro.chgres.sh index 31ef92cdb4..a30257e0ce 100755 --- a/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v16retro.chgres.sh +++ b/sorc/ufs_utils.fd_gsl/util/gdas_init/run_v16retro.chgres.sh @@ -1,18 +1,5 @@ #!/bin/bash -copy_data() -{ - -mkdir -p $SAVEDIR -cp gfs_ctrl.nc $SAVEDIR - -for tile in 'tile1' 'tile2' 'tile3' 'tile4' 'tile5' 'tile6' -do - cp out.atm.${tile}.nc ${SAVEDIR}/gfs_data.${tile}.nc - cp out.sfc.${tile}.nc ${SAVEDIR}/sfc_data.${tile}.nc -done -} - #--------------------------------------------------------------------------- # Run chgres for gdas/enkf members using v16 parallel data as input. # The enkf data is not saved. So the coldstart files for all @@ -29,8 +16,8 @@ set -x MEMBER=$1 FIX_FV3=$UFS_DIR/fix -FIX_ORO=${FIX_FV3}/fix_fv3_gmted2010 -FIX_AM=${FIX_FV3}/fix_am +FIX_ORO=${FIX_FV3}/orog +FIX_AM=${FIX_FV3}/am date10=`$NDATE -6 $yy$mm$dd$hh` yy_d=$(echo $date10 | cut -c1-4) @@ -69,13 +56,15 @@ rm -fr $WORKDIR mkdir -p $WORKDIR cd $WORKDIR +source $GDAS_INIT_DIR/set_fixed_files.sh + cat << EOF > fort.41 &config - fix_dir_target_grid="${FIX_ORO}/${CTAR}/fix_sfc" - mosaic_file_target_grid="${FIX_ORO}/${CTAR}/${CTAR}_mosaic.nc" - orog_dir_target_grid="${FIX_ORO}/${CTAR}" - orog_files_target_grid="${CTAR}_oro_data.tile1.nc","${CTAR}_oro_data.tile2.nc","${CTAR}_oro_data.tile3.nc","${CTAR}_oro_data.tile4.nc","${CTAR}_oro_data.tile5.nc","${CTAR}_oro_data.tile6.nc" + fix_dir_target_grid="${FIX_ORO}/${ORO_DIR}/fix_sfc" + mosaic_file_target_grid="${FIX_ORO}/${ORO_DIR}/${CTAR}_mosaic.nc" + orog_dir_target_grid="${FIX_ORO}/${ORO_DIR}" + orog_files_target_grid="${ORO_NAME}.tile1.nc","${ORO_NAME}.tile2.nc","${ORO_NAME}.tile3.nc","${ORO_NAME}.tile4.nc","${ORO_NAME}.tile5.nc","${ORO_NAME}.tile6.nc" mosaic_file_input_grid="${FIX_ORO}/${CINP}/${CINP}_mosaic.nc" orog_dir_input_grid="${FIX_ORO}/${CINP}" orog_files_input_grid="${CINP}_oro_data.tile1.nc","${CINP}_oro_data.tile2.nc","${CINP}_oro_data.tile3.nc","${CINP}_oro_data.tile4.nc","${CINP}_oro_data.tile5.nc","${CINP}_oro_data.tile6.nc" @@ -96,33 +85,14 @@ cat << EOF > fort.41 / EOF -$APRUN $UFS_DIR/exec/chgres_cube +$APRUN $EXEC_DIR/chgres_cube rc=$? if [ $rc != 0 ]; then exit $rc fi -if [ ${MEMBER} == 'hires' ]; then - SAVEDIR=$OUTDIR/gdas.${yy}${mm}${dd}/${hh}/atmos/INPUT - copy_data - cp $RADSTAT_DATA_DIR/*abias* $SAVEDIR/.. - cp $RADSTAT_DATA_DIR/*radstat $SAVEDIR/.. 
- touch $SAVEDIR/../gdas.t${hh}z.loginc.txt -else - MEMBER=1 - while [ $MEMBER -le 80 ]; do - if [ $MEMBER -lt 10 ]; then - MEMBER_CH="00${MEMBER}" - else - MEMBER_CH="0${MEMBER}" - fi - SAVEDIR=$OUTDIR/enkfgdas.${yy}${mm}${dd}/${hh}/atmos/mem${MEMBER_CH}/INPUT - copy_data - touch $SAVEDIR/../enkfgdas.t${hh}z.loginc.txt - MEMBER=$(( $MEMBER + 1 )) - done -fi +$GDAS_INIT_DIR/copy_coldstart_files.sh $MEMBER $OUTDIR $yy $mm $dd $hh $RADSTAT_DATA_DIR $ICSDIR rm -fr $WORKDIR diff --git a/sorc/vint.fd/makefile b/sorc/vint.fd/makefile deleted file mode 100755 index 06647d1fc6..0000000000 --- a/sorc/vint.fd/makefile +++ /dev/null @@ -1,27 +0,0 @@ -SHELL= /bin/sh -ISIZE = 4 -RSIZE = 8 -COMP= ifort -##INC = /contrib/nceplibs/nwprod/lib/incmod/g2_d -##LIBS= -L/contrib/nceplibs/nwprod/lib -lw3emc_d -lw3nco_d -lg2_d -lbacio_4 -ljasper -lpng -lz -LDFLAGS= -# FFLAGS= -O3 -I $(INC) -i$(ISIZE) -r$(RSIZE) -# DEBUG= -check all -debug all -traceback -FFLAGS= -O2 -g -traceback -I $(INC) -i$(ISIZE) -r$(RSIZE) - -vint: vint.f - @echo " " - @echo " Compiling the interpolation program....." - $(COMP) $(FFLAGS) $(LDFLAGS) vint.f $(LIBS) -o vint.x - @echo " " - -.PHONY: clean - -CMD = vint.x - -clean: - -rm -f *.o *.mod - -install: - mv $(CMD) ../../exec/$(CMD) - diff --git a/sorc/vint.fd/vint.f b/sorc/vint.fd/vint.f deleted file mode 100755 index e4d6db807c..0000000000 --- a/sorc/vint.fd/vint.f +++ /dev/null @@ -1,1239 +0,0 @@ - program vint -c -c ABSTRACT: This program interpolates from various pressure levels -c onto regularly-spaced, 50-mb vertical levels. The intent is that -c we can use data with relatively coarse vertical resolution to -c get data on the necessary 50-mb intervals that we need for Bob -c Hart's cyclone phase space. For each model, we will need to read -c in a control file that contains the levels that we are -c interpolating from. -c -c Written by Tim Marchok - - USE params - USE grib_mod - - implicit none - - type(gribfield) :: holdgfld - integer, parameter :: lugb=11,lulv=16,lugi=31,lout=51,maxlev=200 - integer kpds(200),kgds(200) - integer nlevsin,iriret,iogret,kf,iggret,igdret,iidret,ixo,k,n - integer iha,iho,iva,irfa,iodret,ifcsthour,iia,iparm,nlevsout - integer gribver,g2_jpdtn - integer ilevs(maxlev) - real, allocatable :: xinpdat(:,:),xoutdat(:,:),xoutlevs_p(:) - logical(1), allocatable :: valid_pt(:),readflag(:) - - namelist/timein/ifcsthour,iparm,gribver,g2_jpdtn -c - read (5,NML=timein,END=201) - 201 continue - print *,' ' - print *,'*----------------------------------------------------*' - print *,' ' - print *,' +++ Top of vint +++' - print *,' ' - print *,'After namelist read, input forecast hour = ',ifcsthour - print *,' input grib parm = ',iparm - print *,' GRIB version= ',gribver - print *,' GRIB2 JPDTN= g2_jpdtn= ' - & ,g2_jpdtn - - if (iparm == 7 .or. iparm == 156) then - nlevsout = 13 ! dealing with height - else - nlevsout = 5 ! dealing with temperature - endif - - allocate (xoutlevs_p(nlevsout),stat=ixo) - if (ixo /= 0) then - print *,' ' - print *,'!!! ERROR in vint allocating the xoutlevs_p array.' - print *,'!!! ixo= ',ixo - print *,' ' - goto 899 - endif - - do k = 1,nlevsout - xoutlevs_p(k) = 300. + float((k-1)*50) - enddo - - ilevs = -999 - call read_input_levels (lulv,maxlev,nlevsin,ilevs,iriret) - - if (iriret /= 0) then - print *,' ' - print *,'!!! ERROR in vint. ' - print *,'!!! RETURN CODE FROM read_input_levels /= 0' - print *,'!!! RETURN CODE = iriret = ',iriret - print *,'!!! EXITING....' 
- print *,' ' - goto 899 - endif - - call open_grib_files (lugb,lugi,lout,gribver,iogret) - - if (iogret /= 0) then - print '(/,a45,i4,/)','!!! ERROR: in vint open_grib_files, rc= ' - & ,iogret - goto 899 - endif - - call getgridinfo (lugb,lugi,kf,kpds,kgds,holdgfld,ifcsthour,iparm - & ,gribver,g2_jpdtn,iggret) - - allocate (xinpdat(kf,nlevsin),stat=iha) - allocate (xoutdat(kf,nlevsout),stat=iho) - allocate (valid_pt(kf),stat=iva) - allocate (readflag(nlevsin),stat=irfa) - if (iha /= 0 .or. iho /= 0 .or. iva /= 0 .or. irfa /= 0) then - print *,' ' - print *,'!!! ERROR in vint.' - print *,'!!! ERROR allocating the xinpdat, readflag, or the' - print *,'!!! valid_pt array, iha= ',iha,' iva= ',iva - print *,'!!! irfa= ',irfa,' iho= ',iho - print *,' ' - goto 899 - endif - - print *,'hold check, holdgfld%ipdtlen = ',holdgfld%ipdtlen - do n = 1,holdgfld%ipdtlen - print *,'hold check, n= ',n,' holdgfld%ipdtmpl= ' - & ,holdgfld%ipdtmpl(n) - enddo - - call getdata (lugb,lugi,kf,valid_pt,nlevsin,ilevs,maxlev - & ,readflag,xinpdat,ifcsthour,iparm,gribver,g2_jpdtn - & ,igdret) - - call interp_data (kf,valid_pt,nlevsin,ilevs,maxlev,readflag - & ,xinpdat,xoutdat,xoutlevs_p,nlevsout,iidret) - - call output_data (lout,kf,kpds,kgds,holdgfld,xoutdat,valid_pt - & ,xoutlevs_p,nlevsout,gribver,iodret) - - deallocate (xinpdat) - deallocate (xoutdat) - deallocate (valid_pt) - deallocate (readflag) - deallocate (xoutlevs_p) - - 899 continue -c - stop - end -c -c--------------------------------------------------------------------- -c -c--------------------------------------------------------------------- - subroutine read_input_levels (lulv,maxlev,nlevsin,ilevs,iriret) -c -c ABSTRACT: This subroutine reads in a text file that contains -c the number of input pressure levels for a given model. The -c format of the file goes like this, from upper levels to -c lower, for example: -c -c 1 200 -c 2 400 -c 3 500 -c 4 700 -c 5 850 -c 6 925 -c 7 1000 -c -c - implicit none - - integer lulv,nlevsin,maxlev,iriret,inplev,ict,lvix - integer ilevs(maxlev) -c - iriret=0 - ict = 0 - do while (.true.) - - print *,'Top of while loop in vint read_input_levels' - - read (lulv,85,end=130) lvix,inplev - - if (inplev > 0 .and. inplev <= 1000) then - ict = ict + 1 - ilevs(ict) = inplev - else - print *,' ' - print *,'!!! ERROR: Input level not between 0 and 1000' - print *,'!!! in vint. inplev= ',inplev - print *,'!!! STOPPING EXECUTION' - STOP 91 - endif - - print *,'vint readloop, ict= ',ict,' inplev= ',inplev - - enddo - - 85 format (i4,1x,i4) - 130 continue - - nlevsin = ict - - print *,' ' - print *,'Total number of vint levels read in = ',nlevsin -c - return - end - -c--------------------------------------------------------------------- -c -c--------------------------------------------------------------------- - subroutine getgridinfo (lugb,lugi,kf,kpds,kgds,holdgfld,ifcsthour - & ,iparm,gribver,g2_jpdtn,iggret) -c -c ABSTRACT: The purpose of this subroutine is just to get the max -c values of i and j and the dx and dy grid spacing intervals for the -c grid to be used in the rest of the program. So just read the -c grib file to get the lon and lat data. Also, get the info for -c the data grid's boundaries. This boundary information will be -c used later in the tracking algorithm, and is accessed via Module -c grid_bounds. 
-c -C INPUT: -C lugb The Fortran unit number for the GRIB data file -C lugi The Fortran unit number for the GRIB index file -c ifcsthour input forecast hour to search for -c iparm input grib parm to search for -c gribver integer (1 or 2) to indicate if using GRIB1 / GRIB2 -c g2_jpdtn If GRIB2 data being read, this is the value for JPDTN -c that is input to getgb2. -C -C OUTPUT: -c kf Number of gridpoints on the grid -c kpds pds array for a GRIB1 record -c kgds gds array for a GRIB1 record -c holdgfld info for a GRIB2 record -c -C iggret The return code from this subroutine -c - USE params - USE grib_mod - - implicit none -c - type(gribfield) :: gfld,prevfld,holdgfld - integer,dimension(200) :: jids,jpdt,jgdt - logical(1), allocatable :: lb(:) - integer, parameter :: jf=4000000 - integer jpds(200),jgds(200) - integer kpds(200),kgds(200) - integer :: listsec1(13) - integer ila,ifa,iret,ifcsthour,imax,jmax,jskp,jdisc - integer lugb,lugi,kf,j,k,iggret,iparm,gribver,g2_jpdtn - integer jpdtn,jgdtn,npoints,icount,ipack,krec - integer :: listsec0(2)=(/0,2/) - integer :: igds(5)=(/0,0,0,0,0/),previgds(5) - integer :: idrstmpl(200) - integer :: currlen=1000000 - logical :: unpack=.true. - logical :: open_grb=.false. - real, allocatable :: f(:) - real dx,dy -c - iggret = 0 - - allocate (lb(jf),stat=ila) - allocate (f(jf),stat=ifa) - if (ila /= 0 .or. ifa /= 0) then - print *,' ' - print *,'!!! ERROR in vint.' - print *,'!!! ERROR in getgridinfo allocating either lb or f' - print *,'!!! ila = ',ila,' ifa= ',ifa - iggret = 97 - return - endif - - if (gribver == 2) then - - ! Search for a record from a GRIB2 file - - ! - ! --- Initialize Variables --- - ! - - gfld%idsect => NULL() - gfld%local => NULL() - gfld%list_opt => NULL() - gfld%igdtmpl => NULL() - gfld%ipdtmpl => NULL() - gfld%coord_list => NULL() - gfld%idrtmpl => NULL() - gfld%bmap => NULL() - gfld%fld => NULL() - - jdisc=0 ! meteorological products - jids=-9999 - jpdtn=g2_jpdtn ! 0 = analysis or forecast; 1 = ens fcst - jgdtn=0 ! lat/lon grid - jgdt=-9999 - jpdt=-9999 - - npoints=0 - icount=0 - jskp=0 - -c Search for Temperature or GP Height by production template.... - - JPDT(1:15)=(/-9999,-9999,-9999,-9999,-9999,-9999,-9999,-9999 - & ,-9999,-9999,-9999,-9999,-9999,-9999,-9999/) - - if (iparm == 7) then ! GP Height - jpdt(1) = 3 ! Param category from Table 4.1 - jpdt(2) = 5 ! Param number from Table 4.2-0-3 - elseif (iparm == 11) then ! Temperature - jpdt(1) = 0 ! Param category from Table 4.1 - jpdt(2) = 0 ! Param category from Table 4.2 - endif - - jpdt(9) = ifcsthour - - call getgb2(lugb,lugi,jskp,jdisc,jids,jpdtn,jpdt,jgdtn,jgdt - & ,unpack,krec,gfld,iret) - if ( iret.ne.0) then - print *,' ' - print *,' ERROR: getgb2 error in getgridinfo = ',iret - endif - -c Determine packing information from GRIB2 file -c The default packing is 40 JPEG 2000 - - ipack = 40 - - print *,' gfld%idrtnum = ', gfld%idrtnum - - ! Set DRT info ( packing info ) - if ( gfld%idrtnum.eq.0 ) then ! Simple packing - ipack = 0 - elseif ( gfld%idrtnum.eq.2 ) then ! Complex packing - ipack = 2 - elseif ( gfld%idrtnum.eq.3 ) then ! Complex & spatial packing - ipack = 31 - elseif ( gfld%idrtnum.eq.40.or.gfld%idrtnum.eq.15 ) then - ! JPEG 2000 packing - ipack = 40 - elseif ( gfld%idrtnum.eq.41 ) then ! 
PNG packing - ipack = 41 - endif - - print *,'After check of idrtnum, ipack= ',ipack - - print *,'Number of gridpts= gfld%ngrdpts= ',gfld%ngrdpts - print *,'Number of elements= gfld%igdtlen= ',gfld%igdtlen - print *,'PDT num= gfld%ipdtnum= ',gfld%ipdtnum - print *,'GDT num= gfld%igdtnum= ',gfld%igdtnum - - imax = gfld%igdtmpl(8) - print *,'at A' - jmax = gfld%igdtmpl(9) - print *,'at B' - dx = float(gfld%igdtmpl(17))/1.e6 - print *,'at C' - dy = float(gfld%igdtmpl(17))/1.e6 - print *,'at D' - kf = gfld%ngrdpts - print *,'at E' - - holdgfld = gfld - - else - - ! Search for a record from a GRIB1 file - - jpds = -1 - jgds = -1 - - j=0 - - jpds(5) = iparm ! Get a record for the input parm selected - jpds(6) = 100 ! Get a record on a standard pressure level - jpds(14) = ifcsthour - - call getgb(lugb,lugi,jf,j,jpds,jgds, - & kf,k,kpds,kgds,lb,f,iret) - - if (iret.ne.0) then - print *,' ' - print *,'!!! ERROR in vint getgridinfo calling getgb' - print *,'!!! Return code from getgb = iret = ',iret - iggret = iret - return - else - iggret=0 - imax = kgds(2) - jmax = kgds(3) - dx = float(kgds(9))/1000. - dy = float(kgds(10))/1000. - endif - - endif - - print *,' ' - print *,'In vint getgridinfo, grid dimensions follow:' - print *,'imax= ',imax,' jmax= ',jmax - print *,' dx= ',dx,' dy= ',dy - print *,'number of gridpoints = ',kf - - deallocate (lb); deallocate(f) - - return - end - -c--------------------------------------------------------------------- -c -c--------------------------------------------------------------------- - subroutine getdata (lugb,lugi,kf,valid_pt,nlevsin,ilevs,maxlev - & ,readflag,xinpdat,ifcsthour,iparm,gribver,g2_jpdtn - & ,igdret) -c -c ABSTRACT: This subroutine reads the input GRIB file for the -c tracked parameters. - - USE params - USE grib_mod - - implicit none -c - type(gribfield) :: gfld,prevfld - CHARACTER(len=8) :: pabbrev - integer,dimension(200) :: jids,jpdt,jgdt - logical(1) valid_pt(kf),lb(kf),readflag(nlevsin) - integer, parameter :: jf=4000000 - integer ilevs(maxlev) - integer jpds(200),jgds(200),kpds(200),kgds(200) - integer lugb,lugi,kf,nlevsin,maxlev,igdret,jskp,jdisc - integer i,j,k,ict,np,lev,ifcsthour,iret,iparm,gribver,g2_jpdtn - integer jpdtn,jgdtn,npoints,icount,ipack,krec - integer pdt_4p0_vert_level,pdt_4p0_vtime,mm - integer :: listsec0(2)=(/0,2/) - integer :: listsec1(13) - integer :: igds(5)=(/0,0,0,0,0/),previgds(5) - integer :: idrstmpl(200) - integer :: currlen=1000000 - logical :: unpack=.true. - logical :: open_grb=.false. - real f(kf),xinpdat(kf,nlevsin),xtemp(kf) - real dmin,dmax,firstval,lastval -c - igdret=0 - ict = 0 - - level_loop: do lev = 1,nlevsin - - print *,' ' - print *,'In vint getdata read loop, lev= ',lev,' level= ' - & ,ilevs(lev) - - if (gribver == 2) then - - ! - ! --- Initialize Variables --- - ! - - gfld%idsect => NULL() - gfld%local => NULL() - gfld%list_opt => NULL() - gfld%igdtmpl => NULL() - gfld%ipdtmpl => NULL() - gfld%coord_list => NULL() - gfld%idrtmpl => NULL() - gfld%bmap => NULL() - gfld%fld => NULL() - - jdisc=0 ! meteorological products - jids=-9999 - jpdtn=g2_jpdtn ! 0 = analysis or forecast; 1 = ens fcst - jgdtn=0 ! lat/lon grid - jgdt=-9999 - jpdt=-9999 - - npoints=0 - icount=0 - jskp=0 - -c Search for input parameter by production template 4.0. This -c vint program is used primarily for temperature, but still we -c will leave that as a variable and not-hard wire it in case we -c choose to average something else in the future. - - ! We are looking for Temperature or GP Height here. This - ! 
block of code, or even the smaller subset block of code that - ! contains the JPDT(1) and JPDT(2) assignments, can of course - ! be modified if this program is to be used for interpolating - ! other variables.... - - ! Set defaults for JPDT, then override in array - ! assignments below... - - JPDT(1:15)=(/-9999,-9999,-9999,-9999,-9999,-9999,-9999,-9999 - & ,-9999,-9999,-9999,-9999,-9999,-9999,-9999/) - - print *,' ' - print *,'In getdata vint, iparm= ',iparm - - if (iparm == 7) then ! GP Height - jpdt(1) = 3 ! Param category from Table 4.1 - jpdt(2) = 5 ! Param number from Table 4.2-0-3 - elseif (iparm == 11) then ! Temperature - jpdt(1) = 0 ! Param category from Table 4.1 - jpdt(2) = 0 ! Param category from Table 4.2 - endif - - JPDT(9) = ifcsthour - JPDT(10) = 100 ! Isobaric surface requested (Table 4.5) - JPDT(12) = ilevs(lev) * 100 ! value of specific level - - print *,'before getgb2 call, value of unpack = ',unpack - - do mm = 1,15 - print *,'VINT getdata mm= ',mm,' JPDT(mm)= ',JPDT(mm) - enddo - - call getgb2(lugb,lugi,jskp,jdisc,jids,jpdtn,jpdt,jgdtn,jgdt - & ,unpack,krec,gfld,iret) - - print *,'iret from getgb2 in getdata = ',iret - - print *,'after getgb2 call, value of unpacked = ' - & ,gfld%unpacked - - print *,'after getgb2 call, gfld%ndpts = ',gfld%ndpts - print *,'after getgb2 call, gfld%ibmap = ',gfld%ibmap - - if ( iret == 0) then - -c Determine packing information from GRIB2 file -c The default packing is 40 JPEG 2000 - - ipack = 40 - - print *,' gfld%idrtnum = ', gfld%idrtnum - - ! Set DRT info ( packing info ) - if ( gfld%idrtnum.eq.0 ) then ! Simple packing - ipack = 0 - elseif ( gfld%idrtnum.eq.2 ) then ! Complex packing - ipack = 2 - elseif ( gfld%idrtnum.eq.3 ) then ! Complex & spatial - & ! packing - ipack = 31 - elseif ( gfld%idrtnum.eq.40.or.gfld%idrtnum.eq.15 ) then - ! JPEG 2000 packing - ipack = 40 - elseif ( gfld%idrtnum.eq.41 ) then ! PNG packing - ipack = 41 - endif - - print *,'After check of idrtnum, ipack= ',ipack - - print *,'Number of gridpts= gfld%ngrdpts= ',gfld%ngrdpts - print *,'Number of elements= gfld%igdtlen= ',gfld%igdtlen - print *,'GDT num= gfld%igdtnum= ',gfld%igdtnum - - kf = gfld%ndpts ! Number of gridpoints returned from read - - do np = 1,kf - xinpdat(np,lev) = gfld%fld(np) - xtemp(np) = gfld%fld(np) - if (gfld%ibmap == 0) then - valid_pt(np) = gfld%bmap(np) - else - valid_pt(np) = .true. - endif - enddo - - readflag(lev) = .TRUE. 
-c call bitmapchk(kf,gfld%bmap,gfld%fld,dmin,dmax) - call bitmapchk(kf,valid_pt,xtemp,dmin,dmax) - - if (ict == 0) then -c do np = 1,kf -c valid_pt(np) = gfld%bmap(np) -c enddo - ict = ict + 1 - endif - - firstval=gfld%fld(1) - lastval=gfld%fld(kf) - - print *,' ' - print *,' SECTION 0: discipl= ',gfld%discipline - & ,' gribver= ',gfld%version - print *,' ' - print *,' SECTION 1: ' - - do j = 1,gfld%idsectlen - print *,' sect1, j= ',j,' gfld%idsect(j)= ' - & ,gfld%idsect(j) - enddo - - if ( associated(gfld%local).AND.gfld%locallen.gt.0) then - print *,' ' - print *,' SECTION 2: ',gfld%locallen,' bytes' - else - print *,' ' - print *,' SECTION 2 DOES NOT EXIST IN THIS RECORD' - endif - - print *,' ' - print *,' SECTION 3: griddef= ',gfld%griddef - print *,' ngrdpts= ',gfld%ngrdpts - print *,' numoct_opt= ',gfld%numoct_opt - print *,' interp_opt= ',gfld%interp_opt - print *,' igdtnum= ',gfld%igdtnum - print *,' igdtlen= ',gfld%igdtlen - - print *,' ' - print '(a17,i3,a2)',' GRID TEMPLATE 3.',gfld%igdtnum,': ' - do j=1,gfld%igdtlen - print *,' j= ',j,' gfld%igdtmpl(j)= ',gfld%igdtmpl(j) - enddo - - print *,' ' - print *,' PDT num (gfld%ipdtnum) = ',gfld%ipdtnum - print *,' ' - print '(a20,i3,a2)',' PRODUCT TEMPLATE 4.',gfld%ipdtnum,': ' - do j=1,gfld%ipdtlen - print *,' sect 4 j= ',j,' gfld%ipdtmpl(j)= ' - & ,gfld%ipdtmpl(j) - enddo - -c Print out values for data representation type - - print *,' ' - print '(a21,i3,a2)',' DATA REP TEMPLATE 5.',gfld%idrtnum - & ,': ' - do j=1,gfld%idrtlen - print *,' sect 5 j= ',j,' gfld%idrtmpl(j)= ' - & ,gfld%idrtmpl(j) - enddo - -c Get parameter abbrev for record that was retrieved - - pdt_4p0_vtime = gfld%ipdtmpl(9) - pdt_4p0_vert_level = gfld%ipdtmpl(12) - - pabbrev=param_get_abbrev(gfld%discipline,gfld%ipdtmpl(1) - & ,gfld%ipdtmpl(2)) - - print *,' ' - write (6,131) - 131 format (' rec# param level byy bmm bdd bhh ' - & ,'fhr npts firstval lastval minval ' - & ,' maxval') - print '(i5,3x,a8,2x,6i5,2x,i8,4g12.4)' - & ,krec,pabbrev,pdt_4p0_vert_level/100,gfld%idsect(6) - & ,gfld%idsect(7),gfld%idsect(8),gfld%idsect(9) - & ,pdt_4p0_vtime,gfld%ndpts,firstval,lastval,dmin,dmax - - do np = 1,kf - xinpdat(np,lev) = gfld%fld(np) - enddo - - else - - print *,' ' - print *,'!!! ERROR: GRIB2 VINT READ IN GETDATA FAILED FOR ' - & ,'LEVEL LEV= ',LEV - print *,' ' - - readflag(lev) = .FALSE. - - do np = 1,kf - xinpdat(np,lev) = -99999.0 - enddo - - endif - - else - - ! Reading a GRIB1 file.... - - jpds = -1 - jgds = -1 - j=0 - - jpds(5) = iparm ! grib parameter id to read in - jpds(6) = 100 ! level id to indicate a pressure level - jpds(7) = ilevs(lev) ! actual level of the layer - jpds(14) = ifcsthour ! lead time to search for - - call getgb (lugb,lugi,jf,j,jpds,jgds, - & kf,k,kpds,kgds,lb,f,iret) - - print *,' ' - print *,'After vint getgb call, j= ',j,' k= ',k,' level= ' - & ,ilevs(lev),' iret= ',iret - - if (iret == 0) then - - readflag(lev) = .TRUE. - call bitmapchk(kf,lb,f,dmin,dmax) - - if (ict == 0) then - do np = 1,kf - valid_pt(np) = lb(np) - enddo - ict = ict + 1 - endif - - write (6,31) - 31 format (' rec# parm# levt lev byy bmm bdd bhh fhr ' - & ,'npts minval maxval') - print '(i4,2x,8i5,i8,2g12.4)', - & k,(kpds(i),i=5,11),kpds(14),kf,dmin,dmax - - do np = 1,kf - xinpdat(np,lev) = f(np) - enddo - - else - - print *,' ' - print *,'!!! ERROR: VINT READ FAILED FOR LEVEL LEV= ',LEV - print *,' ' - - readflag(lev) = .FALSE. 
- - do np = 1,kf - xinpdat(np,lev) = -99999.0 - enddo - - endif - - endif - - enddo level_loop -c - return - end -c -c----------------------------------------------------------------------- -c -c----------------------------------------------------------------------- - subroutine interp_data (kf,valid_pt,nlevsin,ilevs,maxlev,readflag - & ,xinpdat,xoutdat,xoutlevs_p,nlevsout,iidret) -c -c ABSTRACT: This routine interpolates data in between available -c pressure levels to get data resolution at the 50-mb -c resolution that we need for the cyclone phase space -c diagnostics. - - implicit none - - logical(1) valid_pt(kf),readflag(nlevsin) - integer ilevs(maxlev) - integer nlevsin,nlevsout,maxlev,kf,kout,kin,k,n,kup,klo - integer iidret - real xinpdat(kf,nlevsin),xoutdat(kf,nlevsout) - real xoutlevs_p(nlevsout),xoutlevs_lnp(nlevsout) - real xinlevs_p(nlevsin),xinlevs_lnp(nlevsin) - real pdiff,pdiffmin,xu,xo,xl,yu,yl -c - iidret=0 - print *,' ' - print *,'*----------------------------------------------*' - print *,' Listing of standard output levels follows....' - print *,'*----------------------------------------------*' - print *,' ' - - do k = 1,nlevsout - xoutlevs_lnp(k) = log(xoutlevs_p(k)) - write (6,81) k,xoutlevs_p(k),xoutlevs_lnp(k) - enddo - 81 format (1x,'k= ',i3,' p= ',f6.1,' ln(p)= ',f9.6) - - do k = 1,nlevsin - xinlevs_p(k) = float(ilevs(k)) - xinlevs_lnp(k) = log(xinlevs_p(k)) - enddo - -c ----------------------------------------------------------------- -c We want to loop through for all the *output* levels that we need. -c We may have some input levels that match perfectly, often at -c least the standard levels like 500, 700, 850. For these levels, -c just take the data directly from the input file. For other -c output levels that fall between the input levels, we need to -c find the nearest upper and lower levels. - - output_loop: do kout = 1,nlevsout - - print *,' ' - print *,'+------------------------------------------------+' - print *,'Top of vint output_loop, kout= ',kout,' pressure= ' - & ,xoutlevs_p(kout) - - ! Loop through all of the input levels and find the level - ! that is closest to the output level from the *upper* side. - ! And again, in this upper loop, if we hit a level that - ! exactly matches a needed output level, just copy that data - ! and then cycle back to the top of output_loop. - - kup = -999 - klo = -999 - - pdiffmin = 9999.0 - - inp_loop_up: do kin = 1,nlevsin - if (xinlevs_p(kin) == xoutlevs_p(kout)) then - print *,' ' - print *,'+++ Exact level found. kout= ',kout - print *,'+++ level= ',xoutlevs_p(kout) - print *,'+++ Data copied. No interpolation needed.' - if (readflag(kin)) then - do n = 1,kf - xoutdat(n,kout) = xinpdat(n,kin) - enddo - cycle output_loop - else - print *,' ' - print *,'!!! ERROR: readflag is FALSE in interp_data for' - print *,'!!! level kin= ',kin,', which is a level that ' - print *,'!!! exactly matches a required output level, and' - print *,'!!! the user has identified as being an input ' - print *,'!!! level with valid data for this model. We ' - print *,'!!! will get the data from a different level.' - endif - else - pdiff = xoutlevs_p(kout) - xinlevs_p(kin) - if (pdiff > 0.) then ! We have a level higher than outlev - if (pdiff < pdiffmin) then - pdiffmin = pdiff - kup = kin - endif - endif - endif - enddo inp_loop_up - - pdiffmin = 9999.0 - - inp_loop_lo: do kin = 1,nlevsin - pdiff = xinlevs_p(kin) - xoutlevs_p(kout) - if (pdiff > 0.) then ! 
We have a level lower than outlev - if (pdiff < pdiffmin) then - pdiffmin = pdiff - klo = kin - endif - endif - enddo inp_loop_lo - - if (kup == -999 .or. klo == -999) then - print *,' ' - print *,'!!! ERROR: While interpolating, could not find ' - print *,'!!! either an upper or lower input level to use' - print *,'!!! for interpolating *from*.' - print *,'!!! kup= ',kup,' klo= ',klo - print *,' ' - print *,'!!! STOPPING....' - stop 91 - endif - - if (.not. readflag(kup) .or. .not. readflag(klo)) then - print *,' ' - print *,'!!! ERROR: In interp_data, either the upper or the' - print *,'!!! lower input level closest to the target output' - print *,'!!! level did not have valid data read in.' - print *,'!!! ' - write (6,91) ' upper level k= ',kup,xinlevs_p(kup) - & ,xinlevs_lnp(kup) - write (6,101) xoutlevs_p(kout),xoutlevs_lnp(kout) - write (6,91) ' lower level k= ',klo,xinlevs_p(klo) - & ,xinlevs_lnp(klo) - print *,'!!! readflag upper = ',readflag(kup) - print *,'!!! readflag lower = ',readflag(klo) - print *,'!!! EXITING....' - stop 92 - endif - - print *,' ' - write (6,91) ' upper level k= ',kup,xinlevs_p(kup) - & ,xinlevs_lnp(kup) - write (6,101) xoutlevs_p(kout),xoutlevs_lnp(kout) - write (6,91) ' lower level k= ',klo,xinlevs_p(klo) - & ,xinlevs_lnp(klo) - - 91 format (1x,a17,1x,i3,' pressure= ',f6.1,' ln(p)= ',f9.6) - 101 format (13x,'Target output pressure= ',f6.1,' ln(p)= ',f9.6) - - !-------------------------------------------------------------- - ! Now perform the linear interpolation. Here is the notation - ! used in the interpolation: - ! - ! xu = ln of pressure at upper level - ! xo = ln of pressure at output level - ! xl = ln of pressure at lower level - ! yu = data value at upper level - ! yl = data value at lower level - !-------------------------------------------------------------- - - xu = xinlevs_lnp(kup) - xo = xoutlevs_lnp(kout) - xl = xinlevs_lnp(klo) - - do n = 1,kf - yu = xinpdat(n,kup) - yl = xinpdat(n,klo) - xoutdat(n,kout) = ((yl * (xo - xu)) - (yu * (xo - xl))) - & / (xl - xu) - enddo - - enddo output_loop -c - return - end -c -c---------------------------------------------------------------------- -c -c---------------------------------------------------------------------- - subroutine output_data (lout,kf,kpds,kgds,holdgfld,xoutdat - & ,valid_pt,xoutlevs_p,nlevsout,gribver,iodret) -c -c ABSTRACT: This routine writes out the output data on the -c specified output pressure levels. - - USE params - USE grib_mod - - implicit none - - CHARACTER(len=1),pointer,dimension(:) :: cgrib - type(gribfield) :: holdgfld - logical(1) valid_pt(kf),bmap(kf) - integer lout,kf,lugb,lugi,iodret,nlevsout,igoret,ipret,lev - integer gribver,ierr,ipack,lengrib,npoints,newlen,idrsnum - integer numcoord,ica,n,j - integer :: idrstmpl(200) - integer :: currlen=1000000 - integer :: listsec0(2)=(/0,2/) - integer :: igds(5)=(/0,0,0,0,0/),previgds(5) - integer kpds(200),kgds(200) - integer(4), parameter::idefnum=1 - integer(4) ideflist(idefnum),ibmap - real coordlist - real xoutdat(kf,nlevsout),xoutlevs_p(nlevsout) -c - iodret=0 - call baopenw (lout,"fort.51",igoret) - print *,'baopenw: igoret= ',igoret - - if (igoret /= 0) then - print *,' ' - print *,'!!! ERROR in vint in sub output_data opening' - print *,'!!! **OUTPUT** grib file. baopenw return codes:' - print *,'!!! grib file 1 return code = igoret = ',igoret - STOP 95 - return - endif - - levloop: do lev = 1,nlevsout - - if (gribver == 2) then - - ! Write data out as a GRIB2 message.... 
- - allocate(cgrib(currlen),stat=ica) - if (ica /= 0) then - print *,' ' - print *,'ERROR in output_data allocating cgrib' - print *,'ica= ',ica - iodret=95 - return - endif - - ! Ensure that cgrib array is large enough - - if (holdgfld%ifldnum == 1 ) then ! start new GRIB2 message - npoints=holdgfld%ngrdpts - else - npoints=npoints+holdgfld%ngrdpts - endif - newlen=npoints*4 - if ( newlen.gt.currlen ) then -ccc if (allocated(cgrib)) deallocate(cgrib) - if (associated(cgrib)) deallocate(cgrib) - allocate(cgrib(newlen),stat=ierr) -c call realloc (cgrib,currlen,newlen,ierr) - if (ierr == 0) then - print *,' ' - print *,'re-allocate for large grib msg: ' - print *,' currlen= ',currlen - print *,' newlen= ',newlen - currlen=newlen - else - print *,'ERROR returned from 2nd allocate cgrib = ',ierr - stop 95 - endif - endif - - ! Create new GRIB Message - listsec0(1)=holdgfld%discipline - listsec0(2)=holdgfld%version - - print *,'output, holdgfld%idsectlen= ',holdgfld%idsectlen - do j = 1,holdgfld%idsectlen - print *,' sect1, j= ',j,' holdgfld%idsect(j)= ' - & ,holdgfld%idsect(j) - enddo - - call gribcreate(cgrib,currlen,listsec0,holdgfld%idsect,ierr) - if (ierr.ne.0) then - write(6,*) ' ERROR creating new GRIB2 field (gribcreate)= ' - & ,ierr - stop 95 - endif - - previgds=igds - igds(1)=holdgfld%griddef - igds(2)=holdgfld%ngrdpts - igds(3)=holdgfld%numoct_opt - igds(4)=holdgfld%interp_opt - igds(5)=holdgfld%igdtnum - - if (igds(3) == 0) then - ideflist = 0 - endif - - call addgrid (cgrib,currlen,igds,holdgfld%igdtmpl - & ,holdgfld%igdtlen,ideflist,idefnum,ierr) - - if (ierr.ne.0) then - write(6,*) ' ERROR from addgrid adding GRIB2 grid = ',ierr - stop 95 - endif - - holdgfld%ipdtmpl(12) = int(xoutlevs_p(lev)) * 100 - - ipack = 40 - idrsnum = ipack - idrstmpl = 0 - - idrstmpl(2)= holdgfld%idrtmpl(2) - idrstmpl(3)= holdgfld%idrtmpl(3) - idrstmpl(6)= 0 - idrstmpl(7)= 255 - - numcoord=0 - coordlist=0.0 ! Only needed for hybrid vertical coordinate, - ! not here, so set it to 0.0 - - ! 0 - A bit map applies to this product and is specified in - ! this section - ! 255 - A bit map does not apply to this product - ibmap=255 ! Bitmap indicator (see Code Table 6.0) - - print *,' ' - print *,'output, holdgfld%ipdtlen= ',holdgfld%ipdtlen - do n = 1,holdgfld%ipdtlen - print *,'output, n= ',n,' holdgfld%ipdtmpl= ' - & ,holdgfld%ipdtmpl(n) - enddo - - print *,'output, kf= ',kf -c do n = 1,kf -c print *,'output, n= ',n,' xoutdat(n)= ',xoutdat(n) -c enddo - - call addfield (cgrib,currlen,holdgfld%ipdtnum,holdgfld%ipdtmpl - & ,holdgfld%ipdtlen,coordlist - & ,numcoord - & ,idrsnum,idrstmpl,200 - & ,xoutdat(1,lev),kf,ibmap,bmap,ierr) - - if (ierr /= 0) then - write(6,*) ' ERROR from addfield adding GRIB2 data = ',ierr - stop 95 - endif - -! Finalize GRIB message after all grids -! and fields have been added. It adds the End Section ( "7777" ) - - call gribend(cgrib,currlen,lengrib,ierr) - call wryte(lout,lengrib,cgrib) - - if (ierr == 0) then - print *,' ' - print *,'+++ GRIB2 write successful. ' - print *,' Len of message = currlen= ',currlen - print *,' Len of entire GRIB2 message = lengrib= ' - & ,lengrib - else - print *,' ERROR from gribend writing GRIB2 msg = ',ierr - stop 95 - endif - - else - - ! Write data out as a GRIB1 message.... 
- - kpds(7) = int(xoutlevs_p(lev)) - - print *,'In vint, just before call to putgb, kf= ',kf - call putgb (lout,kf,kpds,kgds,valid_pt,xoutdat(1,lev),ipret) - print *,'In vint, just after call to putgb, kf= ',kf - if (ipret == 0) then - print *,' ' - print *,'+++ IPRET = 0 after call to putgb in vint' - print *,' ' - else - print *,' ' - print *,'!!!!!! ERROR in vint.' - print *,'!!!!!! ERROR: IPRET NE 0 AFTER CALL TO PUTGB !!!' - print *,'!!!!!! Level index= ',lev - print *,'!!!!!! pressure= ',xoutlevs_p(lev) - print *,' ' - endif - - write(*,980) kpds(1),kpds(2) - write(*,981) kpds(3),kpds(4) - write(*,982) kpds(5),kpds(6) - write(*,983) kpds(7),kpds(8) - write(*,984) kpds(9),kpds(10) - write(*,985) kpds(11),kpds(12) - write(*,986) kpds(13),kpds(14) - write(*,987) kpds(15),kpds(16) - write(*,988) kpds(17),kpds(18) - write(*,989) kpds(19),kpds(20) - write(*,990) kpds(21),kpds(22) - write(*,991) kpds(23),kpds(24) - write(*,992) kpds(25) - write(*,880) kgds(1),kgds(2) - write(*,881) kgds(3),kgds(4) - write(*,882) kgds(5),kgds(6) - write(*,883) kgds(7),kgds(8) - write(*,884) kgds(9),kgds(10) - write(*,885) kgds(11),kgds(12) - write(*,886) kgds(13),kgds(14) - write(*,887) kgds(15),kgds(16) - write(*,888) kgds(17),kgds(18) - write(*,889) kgds(19),kgds(20) - write(*,890) kgds(21),kgds(22) - - 980 format(' kpds(1) = ',i7,' kpds(2) = ',i7) - 981 format(' kpds(3) = ',i7,' kpds(4) = ',i7) - 982 format(' kpds(5) = ',i7,' kpds(6) = ',i7) - 983 format(' kpds(7) = ',i7,' kpds(8) = ',i7) - 984 format(' kpds(9) = ',i7,' kpds(10) = ',i7) - 985 format(' kpds(11) = ',i7,' kpds(12) = ',i7) - 986 format(' kpds(13) = ',i7,' kpds(14) = ',i7) - 987 format(' kpds(15) = ',i7,' kpds(16) = ',i7) - 988 format(' kpds(17) = ',i7,' kpds(18) = ',i7) - 989 format(' kpds(19) = ',i7,' kpds(20) = ',i7) - 990 format(' kpds(21) = ',i7,' kpds(22) = ',i7) - 991 format(' kpds(23) = ',i7,' kpds(24) = ',i7) - 992 format(' kpds(25) = ',i7) - 880 format(' kgds(1) = ',i7,' kgds(2) = ',i7) - 881 format(' kgds(3) = ',i7,' kgds(4) = ',i7) - 882 format(' kgds(5) = ',i7,' kgds(6) = ',i7) - 883 format(' kgds(7) = ',i7,' kgds(8) = ',i7) - 884 format(' kgds(9) = ',i7,' kgds(10) = ',i7) - 885 format(' kgds(11) = ',i7,' kgds(12) = ',i7) - 886 format(' kgds(13) = ',i7,' kgds(14) = ',i7) - 887 format(' kgds(15) = ',i7,' kgds(16) = ',i7) - 888 format(' kgds(17) = ',i7,' kgds(18) = ',i7) - 889 format(' kgds(19) = ',i7,' kgds(20) = ',i7) - 890 format(' kgds(20) = ',i7,' kgds(22) = ',i7) - - endif - - enddo levloop -c - return - end -c -c----------------------------------------------------------------------- -c -c----------------------------------------------------------------------- - subroutine open_grib_files (lugb,lugi,lout,gribver,iret) - -C ABSTRACT: This subroutine must be called before any attempt is -C made to read from the input GRIB files. The GRIB and index files -C are opened with a call to baopenr. This call to baopenr was not -C needed in the cray version of this program (the files could be -C opened with a simple Cray assign statement), but the GRIB-reading -C utilities on the SP do require calls to this subroutine (it has -C something to do with the GRIB I/O being done in C on the SP, and -C the C I/O package needs an explicit open statement). 
-C -C INPUT: -C lugb The Fortran unit number for the GRIB data file -C lugi The Fortran unit number for the GRIB index file -C lout The Fortran unit number for the output grib file -c gribver integer (1 or 2) to indicate if using GRIB1 / GRIB2 -C -C OUTPUT: -C iret The return code from this subroutine - - implicit none - - character fnameg*7,fnamei*7,fnameo*7 - integer iret,gribver,lugb,lugi,lout,igoret,iioret,iooret - - iret=0 - fnameg(1:5) = "fort." - fnamei(1:5) = "fort." - fnameo(1:5) = "fort." - write(fnameg(6:7),'(I2)') lugb - write(fnamei(6:7),'(I2)') lugi - write(fnameo(6:7),'(I2)') lout - call baopenr (lugb,fnameg,igoret) - call baopenr (lugi,fnamei,iioret) - call baopenw (lout,fnameo,iooret) - - print *,' ' - print *,'vint: baopen: igoret= ',igoret,' iioret= ',iioret - & ,' iooret= ',iooret - - if (igoret /= 0 .or. iioret /= 0 .or. iooret /= 0) then - print *,' ' - print *,'!!! ERROR in vint.' - print *,'!!! ERROR in sub open_grib_files opening grib file' - print *,'!!! or grib index file. baopen return codes:' - print *,'!!! grib file return code = igoret = ',igoret - print *,'!!! index file return code = iioret = ',iioret - print *,'!!! output file return code = iooret = ',iooret - iret = 93 - return - endif - - return - end -c -c------------------------------------------------------------------- -c -c------------------------------------------------------------------- - subroutine bitmapchk (n,ld,d,dmin,dmax) -c -c This subroutine checks the bitmap for non-existent data values. -c Since the data from the regional models have been interpolated -c from either a polar stereographic or lambert conformal grid -c onto a lat/lon grid, there will be some gridpoints around the -c edges of this lat/lon grid that have no data; these grid -c points have been bitmapped out by Mark Iredell's interpolater. -c To provide another means of checking for invalid data points -c later in the program, set these bitmapped data values to a -c value of -999.0. The min and max of this array are also -c returned if a user wants to check for reasonable values. -c - logical(1) ld - dimension ld(n),d(n) -c - dmin=1.E15 - dmax=-1.E15 -c - do i=1,n - if (ld(i)) then - dmin=min(dmin,d(i)) - dmax=max(dmax,d(i)) - else - d(i) = -999.0 - endif - enddo -c - return - end diff --git a/test/README.md b/test/README.md new file mode 100644 index 0000000000..8d9d273ce2 --- /dev/null +++ b/test/README.md @@ -0,0 +1,115 @@ +# Global workflow comparison tools +A collection of tools to compare two different global workflow experiments for bitwise identicality. + +## Disclaimer + +These tools are still a work-in-progress. Use at your own risk. There is no guarantee every relevant file will be compared (but feel free to make a pull request adding more). + +# Usage + +## Quick start +### To compare two UFS run directories +``` +./diff_UFS_rundir.sh dirA dirB +``` +Where `dirA` and `dirB` are the two UFS run directories. 
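+
+For example, assuming both experiments kept their run directories (`KEEPDATA` set to `YES`), a comparison might look like this; the paths below are purely illustrative and will differ on your system:
+```
+./diff_UFS_rundir.sh /scratch1/$USER/RUNDIRS/exp_base/gfsfcst.2021032400 /scratch1/$USER/RUNDIRS/exp_test/gfsfcst.2021032400
+```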
+
+
+### To compare two ROTDIRs
+```
+./diff_ROTDIR.sh dirA dirB
+```
+Where `dirA` and `dirB` are the two cycle directories (`.../gfs.YYYYMMDD/HH/`)
+
+OR
+
+```
+./diff_ROTDIR.sh rotdir cdate expA expB
+```
+
+Where:
+- `rotdir` is the root of your rotdirs (the portion of path the experiments share)
+- `cdate` is the datetime of the cycle in YYYYMMDDHH format
+- `expA` and `expB` are the experiment names ($PSLOT) of each experiment
+
+## Description
+
+There are currently two tools included in this package:
+* `diff_UFS_rundir.sh` will compare two UFS run directories (must have retained them by setting `KEEPDATA` to `YES` in config.base)
+* `diff_ROTDIR.sh` will compare entire ROTDIRs
+
+Both scripts work similarly. You will need two experiments to compare. Typically this means a "baseline" experiment using the current develop and whatever feature you are working on. Experiments need to be for the same cycle and use all the same settings, otherwise there is no chance of them matching. Except for specific text files, file lists are constructed by globbing the first experiment directory, so if the second experiment contains files that would otherwise be included, they will be skipped.
+
+There are three classes of files compared:
+- Text files, by simple posix diff
+- GRiB2 files, using correlation from `wgrib2`
+- NetCDF files, using NetCDF Operators (nco)
+
+Text and grib2 files are processed first and complete quickly. NetCDF processing is currently a lot slower.
+
+Any variables listed in the coordinates.lst file will be ignored when comparing NetCDFs. This is because coordinate variables are not differenced, so when iterating through the variables of the difference they will be non-zero.
+
+## Output
+
+Output will appear like this:
+```
+=== <filename> ===
+<comparison output>
+
+```
+
+For text files, it will be the output of posix diff, which is just an empty string when identical:
+```
+...
+
+=== field_table ===
+
+
+=== input.nml ===
+310,313c310,313
+< FNGLAC = '/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/global-workflow/develop/fix/fix_am/global_glacier.2x2.grb'
+< FNMXIC = '/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/global-workflow/develop/fix/fix_am/global_maxice.2x2.grb'
+< FNTSFC = '/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/global-workflow/develop/fix/fix_am/RTGSST.1982.2012.monthly.clim.grb'
+< FNSNOC = '/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/global-workflow/develop/fix/fix_am/global_snoclim.1.875.grb'
+---
+> FNGLAC = '/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/global-workflow/add_preamble/fix/fix_am/global_glacier.2x2.grb'
+> FNMXIC = '/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/global-workflow/add_preamble/fix/fix_am/global_maxice.2x2.grb'
+> FNTSFC = '/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/global-workflow/add_preamble/fix/fix_am/RTGSST.1982.2012.monthly.clim.grb'
+> FNSNOC = '/scratch2/NCEPDEV/ensemble/save/Walter.Kolczynski/global-workflow/add_preamble/fix/fix_am/global_snoclim.1.875.grb'
+
+...
+```
+(Text diffs have two extra blank lines to separate the output.)
+
+Grib files will look like this if they are identical:
+```
+=== GFSFLX.GrbF00 ===
+All fields are identical!
+=== GFSFLX.GrbF03 ===
+All fields are identical!
+=== GFSFLX.GrbF06 ===
+All fields are identical!
+=== GFSFLX.GrbF09 ===
+All fields are identical!
+=== GFSFLX.GrbF12 ===
+All fields are identical!
+
+...
+ +``` + +And NetCDFs will look like this: +``` +=== atmf000.nc === +0 differences found +=== atmf003.nc === +0 differences found +=== atmf006.nc === +0 differences found +=== atmf009.nc === +0 differences found + +... +``` + +If any variables in a grib or NetCDF do not match, they will be listed instead. diff --git a/test/coordinates.lst b/test/coordinates.lst new file mode 100644 index 0000000000..f175c2f047 --- /dev/null +++ b/test/coordinates.lst @@ -0,0 +1,8 @@ +grid_xt +grid_yt +lat +lon +pfull +phalf +time +time_iso diff --git a/test/diff_ROTDIR.sh b/test/diff_ROTDIR.sh new file mode 100755 index 0000000000..3a864f5f09 --- /dev/null +++ b/test/diff_ROTDIR.sh @@ -0,0 +1,162 @@ +#! /bin/env bash + +# +# Differences relevant output files in two different experiment ROTDIRs. +# Text files are compared via posix diff. GRiB files are compared via +# correlation reported by wgrib2. NetCDF files are compared by using +# NetCDF operators to calculate a diff then make sure all non-coordinate +# variable differences are zero. File lists are created by globbing key +# directories under the first experiment given. +# +# Syntax: +# diff_ROTDIR.sh [-c coord_file][-h] rotdir cdate expA expB +# +# OR +# +# diff_ROTDIR.sh [-c coord_file][-h] dirA dirB +# +# Arguments: +# rotdir: root rotdir where ROTDIRS are held +# cdate: experiment date/cycle in YYYYMMDDHH format +# expA, expB: experiment ids (PSLOT) to compare +# +# dirA, dirB: full paths to the cycle directories to be compared +# (${rotdir}/${exp}/gfs.${YYYYMMDD}/${cyc}) +# +# Options: +# -c coord_file: file containing a list of coordinate variables +# -h: print usage message and exit +# + +set -eu + +usage() { + # + # Print usage statement + # + echo <<- 'EOF' + Differences relevant output files in two different experiment ROTDIRs. + Text files are compared via posix diff. GRiB files are compared via + correlation reported by wgrib2. NetCDF files are compared by using + NetCDF operators to calculate a diff then make sure all non-coordinate + variable differences are zero. File lists are created by globbing key + directories under the first experiment given. 
+ + Syntax: + diff_ROTDIR.sh [-c coord_file][-h] rotdir cdate expA expB + + OR + + diff_ROTDIR.sh [-c coord_file][-h] dirA dirB + + Arguments: + rotdir: root rotdir where ROTDIRS are held + cdate: experiment date/cycle in YYYYMMDDHH format + expA, expB: experiment ids (PSLOT) to compare + + dirA, dirB: full paths to the cycle directories to be compared + (${rotdir}/${exp}/gfs.${YYYYMMDD}/${cyc}) + + Options: + -c coord_file: file containing a list of coordinate variables + -h: print usage message and exit + EOF +} + +while getopts ":c:h" option; do + case "${option}" in + c) coord_file=${OPTARG} ;; + h) usage; exit 0 ;; + *) echo "Unknown option ${option}"; exit 1 ;; + esac +done + +num_args=$# +case $num_args in + 2) # Direct directory paths + dirA=$1 + dirB=$2 + ;; + 4) # Derive directory paths + rotdir=$1 + date=$2 + expA=$3 + expB=$4 + + YYYYMMDD=$(echo $date | cut -c1-8) + cyc=$(echo $date | cut -c9-10) + dirA="$rotdir/$expA/gfs.${YYYYMMDD}/${cyc}" + dirB="$rotdir/$expB/gfs.${YYYYMMDD}/${cyc}" + ;; + *) # Unknown option + echo "${num_args} is not a valid number of arguments, use 2 or 4" + usage + exit 1 + ;; +esac + +temp_file=".diff.nc" + +# Contains a bunch of NetCDF Operator shortcuts (will load nco module) +source ./netcdf_op_functions.sh +source ./test_utils.sh + +coord_file="${coord_file:-./coordinates.lst}" + +## Text files +files="" +files="${files} atmos/input.nml" # This file will be different because of the fix paths +files="${files} $(basename_list 'atmos/' "$dirA/atmos/storms.*" "$dirA/atmos/trak.*")" +if [[ -d $dirA/ice ]]; then + files="${files} ice/ice_in" +fi +if [[ -d $dirA/ocean ]]; then + files="${files} ocean/MOM_input" +fi +# if [[ -d $dirA/wave ]]; then +# files="${files} $(basename_list 'wave/station/' "$dirA/wave/station/*bull_tar")" +# fi + +for file in $files; do + echo "=== ${file} ===" + fileA="$dirA/$file" + fileB="$dirB/$file" + diff $fileA $fileB || : +done + +## GRiB files + +module load wgrib2/2.0.8 + +files="" +files="${files} $(basename_list 'atmos/' $dirA/atmos/*grb2* $dirA/atmos/*.flux.*)" +if [[ -d $dirA/wave ]]; then + files="${files} $(basename_list 'wave/gridded/' $dirA/wave/gridded/*.grib2)" +fi +if [[ -d $dirA/ocean ]]; then + files="${files} $(basename_list 'ocean/' $dirA/ocean/*grb2)" +fi + +for file in $files; do + echo "=== ${file} ===" + fileA="$dirA/$file" + fileB="$dirB/$file" + ./diff_grib_files.py $fileA $fileB +done + +## NetCDF Files +files="" +files="${files} $(basename_list 'atmos/' $dirA/atmos/*.nc)" +if [[ -d $dirA/ice ]]; then + files="${files} $(basename_list 'ice/' $dirA/ice/*.nc)" +fi +if [[ -d $dirA/ocean ]]; then + files="${files} $(basename_list 'ocean/' $dirA/ocean/*.nc)" +fi + +for file in $files; do + echo "=== ${file} ===" + fileA="$dirA/$file" + fileB="$dirB/$file" + nccmp -q $fileA $fileB $coord_file +done diff --git a/test/diff_UFS_rundir.sh b/test/diff_UFS_rundir.sh new file mode 100755 index 0000000000..fac2242a65 --- /dev/null +++ b/test/diff_UFS_rundir.sh @@ -0,0 +1,110 @@ +#! /bin/env bash + +# +# Differences relevant output files in two UFS model directories. GRiB files +# are compared via correlation reported by wgrib2. NetCDF files are compared +# by using NetCDF operators to calculate a diff then make sure all non- +# coordinate variable differences are zero. 
+# +# Syntax: +# diff_UFS_rundir.sh [-c coord_file][-h] dirA dirB +# +# Arguments: +# dirA, dirB: full paths to the UFS run directories to be compared +# +# Options: +# -c coord_file: file containing a list of coordinate variables +# -h: print usage message and exit +# + +set -eu + +usage() { + # + # Print usage statement + # + echo <<- 'EOF' + Differences relevant output files in two UFS model directories. GRiB files + are compared via correlation reported by wgrib2. NetCDF files are compared + by using NetCDF operators to calculate a diff then make sure all non- + coordinate variable differences are zero. + + Syntax: + diff_UFS_rundir.sh [-c coord_file][-h] dirA dirB + + Arguments: + dirA, dirB: full paths to the UFS run directories to be compared + + Options: + -c coord_file: file containing a list of coordinate variables + -h: print usage message and exit + EOF +} + +while getopts ":c:h" option; do + case "${option}" in + c) coord_file=${OPTARG} ;; + h) usage; exit 0 ;; + *) echo "Unknown option ${option}"; exit 1 ;; + esac +done + +num_args=$# +case $num_args in + 2) # Direct directory paths + dirA=$1 + dirB=$2 + ;; + *) # Unknown option + echo "${num_args} is not a valid number of arguments, use 2" + usage + exit 1 + ;; +esac + +source ./netcdf_op_functions.sh +source ./test_utils.sh + +temp_file=".diff.nc" +coord_file="${coord_file:-./coordinates.lst}" + +# Input files +files="data_table diag_table fd_nems.yaml field_table ice_in input.nml med_modelio.nml \ + model_configure nems.configure pio_in ww3_multi.inp ww3_shel.inp" + +for file in $files; do + echo "=== ${file} ===" + fileA="$dirA/$file" + fileB="$dirB/$file" + if [[ -f "$fileA" ]]; then + diff $fileA $fileB || : + else + echo ; echo; +done + +# GRiB files +files="$(basename_list '' $dirA/GFSFLX.Grb*)" + +module load wgrib2/2.0.8 + +for file in $files; do + echo "=== ${file} ===" + fileA="$dirA/$file" + fileB="$dirB/$file" + ./diff_grib_files.py $fileA $fileB +done + +# NetCDF Files +files="" +files="${files} $(basename_list '' $dirA/atmf*.nc $dirA/sfcf*.nc)" +if [[ -d "$dirA/history" ]]; then + files="$(basename_list 'history/' $dirA/history/*.nc)" +fi + +for file in $files; do + echo "=== ${file} ===" + fileA="$dirA/$file" + fileB="$dirB/$file" + nccmp -q $fileA $fileB $coord_file +done + diff --git a/test/diff_grib_files.py b/test/diff_grib_files.py new file mode 100755 index 0000000000..9c01afbb18 --- /dev/null +++ b/test/diff_grib_files.py @@ -0,0 +1,76 @@ +#! /bin/env python3 +''' +Compares two grib2 files and print any variables that have a + non-identity correlation. + +Syntax +------ +diff_grib_files.py fileA fileB + +Parameters +---------- +fileA: string + Path to the first grib2 file +fileB: string + Path to the second grib2 file + +''' +import re +import sys +import subprocess + +# TODO - Update to also check the min just in case the grib files have a constant offset + + +def count_nonid_corr(test_string: str, quiet=False): + ''' + Scan a wgrib2 print of the correlation between two values and count + how many variables have a non-identity correlation. Any such variables + are printed. + + wgrib2 is assumed to be invoked by the following command: + wgrib2 {fileA} -var -rpn 'sto_1' -import_grib {fileB} -rpn 'rcl_1:print_corr' + + Parameters + ---------- + test_string: str + STDOUT from wgrib2 call. + + quiet: bool, optional + Whether to suppress print messages of non-identy variables and summary. + + Returns + ------- + int + Number of non-identify correlations represented in the string. 
+
+
+    '''
+    pattern = re.compile(r"(\d+:\d+:)(?P<var>.*):rpn_corr=(?P<corr>.*)")
+    matches = [m.groupdict() for m in pattern.finditer(test_string)]
+
+    count = 0
+    for match in matches:
+        if float(match['corr']) != 1.0:
+            count = count + 1
+            if not quiet:
+                print(f"{match['var']}: corr={match['corr']}")
+
+    if not quiet:
+        if count == 0:
+            print("All fields are identical!")
+        else:
+            print(f"{count} variables are different")
+
+    return count
+
+
+if __name__ == '__main__':
+    fileA = sys.argv[1]
+    fileB = sys.argv[2]
+
+    wgrib2_cmd = f"wgrib2 {fileA} -var -rpn 'sto_1' -import_grib {fileB} -rpn 'rcl_1:print_corr'"
+
+    string = subprocess.run(wgrib2_cmd, shell=True, stdout=subprocess.PIPE).stdout.decode("utf-8")
+
+    count_nonid_corr(string)
diff --git a/test/netcdf_op_functions.sh b/test/netcdf_op_functions.sh
new file mode 100644
index 0000000000..0085855ea3
--- /dev/null
+++ b/test/netcdf_op_functions.sh
@@ -0,0 +1,177 @@
+#! /bin/env bash
+
+if [ -t 0 ]; then
+    module load nco/4.9.3
+fi
+
+## NetCDF operator shortcuts
+# From nco.sourceforge.net/nco.html#Filters-for-ncks
+# ncattget $att_nm $var_nm $fl_nm : What attributes does variable have?
+function ncattget { ncks --trd -M -m ${3} | grep -E -i "^${2} attribute [0-9]+: ${1}" | cut -f 11- -d ' ' | sort ; }
+# ncunits $att_val $fl_nm : Which variables have given units?
+function ncunits { ncks --trd -m ${2} | grep -E -i " attribute [0-9]+: units.+ ${1}" | cut -f 1 -d ' ' | sort ; }
+# ncavg $var_nm $fl_nm : What is mean of variable?
+function ncavg {
+    temp_file=${PTMP:-$HOME}/foo.nc
+    ncwa -y avg -O -C -v ${1} ${2} ${temp_file}
+    ncks --trd -H -C -v ${1} ${temp_file} | cut -f 3- -d ' '
+    rm ${temp_file}
+}
+# ncavg $var_nm $fl_nm : What is mean of variable?
+function ncavg {
+    temp_file=${PTMP:-$HOME}/foo.nc
+    ncap2 -O -C -v -s "foo=${1}.avg();print(foo)" ${2} ${temp_file} | cut -f 3- -d ' '
+    rm ${temp_file}
+}
+# ncdmnlst $fl_nm : What dimensions are in file?
+function ncdmnlst { ncks --cdl -m ${1} | cut -d ':' -f 1 | cut -d '=' -s -f 1 ; }
+# ncvardmnlst $var_nm $fl_nm : What dimensions are in a variable?
+function ncvardmnlst { ncks --trd -m -v ${1} ${2} | grep -E -i "^${1} dimension [0-9]+: " | cut -f 4 -d ' ' | sed 's/,//' ; }
+# ncvardmnlatlon $var_nm $fl_nm : Does variable contain both lat and lon dimensions?
+# function ncvardmnlatlon { flg=$(ncks -C -v ${1} -m ${2} | grep -E -i "${1}\(" | grep -E "lat.*lon|lon.*lat") ; [[ ! -z "$flg" ]] && echo "Yes, ${1} has both lat and lon dimensions" || echo "No, ${1} does not have both lat and lon dimensions" }
+# ncdmnsz $dmn_nm $fl_nm : What is dimension size?
+function ncdmnsz { ncks --trd -m -M ${2} | grep -E -i ": ${1}, size =" | cut -f 7 -d ' ' | uniq ; }
+# ncgrplst $fl_nm : What groups are in file?
+function ncgrplst { ncks -m ${1} | grep 'group:' | cut -d ':' -f 2 | cut -d ' ' -f 2 | sort ; }
+# ncvarlst $fl_nm : What variables are in file?
+function ncvarlst { ncks --trd -m ${1} | grep -E ': type' | cut -f 1 -d ' ' | sed 's/://' | sort ; }
+# ncmax $var_nm $fl_nm : What is maximum of variable?
+function ncmax {
+    temp_file=${PTMP:-$HOME}/foo.nc
+    ncwa -y max -O -C -v ${1} ${2} ${temp_file}
+    ncks --trd -H -C -v ${1} ${temp_file} | cut -f 3- -d ' '
+    rm ${temp_file}
+}
+# ncmax $var_nm $fl_nm : What is maximum of variable?
+function ncmax {
+    temp_file=${PTMP:-$HOME}/foo.nc
+    ncap2 -O -C -v -s "foo=${1}.max();print(foo)" ${2} ${temp_file} | cut -f 3- -d ' '
+    rm ${temp_file}
+}
+# ncmdn $var_nm $fl_nm : What is median of variable?
+function ncmdn { + temp_file=${PTMP:-$HOME}/foo.nc + ncap2 -O -C -v -s "foo=gsl_stats_median_from_sorted_data(${1}.sort());print(foo)" ${2} ${temp_file} | cut -f 3- -d ' ' + rm ${temp_file} +} +# ncmin $var_nm $fl_nm : What is minimum of variable? +function ncmin { + temp_file=${PTMP:-$HOME}/foo.nc + ncap2 -O -C -v -s "foo=${1}.min();print(foo)" ${2} ${temp_file} | cut -f 3- -d ' ' + rm ${temp_file} +} +# ncrng $var_nm $fl_nm : What is range of variable? +function ncrng { + temp_file=${PTMP:-$HOME}/foo.nc + ncap2 -O -C -v -s "foo_min=${1}.min();foo_max=${1}.max();print(foo_min,\"%f\");print(\" to \");print(foo_max,\"%f\")" ${2} ${temp_file} + rm ${temp_file} +} +# ncmode $var_nm $fl_nm : What is mode of variable? +function ncmode { + temp_file=${PTMP:-$HOME}/foo.nc + ncap2 -O -C -v -s "foo=gsl_stats_median_from_sorted_data(${1}.sort());print(foo)" ${2} ${temp_file} | cut -f 3- -d ' ' + rm ${temp_file} +} +# ncrecsz $fl_nm : What is record dimension size? +function ncrecsz { ncks --trd -M ${1} | grep -E -i "^Root record dimension 0:" | cut -f 10- -d ' ' ; } +# nctypget $var_nm $fl_nm : What type is variable? +function nctypget { ncks --trd -m -v ${1} ${2} | grep -E -i "^${1}: type" | cut -f 3 -d ' ' | cut -f 1 -d ',' ; } + +function nccorr() { + temp_file=${PTMP:-$HOME}/foo.nc + ncap2 -O -C -v -s "foo_min=${1}.min();foo_max=${1}.max();print(foo_min,\"%f\");print(\" to \");print(foo_max,\"%f\")" ${2} ${temp_file} + rm ${temp_file} +} + +# Heavily modified from original +function nccmp() { + # + # Compare two netcdf files + # + # Uses ncdiff to create a difference of two NetCDFs, then checks to + # make sure all non-coordinate fields of the diff are zero. + # + # Syntax: + # nccmp [-q][-z] fileA fileB coord_file + # + # Arguments: + # fileA, fileB: NetCDFs to be compared + # coord_file: File containing coordinate variables + # + # Options: + # -q: quiet mode (implies -z) + # -z: suppress displaying fields with zero difference + # + # Notes: + # Will create a temporary file .diff in the $PTMP directory + # if PTMP is defined, otherwise .diff is created in the + # current directory. 
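+  # +  # Example (illustrative file names): +  #   nccmp -z dirA/atmf006.nc dirB/atmf006.nc coordinates.lst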
+ # + + local OPTIND + suppress_msg="" + hide_zeros="NO" + quiet="NO" + while getopts ":qz" option; do + case "${option}" in + q) quiet="YES" ;& + z) suppress_msg=" (Suppressing zero difference fields)" + hide_zeros="YES" + ;; + *) echo "Unknown option ${option}" + ;; + esac + done + shift "$((OPTIND-1))" + fileA="${1}" + fileB="${2}" + coord_file="${3:-/dev/null}" + temp_file="${PTMP:-$(pwd)}/.diff" + if [[ ${quiet} == "NO" ]]; then + echo + echo "Comparing ${fileA} and ${fileB}" + fi + # Create diff of the files + ncdiff ${fileA} ${fileB} ${temp_file} --overwrite + if [[ ${quiet} == "NO" ]]; then + echo "Difference report:${suppress_msg}" + echo "(Coordinate variables will always be non-zero)" + fi + count=0 + # Check each variable + for var in $(ncvarlst ${temp_file}); do + if [[ $(egrep -o "^${var}\$" ${coord_file} | wc -l) == 0 ]]; then + # Variable is not in coordinate list + max=$(ncmax $var $temp_file 2> /dev/null) + if [[ -z $max ]]; then + echo "Error reading max of ${var}" + count=$((count + 1)) + continue + fi + min=$(ncmin $var $temp_file 2> /dev/null) + if [[ -z $min ]]; then + echo "Error reading min of ${var}" + count=$((count + 1)) + continue + fi + if [[ ${hide_zeros} == "NO" ]] || (( $(echo "$max != 0 || $min != 0" | bc) )); then + # Min/max is not zero or we are not hiding zeros + echo "${var}: ${min}..${max}" + count=$((count + 1)) + fi + else + # + # ncdiff doesn't difference coordinate variables. Instead coordinates + # are just placed in the diff file. While this is generally what we + # want, when checking for equivalence we need to ignore them. + # + if [[ ${quiet} == "NO" ]]; then + echo "Coordinate ${var} ignored" + fi + fi + done + rm $temp_file + echo "${count} differences found" +} + + diff --git a/test/test_utils.sh b/test/test_utils.sh new file mode 100644 index 0000000000..b00e1d49cf --- /dev/null +++ b/test/test_utils.sh @@ -0,0 +1,26 @@ +#! /bin/env bash + +basename_list() { + # + # Takes a list of paths, determines the base name of each, then + # prepends a base path to it.
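+  # For example (illustrative paths), basename_list 'history/' /rundir/a.nc /rundir/b.nc +  # returns "history/a.nc history/b.nc".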
+ # + # Syntax: + # basename_list base file_in* + # + # Arguments: + # base: Common root directory of all paths in list + # file_in: List of paths relative to $base/ + # + # Returns: + # List of paths constructed by prepending $base to each + # item in $file_in + # + base="${1}" + list="" + + for file_in in "${@:2}"; do + list="$list ${base}$(basename $file_in)" + done + echo $list +} diff --git a/ush/calcanl_gfs.py b/ush/calcanl_gfs.py new file mode 100755 index 0000000000..a325ec35b3 --- /dev/null +++ b/ush/calcanl_gfs.py @@ -0,0 +1,365 @@ +#!/usr/bin/env python +# calcanl_gfs.py +# cory.r.martin@noaa.gov +# 2019-10-11 +# script to run executables to produce netCDF analysis +# on GFS gaussian grid for downstream users +import os +import shutil +import subprocess +import sys +import gsi_utils +from collections import OrderedDict +import datetime + + +# function to calculate analysis from a given increment file and background +def calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix, + ComIn_Ges, GPrefix, + FixDir, atmges_ens_mean, RunDir, NThreads, NEMSGet, IAUHrs, + ExecCMD, ExecCMDMPI, ExecAnl, ExecChgresInc, Cdump): + print('calcanl_gfs beginning at: ', datetime.datetime.utcnow()) + + IAUHH = IAUHrs + + # copy and link files + if DoIAU and l4DEnsVar and Write4Danl: + for fh in IAUHH: + if fh == 6: + # for full res analysis + CalcAnlDir = RunDir + '/calcanl_' + format(fh, '02') + if not os.path.exists(CalcAnlDir): + gsi_utils.make_dir(CalcAnlDir) + gsi_utils.copy_file(ExecAnl, CalcAnlDir + '/calc_anl.x') + gsi_utils.link_file(RunDir + '/siginc.nc', CalcAnlDir + '/siginc.nc.06') + gsi_utils.link_file(RunDir + '/sigf06', CalcAnlDir + '/ges.06') + gsi_utils.link_file(RunDir + '/siganl', CalcAnlDir + '/anl.06') + gsi_utils.copy_file(ExecChgresInc, CalcAnlDir + '/chgres_inc.x') + # for ensemble res analysis + if Cdump in ["gdas", "gfs"]: + CalcAnlDir = RunDir + '/calcanl_ensres_' + format(fh, '02') + if not os.path.exists(CalcAnlDir): + gsi_utils.make_dir(CalcAnlDir) + gsi_utils.copy_file(ExecAnl, CalcAnlDir + '/calc_anl.x') + gsi_utils.link_file(RunDir + '/siginc.nc', CalcAnlDir + '/siginc.nc.06') + gsi_utils.link_file(ComOut + '/' + APrefix + 'atmanl.ensres.nc', CalcAnlDir + '/anl.ensres.06') + gsi_utils.link_file(ComIn_Ges + '/' + GPrefix + 'atmf006.ensres.nc', CalcAnlDir + '/ges.ensres.06') + gsi_utils.link_file(RunDir + '/sigf06', CalcAnlDir + '/ges.06') + else: + if os.path.isfile('sigi' + format(fh, '02') + '.nc'): + # for full res analysis + CalcAnlDir = RunDir + '/calcanl_' + format(fh, '02') + CalcAnlDir6 = RunDir + '/calcanl_' + format(6, '02') + if not os.path.exists(CalcAnlDir): + gsi_utils.make_dir(CalcAnlDir) + if not os.path.exists(CalcAnlDir6): + gsi_utils.make_dir(CalcAnlDir6) + gsi_utils.link_file(ComOut + '/' + APrefix + 'atma' + format(fh, '03') + '.nc', + CalcAnlDir6 + '/anl.' + format(fh, '02')) + gsi_utils.link_file(RunDir + '/siga' + format(fh, '02'), + CalcAnlDir6 + '/anl.' + format(fh, '02')) + gsi_utils.link_file(RunDir + '/sigi' + format(fh, '02') + '.nc', + CalcAnlDir + '/siginc.nc.' + format(fh, '02')) + gsi_utils.link_file(CalcAnlDir6 + '/inc.fullres.' + format(fh, '02'), + CalcAnlDir + '/inc.fullres.' + format(fh, '02')) + gsi_utils.link_file(RunDir + '/sigf' + format(fh, '02'), + CalcAnlDir6 + '/ges.' + format(fh, '02')) + gsi_utils.link_file(RunDir + '/sigf' + format(fh, '02'), + CalcAnlDir + '/ges.' 
+ format(fh, '02')) + gsi_utils.copy_file(ExecChgresInc, CalcAnlDir + '/chgres_inc.x') + # for ensemble res analysis + CalcAnlDir = RunDir + '/calcanl_ensres_' + format(fh, '02') + CalcAnlDir6 = RunDir + '/calcanl_ensres_' + format(6, '02') + if not os.path.exists(CalcAnlDir): + gsi_utils.make_dir(CalcAnlDir) + if not os.path.exists(CalcAnlDir6): + gsi_utils.make_dir(CalcAnlDir6) + gsi_utils.link_file(ComOut + '/' + APrefix + 'atma' + format(fh, '03') + '.ensres.nc', + CalcAnlDir6 + '/anl.ensres.' + format(fh, '02')) + gsi_utils.link_file(RunDir + '/sigi' + format(fh, '02') + '.nc', + CalcAnlDir6 + '/siginc.nc.' + format(fh, '02')) + gsi_utils.link_file(ComIn_Ges + '/' + GPrefix + 'atmf' + format(fh, '03') + '.ensres.nc', + CalcAnlDir6 + '/ges.ensres.' + format(fh, '02')) + + else: + # for full res analysis + CalcAnlDir = RunDir + '/calcanl_' + format(6, '02') + if not os.path.exists(CalcAnlDir): + gsi_utils.make_dir(CalcAnlDir) + gsi_utils.copy_file(ExecAnl, CalcAnlDir + '/calc_anl.x') + gsi_utils.link_file(RunDir + '/siginc.nc', CalcAnlDir + '/siginc.nc.06') + gsi_utils.link_file(RunDir + '/sigf06', CalcAnlDir + '/ges.06') + gsi_utils.link_file(RunDir + '/siganl', CalcAnlDir + '/anl.06') + gsi_utils.copy_file(ExecChgresInc, CalcAnlDir + '/chgres_inc.x') + # for ensemble res analysis + CalcAnlDir = RunDir + '/calcanl_ensres_' + format(6, '02') + if not os.path.exists(CalcAnlDir): + gsi_utils.make_dir(CalcAnlDir) + gsi_utils.copy_file(ExecAnl, CalcAnlDir + '/calc_anl.x') + gsi_utils.link_file(RunDir + '/siginc.nc', CalcAnlDir + '/siginc.nc.06') + gsi_utils.link_file(ComOut + '/' + APrefix + 'atmanl.ensres.nc', CalcAnlDir + '/anl.ensres.06') + gsi_utils.link_file(ComIn_Ges + '/' + GPrefix + 'atmf006.ensres.nc', CalcAnlDir + '/ges.ensres.06') + + # get dimension information from background and increment files + AnlDims = gsi_utils.get_ncdims('siginc.nc') + GesDims = gsi_utils.get_ncdims('sigf06') + + levs = AnlDims['lev'] + LonA = AnlDims['lon'] + LatA = AnlDims['lat'] + LonB = GesDims['grid_xt'] + LatB = GesDims['grid_yt'] + + # vertical coordinate info + levs2 = levs + 1 + siglevel = FixDir + '/global_hyblev.l' + str(levs2) + '.txt' + + # determine how many forecast hours to process + nFH = 0 + for fh in IAUHH: + # first check to see if increment file exists + CalcAnlDir = RunDir + '/calcanl_' + format(fh, '02') + if (os.path.isfile(CalcAnlDir + '/siginc.nc.' + format(fh, '02'))): + print('will process increment file: ' + CalcAnlDir + '/siginc.nc.' + format(fh, '02')) + nFH += 1 + else: + print('Increment file: ' + CalcAnlDir + '/siginc.nc.' + format(fh, '02') + ' does not exist. Skipping.') + + sys.stdout.flush() + # need to gather information about runtime environment + ExecCMD = ExecCMD.replace("$ncmd", "1") + os.environ['OMP_NUM_THREADS'] = str(NThreads) + os.environ['ncmd'] = str(nFH) + ExecCMDMPI1 = ExecCMDMPI.replace("$ncmd", str(1)) + ExecCMDMPI = ExecCMDMPI.replace("$ncmd", str(nFH)) + ExecCMDLevs = ExecCMDMPI.replace("$ncmd", str(levs)) + ExecCMDMPI10 = ExecCMDMPI.replace("$ncmd", str(10)) + + # are we using mpirun with lsf, srun, or aprun with Cray? 
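+    # The launcher name is taken as the first token of the MPI run command +    # (e.g. 'srun' or 'mpiexec'); the host list and the task-count-specific +    # launch commands used below are then assembled for that launcher.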
+ launcher = ExecCMDMPI.split(' ')[0] + if launcher == 'mpirun': + hostfile = os.getenv('LSB_DJOB_HOSTFILE', '') + with open(hostfile) as f: + hosts_tmp = f.readlines() + hosts_tmp = [x.strip() for x in hosts_tmp] + hosts = [] + [hosts.append(x) for x in hosts_tmp if x not in hosts] + nhosts = len(hosts) + ExecCMDMPI_host = 'mpirun -np ' + str(nFH) + ' --hostfile hosts' + tasks = int(os.getenv('LSB_DJOB_NUMPROC', 1)) + if levs > tasks: + ExecCMDMPILevs_host = 'mpirun -np ' + str(tasks) + ' --hostfile hosts' + ExecCMDMPILevs_nohost = 'mpirun -np ' + str(tasks) + else: + ExecCMDMPILevs_host = 'mpirun -np ' + str(levs) + ' --hostfile hosts' + ExecCMDMPILevs_nohost = 'mpirun -np ' + str(levs) + ExecCMDMPI1_host = 'mpirun -np 1 --hostfile hosts' + ExecCMDMPI10_host = 'mpirun -np 10 --hostfile hosts' + elif launcher == 'mpiexec': + hostfile = os.getenv('PBS_NODEFILE', '') + with open(hostfile) as f: + hosts_tmp = f.readlines() + hosts_tmp = [x.strip() for x in hosts_tmp] + hosts = [] + [hosts.append(x) for x in hosts_tmp if x not in hosts] + nhosts = len(hosts) + ExecCMDMPI_host = 'mpiexec -l -n ' + str(nFH) + tasks = int(os.getenv('ntasks', 1)) + print('nhosts,tasks=', nhosts, tasks) + if levs > tasks: + ExecCMDMPILevs_host = 'mpiexec -l -n ' + str(tasks) + ExecCMDMPILevs_nohost = 'mpiexec -l -n ' + str(tasks) + else: + ExecCMDMPILevs_host = 'mpiexec -l -n ' + str(levs) + ExecCMDMPILevs_nohost = 'mpiexec -l -n ' + str(levs) + ExecCMDMPI1_host = 'mpiexec -l -n 1 --cpu-bind depth --depth ' + str(NThreads) + ExecCMDMPI10_host = 'mpiexec -l -n 10 --cpu-bind depth --depth ' + str(NThreads) + elif launcher == 'srun': + nodes = os.getenv('SLURM_JOB_NODELIST', '') + hosts_tmp = subprocess.check_output('scontrol show hostnames ' + nodes, shell=True) + if (sys.version_info > (3, 0)): + hosts_tmp = hosts_tmp.decode('utf-8') + hosts_tmp = str(hosts_tmp).splitlines() + hosts_tmp = [x.strip() for x in hosts_tmp] + else: + hosts_tmp = hosts_tmp.strip() + hosts_tmp = str(hosts_tmp).splitlines() + hosts_tmp = [x.strip() for x in hosts_tmp] + hosts = [] + [hosts.append(x) for x in hosts_tmp if x not in hosts] + nhosts = len(hosts) + ExecCMDMPI_host = 'srun -n ' + str(nFH) + ' --verbose --export=ALL -c 1 --distribution=arbitrary --cpu-bind=cores' + # need to account for when fewer than LEVS tasks are available + tasks = int(os.getenv('SLURM_NPROCS', 1)) + if levs > tasks: + ExecCMDMPILevs_host = 'srun -n ' + str(tasks) + ' --verbose --export=ALL -c 1 --distribution=arbitrary --cpu-bind=cores' + ExecCMDMPILevs_nohost = 'srun -n ' + str(tasks) + ' --verbose --export=ALL' + else: + ExecCMDMPILevs_host = 'srun -n ' + str(levs) + ' --verbose --export=ALL -c 1 --distribution=arbitrary --cpu-bind=cores' + ExecCMDMPILevs_nohost = 'srun -n ' + str(levs) + ' --verbose --export=ALL' + ExecCMDMPI1_host = 'srun -n 1 --verbose --export=ALL -c 1 --distribution=arbitrary --cpu-bind=cores' + ExecCMDMPI10_host = 'srun -n 10 --verbose --export=ALL -c 1 --distribution=arbitrary --cpu-bind=cores' + elif launcher == 'aprun': + hostfile = os.getenv('LSB_DJOB_HOSTFILE', '') + with open(hostfile) as f: + hosts_tmp = f.readlines() + hosts_tmp = [x.strip() for x in hosts_tmp] + hosts = [] + [hosts.append(x) for x in hosts_tmp if x not in hosts] + nhosts = len(hosts) + ExecCMDMPI_host = 'aprun -l hosts -d ' + str(NThreads) + ' -n ' + str(nFH) + ExecCMDMPILevs_host = 'aprun -l hosts -d ' + str(NThreads) + ' -n ' + str(levs) + ExecCMDMPILevs_nohost = 'aprun -d ' + str(NThreads) + ' -n ' + str(levs) + ExecCMDMPI1_host = 'aprun -l hosts -d ' + 
str(NThreads) + ' -n 1' + ExecCMDMPI10_host = 'aprun -l hosts -d ' + str(NThreads) + ' -n 10' + else: + print('unknown MPI launcher. Failure.') + sys.exit(1) + + # generate the full resolution analysis + ihost = 0 + # interpolate increment to full background resolution + for fh in IAUHH: + # first check to see if increment file exists + CalcAnlDir = RunDir + '/calcanl_' + format(fh, '02') + if (os.path.isfile(CalcAnlDir + '/siginc.nc.' + format(fh, '02'))): + print('Interpolating increment for f' + format(fh, '03')) + # set up the namelist + namelist = OrderedDict() + namelist["setup"] = {"lon_out": LonB, + "lat_out": LatB, + "lev": levs, + "infile": "'siginc.nc." + format(fh, '02') + "'", + "outfile": "'inc.fullres." + format(fh, '02') + "'", + } + gsi_utils.write_nml(namelist, CalcAnlDir + '/fort.43') + + if ihost >= nhosts: + ihost = 0 + with open(CalcAnlDir + '/hosts', 'w') as hostfile: + hostfile.write(hosts[ihost] + '\n') + if launcher == 'srun': # need to write host per task not per node for slurm + # For xjet, each instance of chgres_inc must run on two nodes each + if os.getenv('SLURM_JOB_PARTITION', '') == 'xjet': + for a in range(0, 4): + hostfile.write(hosts[ihost] + '\n') + ihost += 1 + for a in range(0, 5): + hostfile.write(hosts[ihost] + '\n') + for a in range(0, 9): # need 9 more of the same host for the 10 tasks for chgres_inc + hostfile.write(hosts[ihost] + '\n') + if launcher == 'srun': + os.environ['SLURM_HOSTFILE'] = CalcAnlDir + '/hosts' + print('interp_inc', fh, namelist) + job = subprocess.Popen(ExecCMDMPI10_host + ' ' + CalcAnlDir + '/chgres_inc.x', shell=True, cwd=CalcAnlDir) + print(ExecCMDMPI10_host + ' ' + CalcAnlDir + '/chgres_inc.x submitted on ' + hosts[ihost]) + sys.stdout.flush() + ec = job.wait() + if ec != 0: + print('Error with chgres_inc.x at forecast hour: f' + format(fh, '03')) + print('Error with chgres_inc.x, exit code=' + str(ec)) + print(locals()) + sys.exit(ec) + ihost += 1 + else: + print('f' + format(fh, '03') + ' is in $IAUFHRS but increment file is missing. Skipping.') + + # generate analysis from interpolated increment + CalcAnlDir6 = RunDir + '/calcanl_' + format(6, '02') + # set up the namelist + namelist = OrderedDict() + namelist["setup"] = {"datapath": "'./'", + "analysis_filename": "'anl'", + "firstguess_filename": "'ges'", + "increment_filename": "'inc.fullres'", + "fhr": 6, + } + + gsi_utils.write_nml(namelist, CalcAnlDir6 + '/calc_analysis.nml') + + # run the executable + if ihost >= nhosts - 1: + ihost = 0 + if launcher == 'srun': + del os.environ['SLURM_HOSTFILE'] + print('fullres_calc_anl', namelist) + fullres_anl_job = subprocess.Popen(ExecCMDMPILevs_nohost + ' ' + CalcAnlDir6 + '/calc_anl.x', shell=True, cwd=CalcAnlDir6) + print(ExecCMDMPILevs_nohost + ' ' + CalcAnlDir6 + '/calc_anl.x submitted') + + sys.stdout.flush() + exit_fullres = fullres_anl_job.wait() + sys.stdout.flush() + if exit_fullres != 0: + print('Error with calc_analysis.x for deterministic resolution, exit code=' + str(exit_fullres)) + print(locals()) + sys.exit(exit_fullres) + + # compute determinstic analysis on ensemble resolution + if Cdump in ["gdas", "gfs"]: + chgres_jobs = [] + for fh in IAUHH: + # first check to see if guess file exists + CalcAnlDir6 = RunDir + '/calcanl_ensres_06' + print(CalcAnlDir6 + '/ges.ensres.' + format(fh, '02')) + if (os.path.isfile(CalcAnlDir6 + '/ges.ensres.' 
+ format(fh, '02'))): + print('Calculating analysis on ensemble resolution for f' + format(fh, '03')) + # generate ensres analysis from interpolated background + # set up the namelist + namelist = OrderedDict() + namelist["setup"] = {"datapath": "'./'", + "analysis_filename": "'anl.ensres'", + "firstguess_filename": "'ges.ensres'", + "increment_filename": "'siginc.nc'", + "fhr": fh, + } + + gsi_utils.write_nml(namelist, CalcAnlDir6 + '/calc_analysis.nml') + + # run the executable + if ihost > nhosts - 1: + ihost = 0 + print('ensres_calc_anl', namelist) + ensres_anl_job = subprocess.Popen(ExecCMDMPILevs_nohost + ' ' + CalcAnlDir6 + '/calc_anl.x', shell=True, cwd=CalcAnlDir6) + print(ExecCMDMPILevs_nohost + ' ' + CalcAnlDir6 + '/calc_anl.x submitted') + + sys.stdout.flush() + # check on analysis steps + exit_ensres = ensres_anl_job.wait() + if exit_ensres != 0: + print('Error with calc_analysis.x for ensemble resolution, exit code=' + str(exit_ensres)) + print(locals()) + sys.exit(exit_ensres) + else: + print('f' + format(fh, '03') + ' is in $IAUFHRS but ensemble resolution guess file is missing. Skipping.') + + print('calcanl_gfs successfully completed at: ', datetime.datetime.utcnow()) + print(locals()) + + +# run the function if this script is called from the command line +if __name__ == '__main__': + DoIAU = gsi_utils.isTrue(os.getenv('DOIAU', 'NO')) + l4DEnsVar = gsi_utils.isTrue(os.getenv('l4densvar', 'NO')) + Write4Danl = gsi_utils.isTrue(os.getenv('lwrite4danl', 'NO')) + ComIn_Ges = os.getenv('COM_ATMOS_HISTORY_PREV', './') + GPrefix = os.getenv('GPREFIX', './') + ComOut = os.getenv('COM_ATMOS_ANALYSIS', './') + APrefix = os.getenv('APREFIX', '') + NThreads = os.getenv('NTHREADS_CHGRES', 1) + FixDir = os.getenv('FIXgsm', './') + atmges_ens_mean = os.getenv('ATMGES_ENSMEAN', './atmges_ensmean') + RunDir = os.getenv('DATA', './') + ExecCMD = os.getenv('APRUN_CALCANL', '') + ExecCMDMPI = os.getenv('APRUN_CALCINC', '') + ExecAnl = os.getenv('CALCANLEXEC', './calc_analysis.x') + ExecChgresInc = os.getenv('CHGRESINCEXEC', './interp_inc.x') + NEMSGet = os.getenv('NEMSIOGET', 'nemsio_get') + IAUHrs = list(map(int, os.getenv('IAUFHRS', '6').split(','))) + Cdump = os.getenv('CDUMP', 'gdas') + + print(locals()) + calcanl_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix, + ComIn_Ges, GPrefix, + FixDir, atmges_ens_mean, RunDir, NThreads, NEMSGet, IAUHrs, + ExecCMD, ExecCMDMPI, ExecAnl, ExecChgresInc, + Cdump) diff --git a/ush/calcinc_gfs.py b/ush/calcinc_gfs.py new file mode 100755 index 0000000000..cb334ac836 --- /dev/null +++ b/ush/calcinc_gfs.py @@ -0,0 +1,87 @@ +#!/usr/bin/env python +# calcinc_gfs.py +# cory.r.martin@noaa.gov +# 2019-10-10 +# script to run calc_increment_ens.x to produce +# increment from background and analysis file difference +import os +import shutil +import subprocess +import sys +import gsi_utils +from collections import OrderedDict + +# main function + + +def calcinc_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix, IAUHrs, + NThreads, IMP_Physics, Inc2Zero, RunDir, Exec, ExecCMD): + # run the calc_increment_ens executable + + # copy and link files + if DoIAU and l4DEnsVar and Write4Danl: + nFH = 0 + for fh in IAUHrs: + nFH += 1 + if fh == 6: + gsi_utils.link_file('sigf06', 'atmges_mem' + format(nFH, '03')) + gsi_utils.link_file('siganl', 'atmanl_mem' + format(nFH, '03')) + gsi_utils.link_file(ComOut + '/' + APrefix + 'atminc.nc', 'atminc_mem' + format(nFH, '03')) + else: + gsi_utils.link_file('sigf' + format(fh, '02'), 'atmges_mem' + format(nFH, '03')) + 
gsi_utils.link_file('siga' + format(fh, '02'), 'atmanl_mem' + format(nFH, '03')) + gsi_utils.link_file(ComOut + '/' + APrefix + 'atmi' + format(fh, '03') + '.nc', 'atminc_mem' + format(nFH, '03')) + else: + nFH = 1 + gsi_utils.link_file('sigf06', 'atmges_mem001') + gsi_utils.link_file('siganl', 'atmanl_mem001') + gsi_utils.link_file(ComOut + '/' + APrefix + 'atminc', 'atminc_mem001') + os.environ['OMP_NUM_THREADS'] = str(NThreads) + os.environ['ncmd'] = str(nFH) + shutil.copy(Exec, RunDir + '/calc_inc.x') + ExecCMD = ExecCMD.replace("$ncmd", str(nFH)) + + # set up the namelist + namelist = OrderedDict() + namelist["setup"] = {"datapath": "'./'", + "analysis_filename": "'atmanl'", + "firstguess_filename": "'atmges'", + "increment_filename": "'atminc'", + "debug": ".false.", + "nens": str(nFH), + "imp_physics": str(IMP_Physics)} + + namelist["zeroinc"] = {"incvars_to_zero": Inc2Zero} + + gsi_utils.write_nml(namelist, RunDir + '/calc_increment.nml') + + # run the executable + try: + err = subprocess.check_call(ExecCMD + ' ' + RunDir + '/calc_inc.x', shell=True) + print(locals()) + except subprocess.CalledProcessError as e: + print('Error with calc_inc.x, exit code=' + str(e.returncode)) + print(locals()) + sys.exit(e.returncode) + + +# run the function if this script is called from the command line +if __name__ == '__main__': + DoIAU = gsi_utils.isTrue(os.getenv('DOIAU', 'NO')) + l4DEnsVar = gsi_utils.isTrue(os.getenv('l4densvar', 'NO')) + Write4Danl = gsi_utils.isTrue(os.getenv('lwrite4danl', 'NO')) + ComOut = os.getenv('COM_ATMOS_ANALYSIS', './') + APrefix = os.getenv('APREFIX', '') + NThreads = os.getenv('NTHREADS_CALCINC', 1) + IMP_Physics = os.getenv('imp_physics', 11) + RunDir = os.getenv('DATA', './') + ExecNC = os.getenv('CALCINCNCEXEC', './calc_increment_ens_ncio.x') + Inc2Zero = os.getenv('INCREMENTS_TO_ZERO', '"NONE"') + ExecCMD = os.getenv('APRUN_CALCINC', '') + IAUHrs = list(map(int, os.getenv('IAUFHRS', '6').split(','))) + + Exec = ExecNC + + print(locals()) + calcinc_gfs(DoIAU, l4DEnsVar, Write4Danl, ComOut, APrefix, IAUHrs, + NThreads, IMP_Physics, Inc2Zero, RunDir, Exec, ExecCMD) diff --git a/ush/compare_f90nml.py b/ush/compare_f90nml.py new file mode 100755 index 0000000000..f3c5573a92 --- /dev/null +++ b/ush/compare_f90nml.py @@ -0,0 +1,107 @@ +#!/usr/bin/env python3 + +import json +import f90nml +from typing import Dict +from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter + + +def get_dict_from_nml(filename: str) -> Dict: + """ + Read a F90 namelist and convert to a dictionary. + This method uses json to convert OrderedDictionary into regular dictionary + Parameters + ---------- + filename: str + Name of the F90 namelist + Returns + ------- + dictionary: Dict + F90 namelist returned as a dictionary + """ + return json.loads(json.dumps(f90nml.read(filename).todict())) + + +def compare_dicts(dict1: Dict, dict2: Dict, path: str = "") -> None: + """ + Compare 2 dictionaries. + This is done by looping over keys in dictionary 1 and searching for them + in dictionary 2. + If a matching key is found, the values are compared. + If a matching key is not found, it is set to as UNDEFINED. + Note: A reverse match is not performed in this method. For reverse matching, use the -r option in the main driver. + Note: This is a recursive method to handle nested dictionaries. 
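+    Note: Differences inside a nested dictionary are printed by the recursive call itself, under the nested group's name.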
+ Parameters + ---------- + dict1: Dict + First dictionary + dict2: Dict + Second dictionary + path: str (optional) + default: "" + key (if nested dictionary) + Returns + ------- + None + """ + + result = dict() + for kk in dict1.keys(): # Loop over all keys of first dictionary + if kk in dict2.keys(): # kk is present in dict2 + if isinstance(dict1[kk], dict): # nested dictionary, go deeper + compare_dicts(dict1[kk], dict2[kk], path=kk) + else: + if dict1[kk] != dict2[kk]: + if path not in result: + result[path] = dict() + result[path][kk] = [dict1[kk], dict2[kk]] + else: # kk is *not* present in dict2 + tt = path if path else kk + if tt not in result: + result[tt] = dict() + result[tt][kk] = [dict1[kk], 'UNDEFINED'] + + def _print_diffs(diff_dict: Dict) -> None: + """ + Print the differences between the two dictionaries to stdout + Parameters + ---------- + diff_dict: Dict + Dictionary containing differences + Returns + ------- + None + """ + for path in diff_dict.keys(): + print(f"{path}:") + max_len = len(max(diff_dict[path], key=len)) + for kk in diff_dict[path].keys(): + items = diff_dict[path][kk] + print( + f"{kk:>{max_len+2}} : {' | '.join(map(str, diff_dict[path][kk]))}") + + _print_diffs(result) + + +if __name__ == "__main__": + + parser = ArgumentParser( + description=("Compare two Fortran namelists and display differences (left_namelist - right_namelist)"), + formatter_class=ArgumentDefaultsHelpFormatter) + parser.add_argument('left_namelist', type=str, help="Left namelist to compare") + parser.add_argument('right_namelist', type=str, help="Right namelist to compare") + parser.add_argument('-r', '--reverse', help='reverse diff (right_namelist - left_namelist)', + action='store_true', required=False) + args = parser.parse_args() + + nml1, nml2 = args.left_namelist, args.right_namelist + if args.reverse: + nml2, nml1 = nml1, nml2 + + dict1 = get_dict_from_nml(nml1) + dict2 = get_dict_from_nml(nml2) + + msg = f"comparing: {nml1} | {nml2}" + print(msg) + print("-" * len(msg)) + compare_dicts(dict1, dict2) diff --git a/ush/cplvalidate.sh b/ush/cplvalidate.sh index 29db7b3ad9..754fa72102 100755 --- a/ush/cplvalidate.sh +++ b/ush/cplvalidate.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#! 
/usr/bin/env bash ##### ## This script validates $confignamevarfornems diff --git a/ush/detect_machine.sh b/ush/detect_machine.sh new file mode 100755 index 0000000000..647722b7a3 --- /dev/null +++ b/ush/detect_machine.sh @@ -0,0 +1,73 @@ +#!/bin/bash + +# First detect w/ hostname +case $(hostname -f) in + + adecflow0[12].acorn.wcoss2.ncep.noaa.gov) MACHINE_ID=wcoss2 ;; ### acorn + alogin0[12].acorn.wcoss2.ncep.noaa.gov) MACHINE_ID=wcoss2 ;; ### acorn + clogin0[1-9].cactus.wcoss2.ncep.noaa.gov) MACHINE_ID=wcoss2 ;; ### cactus01-9 + clogin10.cactus.wcoss2.ncep.noaa.gov) MACHINE_ID=wcoss2 ;; ### cactus10 + dlogin0[1-9].dogwood.wcoss2.ncep.noaa.gov) MACHINE_ID=wcoss2 ;; ### dogwood01-9 + dlogin10.dogwood.wcoss2.ncep.noaa.gov) MACHINE_ID=wcoss2 ;; ### dogwood10 + + gaea9) MACHINE_ID=gaea ;; ### gaea9 + gaea1[0-6]) MACHINE_ID=gaea ;; ### gaea10-16 + gaea9.ncrc.gov) MACHINE_ID=gaea ;; ### gaea9 + gaea1[0-6].ncrc.gov) MACHINE_ID=gaea ;; ### gaea10-16 + + hfe0[1-9]) MACHINE_ID=hera ;; ### hera01-9 + hfe1[0-2]) MACHINE_ID=hera ;; ### hera10-12 + hecflow01) MACHINE_ID=hera ;; ### heraecflow01 + + s4-submit.ssec.wisc.edu) MACHINE_ID=s4 ;; ### s4 + + fe[1-8]) MACHINE_ID=jet ;; ### jet01-8 + tfe[12]) MACHINE_ID=jet ;; ### tjet1-2 + + Orion-login-[1-4].HPC.MsState.Edu) MACHINE_ID=orion ;; ### orion1-4 + + cheyenne[1-6].cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne1-6 + cheyenne[1-6].ib0.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne1-6 + chadmin[1-6].ib0.cheyenne.ucar.edu) MACHINE_ID=cheyenne ;; ### cheyenne1-6 + + login[1-4].stampede2.tacc.utexas.edu) MACHINE_ID=stampede ;; ### stampede1-4 + + login0[1-2].expanse.sdsc.edu) MACHINE_ID=expanse ;; ### expanse1-2 + + discover3[1-5].prv.cube) MACHINE_ID=discover ;; ### discover31-35 + *) MACHINE_ID=UNKNOWN ;; # Unknown platform +esac + +# Overwrite auto-detect with MACHINE if set +MACHINE_ID=${MACHINE:-${MACHINE_ID}} + +# If MACHINE_ID is no longer UNKNOWN, return it +if [[ "${MACHINE_ID}" != "UNKNOWN" ]]; then + return +fi + +# Try searching based on paths since hostname may not match on compute nodes +if [[ -d /lfs/f1 ]] ; then + # We are on NOAA Cactus or Dogwood + MACHINE_ID=wcoss2 +elif [[ -d /mnt/lfs1 ]] ; then + # We are on NOAA Jet + MACHINE_ID=jet +elif [[ -d /scratch1 ]] ; then + # We are on NOAA Hera + MACHINE_ID=hera +elif [[ -d /work ]] ; then + # We are on MSU Orion + MACHINE_ID=orion +elif [[ -d /glade ]] ; then + # We are on NCAR Cheyenne + MACHINE_ID=cheyenne +elif [[ -d /lustre && -d /ncrc ]] ; then + # We are on GAEA.
+ MACHINE_ID=gaea +elif [[ -d /data/prod ]] ; then + # We are on SSEC's S4 + MACHINE_ID=s4 +else + echo WARNING: UNKNOWN PLATFORM 1>&2 +fi diff --git a/ush/drive_makeprepbufr.sh b/ush/drive_makeprepbufr.sh deleted file mode 100755 index ad36904724..0000000000 --- a/ush/drive_makeprepbufr.sh +++ /dev/null @@ -1,138 +0,0 @@ -#!/bin/sh -x -############################################################### -# < next few lines under version control, D O N O T E D I T > -# $Date$ -# $Revision$ -# $Author$ -# $Id$ -############################################################### - -############################################################### -## Author: Rahul Mahajan Org: NCEP/EMC Date: April 2017 - -## Abstract: -## Prepare for analysis driver script -## EXPDIR : /full/path/to/config/files -## CDATE : current analysis date (YYYYMMDDHH) -## CDUMP : cycle name (gdas / gfs) -############################################################### - -############################################################### -# Source relevant configs -configs="base prep prepbufr" -for config in $configs; do - . $EXPDIR/config.${config} - status=$? - [[ $status -ne 0 ]] && exit $status -done - -############################################################### -# Source machine runtime environment -. $BASE_ENV/${machine}.env prepbufr -status=$? -[[ $status -ne 0 ]] && exit $status - -############################################################### -KEEPDATA=${KEEPDATA:-"NO"} -DO_RELOCATE=${DO_RELOCATE:-"NO"} -DONST=${DONST:-"NO"} - -############################################################### -# Set script and dependency variables -export COMPONENT=${COMPONENT:-atmos} - -GDATE=$($NDATE -$assim_freq $CDATE) - -cymd=$(echo $CDATE | cut -c1-8) -chh=$(echo $CDATE | cut -c9-10) -gymd=$(echo $GDATE | cut -c1-8) -ghh=$(echo $GDATE | cut -c9-10) - -OPREFIX="${CDUMP}.t${chh}z." -OSUFFIX=".bufr_d" -GPREFIX="gdas.t${ghh}z." -GSUFFIX=${GSUFFIX:-$SUFFIX} -APREFIX="${CDUMP}.t${chh}z." -ASUFFIX=${ASUFFIX:-$SUFFIX} - -COMIN_OBS=${COMIN_OBS:-"$DMPDIR/${CDUMP}${DUMP_SUFFIX}.${PDY}/${cyc}"} -COMIN_GES=${COMIN_GES:-"$ROTDIR/gdas.$gymd/$ghh/$COMPONENT"} -COMOUT=${COMOUT:-"$ROTDIR/$CDUMP.$cymd/$chh/$COMPONENT"} -[[ ! -d $COMOUT ]] && mkdir -p $COMOUT -export DATA="$RUNDIR/$CDATE/$CDUMP/prepbufr" -[[ -d $DATA ]] && rm -rf $DATA -mkdir -p $DATA -cd $DATA - -############################################################### -# MAKEPREPBUFRSH environment specific variables -export NEMSIO_IN=".true." 
-export COMSP="$DATA/" -export NET=$CDUMP - -############################################################### -# Link observation files in BUFRLIST -for bufrname in $BUFRLIST; do - $NLN $COMIN_OBS/${OPREFIX}${bufrname}.tm00$OSUFFIX ${bufrname}.tm00$OSUFFIX -done - -# Link first guess files -$NLN $COMIN_GES/${GPREFIX}atmf003${GSUFFIX} ./atmgm3$GSUFFIX -$NLN $COMIN_GES/${GPREFIX}atmf006${GSUFFIX} ./atmges$GSUFFIX -$NLN $COMIN_GES/${GPREFIX}atmf009${GSUFFIX} ./atmgp3$GSUFFIX - -[[ -f $COMIN_GES/${GPREFIX}atmf004${GSUFFIX} ]] && $NLN $COMIN_GES/${GPREFIX}atmf004${GSUFFIX} ./atmgm2$GSUFFIX -[[ -f $COMIN_GES/${GPREFIX}atmf005${GSUFFIX} ]] && $NLN $COMIN_GES/${GPREFIX}atmf005${GSUFFIX} ./atmgm1$GSUFFIX -[[ -f $COMIN_GES/${GPREFIX}atmf007${GSUFFIX} ]] && $NLN $COMIN_GES/${GPREFIX}atmf007${GSUFFIX} ./atmgp1$GSUFFIX -[[ -f $COMIN_GES/${GPREFIX}atmf008${GSUFFIX} ]] && $NLN $COMIN_GES/${GPREFIX}atmf008${GSUFFIX} ./atmgp2$GSUFFIX - -# If relocation is turned off: these files don't exist, touch them -if [ $DO_RELOCATE = "NO" ]; then - touch $DATA/tcvitals.relocate.tm00 - touch $DATA/tropcy_relocation_status.tm00 - echo "RECORDS PROCESSED" >> $DATA/tropcy_relocation_status.tm00 -fi - -############################################################### -# if PREPDATA is YES and -# 1. the aircft bufr file is not found, set PREPACQC to NO -# 2. the ****** bufr file is not found, set ******** to NO -if [ $PREPDATA = "YES" ]; then - [[ ! -s aircft.tm00$OSUFFIX ]] && export PREPACQC="NO" -fi - -############################################################### -# Execute MAKEPREPBUFRSH - -echo $(date) EXECUTING $MAKEPREPBUFRSH $CDATE >&2 -$MAKEPREPBUFRSH $CDATE -status=$? -echo $(date) EXITING $MAKEPREPBUFRSH with return code $status >&2 -[[ $status -ne 0 ]] && exit $status - -############################################################### -# Create nsstbufr file -if [ $DONST = "YES" ]; then - SFCSHPBF=${SFCSHPBF:-$COMIN_OBS/sfcshp.$CDUMP.$CDATE} - TESACBF=${TESACBF:-$COMIN_OBS/tesac.$CDUMP.$CDATE} - BATHYBF=${BATHYBF:-$COMIN_OBS/bathy.$CDUMP.$CDATE} - TRKOBBF=${TRKOBBF:-$COMIN_OBS/trkob.$CDUMP.$CDATE} - NSSTBF=${NSSTBF:-$COMOUT/${APREFIX}nsstbufr} - - cat $SFCSHPBF $TESACBF $BATHYBF $TRKOBBF > $NSSTBF - status=$? - echo $(date) CREATE $NSSTBF with return code $status >&2 - - # NSST bufr file must be restricted since it contains unmasked ship ids - chmod 640 $NSSTBF - $CHGRP_CMD $NSSTBF -fi -############################################################### -# Copy prepbufr and prepbufr.acft_profiles to COMOUT -$NCP $DATA/prepda.t${chh}z $COMOUT/${APREFIX}prepbufr -$NCP $DATA/prepbufr.acft_profiles $COMOUT/${APREFIX}prepbufr.acft_profiles - -############################################################### -# Exit out cleanly -if [ $KEEPDATA = "NO" ] ; then rm -rf $DATA ; fi -exit 0 diff --git a/ush/file_utils.sh b/ush/file_utils.sh new file mode 100644 index 0000000000..544a270b0a --- /dev/null +++ b/ush/file_utils.sh @@ -0,0 +1,27 @@ +#! /usr/bin/env bash + +nb_copy() { + # + # TODO - Extend this to take multiple arguments for file_in (like cp) + # + # Copy a file if it exists, print a warning otherwise but don't + # error. 
+ # + # Syntax + # nb_copy file_in file_out + # + # Arguments + # file_in: the file to copy + # file_out: the destination of the copy + # + # Environment variables + # NCP: Command to use to copy (default: cp) + # + local file_in="${1}" + local file_out="${2}" + if [[ -f ${file_in} ]]; then + ${NCP:-cp} ${file_in} ${file_out} + else + echo "WARNING: No file ${file_in} found (pwd: $(pwd))" + fi +} diff --git a/ush/forecast_det.sh b/ush/forecast_det.sh index 5529ce8a83..06329e0762 100755 --- a/ush/forecast_det.sh +++ b/ush/forecast_det.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#! /usr/bin/env bash ##### ## "forecast_det.sh" @@ -8,7 +8,6 @@ ## This script is a definition of functions. ##### - # For all non-evironment variables # Cycling and forecast hour specific parameters @@ -20,54 +19,61 @@ FV3_GFS_det(){ res_latlon_dynamics="''" # Determine if this is a warm start or cold start - if [ -f $gmemdir/RESTART/${sPDY}.${scyc}0000.coupler.res ]; then + if [[ -f "${COM_ATMOS_RESTART_PREV}/${sPDY}.${scyc}0000.coupler.res" ]]; then export warm_start=".true." fi # turn IAU off for cold start DOIAU_coldstart=${DOIAU_coldstart:-"NO"} - if [ $DOIAU = "YES" -a $warm_start = ".false." ] || [ $DOIAU_coldstart = "YES" -a $warm_start = ".true." ]; then + if [ ${DOIAU} = "YES" -a ${warm_start} = ".false." ] || [ ${DOIAU_coldstart} = "YES" -a ${warm_start} = ".true." ]; then export DOIAU="NO" - echo "turning off IAU since warm_start = $warm_start" + echo "turning off IAU since warm_start = ${warm_start}" DOIAU_coldstart="YES" IAU_OFFSET=0 - sCDATE=$CDATE - sPDY=$PDY - scyc=$cyc - tPDY=$sPDY - tcyc=$cyc + sCDATE=${CDATE} + sPDY=${PDY} + scyc=${cyc} + tPDY=${sPDY} + tcyc=${cyc} fi #------------------------------------------------------- # determine if restart IC exists to continue from a previous forecast - RERUN="NO" - filecount=$(find $RSTDIR_ATM -type f | wc -l) - if [ $CDUMP = "gfs" -a $rst_invt1 -gt 0 -a $FHMAX -gt $rst_invt1 -a $filecount -gt 10 ]; then + RERUN=${RERUN:-"NO"} + filecount=$(find "${COM_ATMOS_RESTART:-/dev/null}" -type f | wc -l) + if [[ ( ${CDUMP} = "gfs" || ( ${RUN} = "gefs" && ${CDATE_RST} = "" )) && ${rst_invt1} -gt 0 && ${FHMAX} -gt ${rst_invt1} && ${filecount} -gt 10 ]]; then reverse=$(echo "${restart_interval[@]} " | tac -s ' ') - for xfh in $reverse ; do + for xfh in ${reverse} ; do yfh=$((xfh-(IAU_OFFSET/2))) - SDATE=$($NDATE +$yfh $CDATE) - PDYS=$(echo $SDATE | cut -c1-8) - cycs=$(echo $SDATE | cut -c9-10) - flag1=$RSTDIR_ATM/${PDYS}.${cycs}0000.coupler.res - flag2=$RSTDIR_ATM/coupler.res - if [ -s $flag1 ]; then - CDATE_RST=$SDATE - [[ $RERUN = "YES" ]] && break - mv $flag1 ${flag1}.old - if [ -s $flag2 ]; then mv $flag2 ${flag2}.old ;fi + SDATE=$(${NDATE} ${yfh} "${CDATE}") + PDYS=$(echo "${SDATE}" | cut -c1-8) + cycs=$(echo "${SDATE}" | cut -c9-10) + flag1=${COM_ATMOS_RESTART}/${PDYS}.${cycs}0000.coupler.res + flag2=${COM_ATMOS_RESTART}/coupler.res + + #make sure that the wave restart files also exist if cplwav=true + waverstok=".true." + if [[ "${cplwav}" = ".true." ]]; then + for wavGRD in ${waveGRD} ; do + if [[ ! -f "${COM_WAVE_RESTART}/${PDYS}.${cycs}0000.restart.${wavGRD}" ]]; then + waverstok=".false." + fi + done + fi + + if [[ -s "${flag1}" ]] && [[ ${waverstok} = ".true." 
]]; then + CDATE_RST=${SDATE} + [[ ${RERUN} = "YES" ]] && break + mv "${flag1}" "${flag1}.old" + if [[ -s "${flag2}" ]]; then mv "${flag2}" "${flag2}.old" ;fi RERUN="YES" - [[ $xfh = $rst_invt1 ]] && RERUN="NO" + [[ ${xfh} = ${rst_invt1} ]] && RERUN="NO" fi done fi #------------------------------------------------------- } -FV3_GEFS_det(){ - echo "SUB ${FUNCNAME[0]}: Defining variables for FV3GEFS" -} - WW3_det(){ echo "SUB ${FUNCNAME[0]}: Run type determination for WW3" } diff --git a/ush/forecast_postdet.sh b/ush/forecast_postdet.sh index a79821b03c..adce9f696c 100755 --- a/ush/forecast_postdet.sh +++ b/ush/forecast_postdet.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#! /usr/bin/env bash ##### ## This script defines functions for data I/O and namelist. @@ -11,11 +11,6 @@ ## for execution. ##### -FV3_GEFS_postdet(){ - echo SUB ${FUNCNAME[0]}: Linking input data for FV3 $RUN - # soft link commands insert here -} - DATM_postdet(){ ###################################################################### # Link DATM inputs (ie forcing files) # @@ -45,26 +40,25 @@ FV3_GFS_postdet(){ if [ $RERUN = "NO" ]; then #............................. - # Link all (except sfc_data) restart files from $gmemdir - for file in $(ls $gmemdir/RESTART/${sPDY}.${scyc}0000.*.nc); do + # Link all restart files from previous cycle + for file in "${COM_ATMOS_RESTART_PREV}/${sPDY}.${scyc}0000."*.nc; do file2=$(echo $(basename $file)) file2=$(echo $file2 | cut -d. -f3-) # remove the date from file fsuf=$(echo $file2 | cut -d. -f1) - if [ $fsuf != "sfc_data" ]; then - $NLN $file $DATA/INPUT/$file2 - fi + $NLN $file $DATA/INPUT/$file2 done - # Link sfcanl_data restart files from $memdir - for file in $(ls $memdir/RESTART/${sPDY}.${scyc}0000.*.nc); do - file2=$(echo $(basename $file)) - file2=$(echo $file2 | cut -d. -f3-) # remove the date from file - fsufanl=$(echo $file2 | cut -d. -f1) - if [ $fsufanl = "sfcanl_data" ]; then + # Replace sfc_data with sfcanl_data restart files from current cycle (if found) + if [ "${MODE}" = "cycled" ] && [ "${CCPP_SUITE}" = "FV3_GFS_v16" ]; then # TODO: remove if statement when global_cycle can handle NOAHMP + for file in "${COM_ATMOS_RESTART}/${sPDY}.${scyc}0000."*.nc; do + file2=$(echo $(basename $file)) + file2=$(echo $file2 | cut -d. -f3-) # remove the date from file + fsufanl=$(echo $file2 | cut -d. -f1) file2=$(echo $file2 | sed -e "s/sfcanl_data/sfc_data/g") + rm -f $DATA/INPUT/$file2 $NLN $file $DATA/INPUT/$file2 - fi - done + done + fi # Need a coupler.res when doing IAU if [ $DOIAU = "YES" ]; then @@ -81,9 +75,9 @@ EOF for i in $(echo $IAUFHRS | sed "s/,/ /g" | rev); do incfhr=$(printf %03i $i) if [ $incfhr = "006" ]; then - increment_file=$memdir/${CDUMP}.t${cyc}z.${PREFIX_ATMINC}atminc.nc + increment_file="${COM_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.${PREFIX_ATMINC}atminc.nc" else - increment_file=$memdir/${CDUMP}.t${cyc}z.${PREFIX_ATMINC}atmi${incfhr}.nc + increment_file="${COM_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.${PREFIX_ATMINC}atmi${incfhr}.nc" fi if [ ! -f $increment_file ]; then echo "ERROR: DOIAU = $DOIAU, but missing increment file for fhr $incfhr at $increment_file" @@ -91,12 +85,12 @@ EOF exit 1 fi $NLN $increment_file $DATA/INPUT/fv_increment$i.nc - IAU_INC_FILES="'fv_increment$i.nc',$IAU_INC_FILES" + IAU_INC_FILES="'fv_increment$i.nc',${IAU_INC_FILES:-}" done read_increment=".false." 
res_latlon_dynamics="" else - increment_file=$memdir/${CDUMP}.t${cyc}z.${PREFIX_ATMINC}atminc.nc + increment_file="${COM_ATMOS_ANALYSIS}/${RUN}.t${cyc}z.${PREFIX_ATMINC}atminc.nc" if [ -f $increment_file ]; then $NLN $increment_file $DATA/INPUT/fv3_increment.nc read_increment=".true." @@ -109,7 +103,7 @@ EOF export warm_start=".true." PDYT=$(echo $CDATE_RST | cut -c1-8) cyct=$(echo $CDATE_RST | cut -c9-10) - for file in $(ls $RSTDIR_ATM/${PDYT}.${cyct}0000.*); do + for file in "${COM_ATMOS_RESTART}/${PDYT}.${cyct}0000."*; do file2=$(echo $(basename $file)) file2=$(echo $file2 | cut -d. -f3-) $NLN $file $DATA/INPUT/$file2 @@ -134,7 +128,7 @@ EOF #............................. else ## cold start - for file in $(ls $memdir/INPUT/*.nc); do + for file in "${COM_ATMOS_INPUT}/"*.nc; do file2=$(echo $(basename $file)) fsuf=$(echo $file2 | cut -c1-3) if [ $fsuf = "gfs" -o $fsuf = "sfc" ]; then @@ -144,16 +138,10 @@ EOF fi - if [ $machine = 'sandbox' ]; then - echo SUB ${FUNCNAME[0]}: Checking initial condition, overriden in sandbox mode! - else - nfiles=$(ls -1 $DATA/INPUT/* | wc -l) - if [ $nfiles -le 0 ]; then - echo SUB ${FUNCNAME[0]}: Initial conditions must exist in $DATA/INPUT, ABORT! - msg="SUB ${FUNCNAME[0]}: Initial conditions must exist in $DATA/INPUT, ABORT!" - postmsg "$jlogfile" "$msg" - exit 1 - fi + nfiles=$(ls -1 $DATA/INPUT/* | wc -l) + if [ $nfiles -le 0 ]; then + echo SUB ${FUNCNAME[0]}: Initial conditions must exist in $DATA/INPUT, ABORT! + exit 1 fi # If doing IAU, change forecast hours @@ -166,10 +154,6 @@ EOF #-------------------------------------------------------------------------- # Grid and orography data - for n in $(seq 1 $ntiles); do - $NLN $FIXfv3/$CASE/${CASE}_grid.tile${n}.nc $DATA/INPUT/${CASE}_grid.tile${n}.nc - $NLN $FIXfv3/$CASE/${CASE}_oro_data.tile${n}.nc $DATA/INPUT/oro_data.tile${n}.nc - done if [ $cplflx = ".false." ] ; then $NLN $FIXfv3/$CASE/${CASE}_mosaic.nc $DATA/INPUT/grid_spec.nc @@ -177,20 +161,12 @@ EOF $NLN $FIXfv3/$CASE/${CASE}_mosaic.nc $DATA/INPUT/${CASE}_mosaic.nc fi - # Fractional grid related - if [ $FRAC_GRID = ".true." ]; then - OROFIX=${OROFIX:-"${FIX_DIR}/fix_fv3_fracoro/${CASE}.mx${OCNRES}_frac"} - FIX_SFC=${FIX_SFC:-"${OROFIX}/fix_sfc"} - for n in $(seq 1 $ntiles); do - $NLN ${OROFIX}/oro_${CASE}.mx${OCNRES}.tile${n}.nc $DATA/INPUT/oro_data.tile${n}.nc - done - else - OROFIX=${OROFIX:-"${FIXfv3}/${CASE}"} - FIX_SFC=${FIX_SFC:-"${OROFIX}/fix_sfc"} - for n in $(seq 1 $ntiles); do - $NLN ${OROFIX}/${CASE}_oro_data.tile${n}.nc $DATA/INPUT/oro_data.tile${n}.nc - done - fi + OROFIX=${OROFIX:-"${FIX_DIR}/orog/${CASE}.mx${OCNRES}_frac"} + FIX_SFC=${FIX_SFC:-"${OROFIX}/fix_sfc"} + for n in $(seq 1 $ntiles); do + $NLN ${OROFIX}/oro_${CASE}.mx${OCNRES}.tile${n}.nc $DATA/INPUT/oro_data.tile${n}.nc + $NLN ${OROFIX}/${CASE}_grid.tile${n}.nc $DATA/INPUT/${CASE}_grid.tile${n}.nc + done export CCPP_SUITE=${CCPP_SUITE:-"FV3_GFS_v16"} _suite_file=$HOMEgfs/sorc/ufs_model.fd/FV3/ccpp/suites/suite_${CCPP_SUITE}.xml @@ -200,7 +176,7 @@ EOF exit 2 fi - # Scan suite file to determine whether it uses Noah-MP ( Noah-MP #2, RUC-LSM #3, Noah #1 ) + # Scan suite file to determine whether it uses Noah-MP if [ $(grep noahmpdrv ${_suite_file} | wc -l ) -gt 0 ]; then lsm="2" lheatstrg=".false." @@ -219,19 +195,10 @@ EOF iopt_stc=${iopt_stc:-"3"} IALB=${IALB:-2} IEMS=${IEMS:-2} - elif [ $(grep lsm_ruc ${_suite_file} | wc -l ) -gt 0 ]; then - lsm="3" - lsoil_lsm=9 - lheatstrg=".false." - landice=".false." else lsm="1" lheatstrg=".true." 
- if [[ "$CCPP_SUITE" == "FV3_RAP_cires_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_unified_ugwp" || "$CCPP_SUITE" == "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then ## JKH - landice=".false." - else - landice=".true." - fi + landice=".true." iopt_dveg=${iopt_dveg:-"1"} iopt_crs=${iopt_crs:-"1"} iopt_btr=${iopt_btr:-"1"} @@ -249,11 +216,8 @@ EOF fi # Files for GWD - OROFIX_ugwd=${OROFIX_ugwd:-"${FIX_DIR}/fix_ugwd"} - if [[ "$CCPP_SUITE" != "FV3_RAP_cires_ugwp" && "$CCPP_SUITE" != "FV3_RAP_noah_sfcdiff_unified_ugwp" && "$CCPP_SUITE" != "FV3_RAP_noah_sfcdiff_ugwpv1" ]] ; then ## JKH - - $NLN ${OROFIX_ugwd}/ugwp_limb_tau.nc $DATA/ugwp_limb_tau.nc - fi + OROFIX_ugwd=${OROFIX_ugwd:-"${FIX_DIR}/ugwd"} + $NLN ${OROFIX_ugwd}/ugwp_limb_tau.nc $DATA/ugwp_limb_tau.nc for n in $(seq 1 $ntiles); do $NLN ${OROFIX_ugwd}/$CASE/${CASE}_oro_data_ls.tile${n}.nc $DATA/INPUT/oro_data_ls.tile${n}.nc $NLN ${OROFIX_ugwd}/$CASE/${CASE}_oro_data_ss.tile${n}.nc $DATA/INPUT/oro_data_ss.tile${n}.nc @@ -279,8 +243,8 @@ EOF if [ $imp_physics -eq 8 ]; then $NLN $FIX_AM/CCN_ACTIVATE.BIN $DATA/CCN_ACTIVATE.BIN $NLN $FIX_AM/freezeH2O.dat $DATA/freezeH2O.dat - $NLN $FIX_AM/qr_acr_qgV2.dat $DATA/qr_acr_qgV2.dat - $NLN $FIX_AM/qr_acr_qsV2.dat $DATA/qr_acr_qsV2.dat + $NLN $FIX_AM/qr_acr_qgV2.dat $DATA/qr_acr_qgV2.dat + $NLN $FIX_AM/qr_acr_qsV2.dat $DATA/qr_acr_qsV2.dat fi $NLN $FIX_AM/${O3FORC} $DATA/global_o3prdlos.f77 @@ -290,12 +254,12 @@ EOF ## merra2 aerosol climo if [ $IAER -eq "1011" ]; then - FIX_AER="${FIX_DIR}/fix_aer" + FIX_AER="${FIX_DIR}/aer" for month in $(seq 1 12); do MM=$(printf %02d $month) $NLN "${FIX_AER}/merra2.aerclim.2003-2014.m${MM}.nc" "aeroclim.m${MM}.nc" done - FIX_LUT="${FIX_DIR}/fix_lut" + FIX_LUT="${FIX_DIR}/lut" $NLN $FIX_LUT/optics_BC.v1_3.dat $DATA/optics_BC.dat $NLN $FIX_LUT/optics_OC.v1_3.dat $DATA/optics_OC.dat $NLN $FIX_LUT/optics_DU.v15_3.dat $DATA/optics_DU.dat @@ -321,9 +285,9 @@ EOF # inline post fix files if [ $WRITE_DOPOST = ".true." ]; then $NLN $PARM_POST/post_tag_gfs${LEVS} $DATA/itag - $NLN $PARM_POST/postxconfig-NT-GFS-TWO.txt $DATA/postxconfig-NT.txt - $NLN $PARM_POST/postxconfig-NT-GFS-F00-TWO.txt $DATA/postxconfig-NT_FH00.txt - $NLN $PARM_POST/params_grib2_tbl_new $DATA/params_grib2_tbl_new + $NLN ${FLTFILEGFS:-$PARM_POST/postxconfig-NT-GFS-TWO.txt} $DATA/postxconfig-NT.txt + $NLN ${FLTFILEGFSF00:-$PARM_POST/postxconfig-NT-GFS-F00-TWO.txt} $DATA/postxconfig-NT_FH00.txt + $NLN ${POSTGRB2TBL:-$PARM_POST/params_grib2_tbl_new} $DATA/params_grib2_tbl_new fi #------------------------------------------------------------------ @@ -355,7 +319,7 @@ EOF FNTSFC=${FNTSFC:-"$FIX_AM/RTGSST.1982.2012.monthly.clim.grb"} FNSNOC=${FNSNOC:-"$FIX_AM/global_snoclim.1.875.grb"} FNZORC=${FNZORC:-"igbp"} - FNAISC=${FNAISC:-"$FIX_AM/CFSR.SEAICE.1982.2012.monthly.clim.grb"} + FNAISC=${FNAISC:-"$FIX_AM/IMS-NIC.blended.ice.monthly.clim.grb"} FNALBC2=${FNALBC2:-"${FIX_SFC}/${CASE}.facsf.tileX.nc"} FNTG3C=${FNTG3C:-"${FIX_SFC}/${CASE}.substrate_temperature.tileX.nc"} FNVEGC=${FNVEGC:-"${FIX_SFC}/${CASE}.vegetation_greenness.tileX.nc"} @@ -515,29 +479,24 @@ EOF JCAP_STP=${JCAP_STP:-$JCAP_CASE} LONB_STP=${LONB_STP:-$LONB_CASE} LATB_STP=${LATB_STP:-$LATB_CASE} - cd $DATA - - affix="nc" - if [ "$OUTPUT_FILE" = "nemsio" ]; then - affix="nemsio" - fi - + if [[ ! -d ${COM_ATMOS_HISTORY} ]]; then mkdir -p ${COM_ATMOS_HISTORY}; fi + if [[ ! -d ${COM_ATMOS_MASTER} ]]; then mkdir -p ${COM_ATMOS_MASTER}; fi if [ $QUILTING = ".true." 
-a $OUTPUT_GRID = "gaussian_grid" ]; then fhr=$FHMIN for fhr in $OUTPUT_FH; do FH3=$(printf %03i $fhr) FH2=$(printf %02i $fhr) - atmi=atmf${FH3}.$affix - sfci=sfcf${FH3}.$affix - logi=logf${FH3} + atmi=atmf${FH3}.nc + sfci=sfcf${FH3}.nc + logi=log.atm.f${FH3} pgbi=GFSPRS.GrbF${FH2} flxi=GFSFLX.GrbF${FH2} - atmo=$memdir/${CDUMP}.t${cyc}z.atmf${FH3}.$affix - sfco=$memdir/${CDUMP}.t${cyc}z.sfcf${FH3}.$affix - logo=$memdir/${CDUMP}.t${cyc}z.logf${FH3}.txt - pgbo=$memdir/${CDUMP}.t${cyc}z.master.grb2f${FH3} - flxo=$memdir/${CDUMP}.t${cyc}z.sfluxgrbf${FH3}.grib2 + atmo=${COM_ATMOS_HISTORY}/${RUN}.t${cyc}z.atmf${FH3}.nc + sfco=${COM_ATMOS_HISTORY}/${RUN}.t${cyc}z.sfcf${FH3}.nc + logo=${COM_ATMOS_HISTORY}/${RUN}.t${cyc}z.atm.logf${FH3}.txt + pgbo=${COM_ATMOS_MASTER}/${RUN}.t${cyc}z.master.grb2f${FH3} + flxo=${COM_ATMOS_MASTER}/${RUN}.t${cyc}z.sfluxgrbf${FH3}.grib2 eval $NLN $atmo $atmi eval $NLN $sfco $sfci eval $NLN $logo $logi @@ -548,11 +507,11 @@ EOF done else for n in $(seq 1 $ntiles); do - eval $NLN nggps2d.tile${n}.nc $memdir/nggps2d.tile${n}.nc - eval $NLN nggps3d.tile${n}.nc $memdir/nggps3d.tile${n}.nc - eval $NLN grid_spec.tile${n}.nc $memdir/grid_spec.tile${n}.nc - eval $NLN atmos_static.tile${n}.nc $memdir/atmos_static.tile${n}.nc - eval $NLN atmos_4xdaily.tile${n}.nc $memdir/atmos_4xdaily.tile${n}.nc + eval $NLN nggps2d.tile${n}.nc ${COM_ATMOS_HISTORY}/nggps2d.tile${n}.nc + eval $NLN nggps3d.tile${n}.nc ${COM_ATMOS_HISTORY}/nggps3d.tile${n}.nc + eval $NLN grid_spec.tile${n}.nc ${COM_ATMOS_HISTORY}/grid_spec.tile${n}.nc + eval $NLN atmos_static.tile${n}.nc ${COM_ATMOS_HISTORY}/atmos_static.tile${n}.nc + eval $NLN atmos_4xdaily.tile${n}.nc ${COM_ATMOS_HISTORY}/atmos_4xdaily.tile${n}.nc done fi } @@ -560,10 +519,6 @@ EOF FV3_GFS_nml(){ # namelist output for a certain component echo SUB ${FUNCNAME[0]}: Creating name lists and model configure file for FV3 - if [ $machine = 'sandbox' ]; then - cd $SCRIPTDIR - echo "MAIN: !!!Sandbox mode, writing to current directory!!!" 
- fi # Call child scripts in current script directory source $SCRIPTDIR/parsing_namelists_FV3.sh FV3_namelists @@ -587,16 +542,16 @@ data_out_GFS() { if [ $SEND = "YES" ]; then # Copy model restart files - if [ $CDUMP = "gdas" -a $rst_invt1 -gt 0 ]; then + if [[ ${RUN} =~ "gdas" ]] && (( rst_invt1 > 0 )); then cd $DATA/RESTART - mkdir -p $memdir/RESTART + mkdir -p "${COM_ATMOS_RESTART}" for rst_int in $restart_interval ; do if [ $rst_int -ge 0 ]; then RDATE=$($NDATE +$rst_int $CDATE) rPDY=$(echo $RDATE | cut -c1-8) rcyc=$(echo $RDATE | cut -c9-10) - for file in $(ls ${rPDY}.${rcyc}0000.*) ; do - $NCP $file $memdir/RESTART/$file + for file in "${rPDY}.${rcyc}0000."* ; do + ${NCP} "${file}" "${COM_ATMOS_RESTART}/${file}" done fi done @@ -609,65 +564,85 @@ data_out_GFS() { RDATE=$($NDATE +$rst_iau $CDATE) rPDY=$(echo $RDATE | cut -c1-8) rcyc=$(echo $RDATE | cut -c9-10) - for file in $(ls ${rPDY}.${rcyc}0000.*) ; do - $NCP $file $memdir/RESTART/$file + for file in "${rPDY}.${rcyc}0000."* ; do + ${NCP} "${file}" "${COM_ATMOS_RESTART}/${file}" done fi - elif [ $CDUMP = "gfs" ]; then - $NCP $DATA/input.nml $ROTDIR/${CDUMP}.${PDY}/${cyc}/atmos/ - $NCP $DATA/model_configure $ROTDIR/${CDUMP}.${PDY}/${cyc}/atmos/ # GSL + elif [[ ${RUN} =~ "gfs" ]]; then + ${NCP} "${DATA}/input.nml" "${COM_ATMOS_HISTORY}/input.nml" fi fi echo "SUB ${FUNCNAME[0]}: Output data for FV3 copied" } + WW3_postdet() { echo "SUB ${FUNCNAME[0]}: Linking input data for WW3" COMPONENTwave=${COMPONENTwave:-${RUN}wave} #Link mod_def files for wave grids - array=($WAVECUR_FID $WAVEICE_FID $WAVEWND_FID $waveuoutpGRD $waveGRD $waveesmfGRD $wavesbsGRD $wavepostGRD $waveinterpGRD) - echo "Wave Grids: $WAVECUR_FID $WAVEICE_FID $WAVEWND_FID $waveuoutpGRD $waveGRD $waveesmfGRD $wavesbsGRD $wavepostGRD $waveinterpGRD" - grdALL=$(printf "%s\n" "${array[@]}" | sort -u | tr '\n' ' ') + if [ $waveMULTIGRID = ".true." ]; then + array=($WAVECUR_FID $WAVEICE_FID $WAVEWND_FID $waveuoutpGRD $waveGRD $waveesmfGRD) + echo "Wave Grids: $WAVECUR_FID $WAVEICE_FID $WAVEWND_FID $waveuoutpGRD $waveGRD $waveesmfGRD" + grdALL=$(printf "%s\n" "${array[@]}" | sort -u | tr '\n' ' ') - for wavGRD in ${grdALL}; do - $NCP $ROTDIR/${CDUMP}.${PDY}/${cyc}/wave/rundata/${COMPONENTwave}.mod_def.$wavGRD $DATA/mod_def.$wavGRD - done + for wavGRD in ${grdALL}; do + ${NCP} "${COM_WAVE_PREP}/${COMPONENTwave}.mod_def.${wavGRD}" "${DATA}/mod_def.${wavGRD}" + done + else + #if shel, only 1 waveGRD which is linked to mod_def.ww3 + ${NCP} "${COM_WAVE_PREP}/${COMPONENTwave}.mod_def.${waveGRD}" "${DATA}/mod_def.ww3" + fi + + + #if wave mesh is not the same as the ocn/ice mesh, linkk it in the file + comparemesh=${MESH_OCN_ICE:-"mesh.mx${ICERES}.nc"} + if [ "$MESH_WAV" = "$comparemesh" ]; then + echo "Wave is on same mesh as ocean/ice" + else + $NLN -sf $FIXwave/$MESH_WAV $DATA/ + fi - export WAVHCYC=${WAVHCYC:-6} - export WRDATE=$($NDATE -${WAVHCYC} $CDATE) - export WRPDY=$(echo $WRDATE | cut -c1-8) - export WRcyc=$(echo $WRDATE | cut -c9-10) - export WRDIR=${ROTDIR}/${CDUMPRSTwave}.${WRPDY}/${WRcyc}/wave/restart - export RSTDIR_WAVE=$ROTDIR/${CDUMP}.${PDY}/${cyc}/wave/restart - export datwave=$COMOUTwave/rundata - export wavprfx=${CDUMPwave}${WAV_MEMBER} + export wavprfx=${RUNwave}${WAV_MEMBER:-} #Copy initial condition files: for wavGRD in $waveGRD ; do if [ $warm_start = ".true." 
-o $RERUN = "YES" ]; then if [ $RERUN = "NO" ]; then - waverstfile=${WRDIR}/${sPDY}.${scyc}0000.restart.${wavGRD} + waverstfile=${COM_WAVE_RESTART_PREV}/${sPDY}.${scyc}0000.restart.${wavGRD} else - waverstfile=${RSTDIR_WAVE}/${PDYT}.${cyct}0000.restart.${wavGRD} + waverstfile=${COM_WAVE_RESTART}/${PDYT}.${cyct}0000.restart.${wavGRD} fi else - waverstfile=${RSTDIR_WAVE}/${sPDY}.${scyc}0000.restart.${wavGRD} + waverstfile=${COM_WAVE_RESTART}/${sPDY}.${scyc}0000.restart.${wavGRD} fi if [ ! -f ${waverstfile} ]; then - echo "WARNING: NON-FATAL ERROR wave IC is missing, will start from rest" + if [ $RERUN = "NO" ]; then + echo "WARNING: NON-FATAL ERROR wave IC is missing, will start from rest" + else + echo "ERROR: Wave IC is missing in RERUN, exiting." + exit 1 + fi else - $NLN ${waverstfile} $DATA/restart.${wavGRD} + if [ $waveMULTIGRID = ".true." ]; then + $NLN ${waverstfile} $DATA/restart.${wavGRD} + else + $NLN ${waverstfile} $DATA/restart.ww3 + fi fi - done + done - for wavGRD in $waveGRD ; do - eval $NLN $datwave/${wavprfx}.log.${wavGRD}.${PDY}${cyc} log.${wavGRD} - done + if [ $waveMULTIGRID = ".true." ]; then + for wavGRD in $waveGRD ; do + ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.log.${wavGRD}.${PDY}${cyc}" "log.${wavGRD}" + done + else + ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.log.${waveGRD}.${PDY}${cyc}" "log.ww3" + fi if [ "$WW3ICEINP" = "YES" ]; then - wavicefile=$COMINwave/rundata/${CDUMPwave}.${WAVEICE_FID}.${cycle}.ice + wavicefile="${COM_WAVE_PREP}/${RUNwave}.${WAVEICE_FID}.${cycle}.ice" if [ ! -f $wavicefile ]; then echo "ERROR: WW3ICEINP = ${WW3ICEINP}, but missing ice file" echo "Abort!" @@ -677,7 +652,7 @@ WW3_postdet() { fi if [ "$WW3CURINP" = "YES" ]; then - wavcurfile=$COMINwave/rundata/${CDUMPwave}.${WAVECUR_FID}.${cycle}.cur + wavcurfile="${COM_WAVE_PREP}/${RUNwave}.${WAVECUR_FID}.${cycle}.cur" if [ ! -f $wavcurfile ]; then echo "ERROR: WW3CURINP = ${WW3CURINP}, but missing current file" echo "Abort!" @@ -686,9 +661,13 @@ WW3_postdet() { $NLN $wavcurfile $DATA/current.${WAVECUR_FID} fi + if [[ ! -d ${COM_WAVE_HISTORY} ]]; then mkdir -p "${COM_WAVE_HISTORY}"; fi + # Link output files cd $DATA - eval $NLN $datwave/${wavprfx}.log.mww3.${PDY}${cyc} log.mww3 + if [ $waveMULTIGRID = ".true." ]; then + ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.log.mww3.${PDY}${cyc}" "log.mww3" + fi # Loop for gridded output (uses FHINC) fhr=$FHMIN_WAV @@ -696,9 +675,13 @@ WW3_postdet() { YMDH=$($NDATE $fhr $CDATE) YMD=$(echo $YMDH | cut -c1-8) HMS="$(echo $YMDH | cut -c9-10)0000" - for wavGRD in ${waveGRD} ; do - eval $NLN $datwave/${wavprfx}.out_grd.${wavGRD}.${YMD}.${HMS} $DATA/${YMD}.${HMS}.out_grd.${wavGRD} - done + if [ $waveMULTIGRID = ".true." ]; then + for wavGRD in ${waveGRD} ; do + ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.out_grd.${wavGRD}.${YMD}.${HMS}" "${DATA}/${YMD}.${HMS}.out_grd.${wavGRD}" + done + else + ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.out_grd.${waveGRD}.${YMD}.${HMS}" "${DATA}/${YMD}.${HMS}.out_grd.ww3" + fi FHINC=$FHOUT_WAV if [ $FHMAX_HF_WAV -gt 0 -a $FHOUT_HF_WAV -gt 0 -a $fhr -lt $FHMAX_HF_WAV ]; then FHINC=$FHOUT_HF_WAV @@ -712,7 +695,12 @@ WW3_postdet() { YMDH=$($NDATE $fhr $CDATE) YMD=$(echo $YMDH | cut -c1-8) HMS="$(echo $YMDH | cut -c9-10)0000" - eval $NLN $datwave/${wavprfx}.out_pnt.${waveuoutpGRD}.${YMD}.${HMS} $DATA/${YMD}.${HMS}.out_pnt.${waveuoutpGRD} + if [ $waveMULTIGRID = ".true." 
]; then + ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.out_pnt.${waveuoutpGRD}.${YMD}.${HMS}" "${DATA}/${YMD}.${HMS}.out_pnt.${waveuoutpGRD}" + else + ${NLN} "${COM_WAVE_HISTORY}/${wavprfx}.out_pnt.${waveuoutpGRD}.${YMD}.${HMS}" "${DATA}/${YMD}.${HMS}.out_pnt.ww3" + fi + FHINC=$FHINCP_WAV fhr=$((fhr+FHINC)) done @@ -720,13 +708,19 @@ WW3_postdet() { WW3_nml() { echo "SUB ${FUNCNAME[0]}: Copying input files for WW3" - WAV_MOD_TAG=${CDUMP}wave${waveMEMB} + WAV_MOD_TAG=${RUN}wave${waveMEMB} if [ "${USE_WAV_RMP:-YES}" = "YES" ]; then - for file in $(ls $COMINwave/rundata/rmp_src_to_dst_conserv_*) ; do - $NLN $file $DATA/ - done - fi - $NLN $COMINwave/rundata/ww3_multi.${CDUMPwave}${WAV_MEMBER}.${cycle}.inp $DATA/ww3_multi.inp + if (( $( ls -1 $FIXwave/rmp_src_to_dst_conserv_* 2> /dev/null | wc -l) > 0 )); then + for file in $(ls $FIXwave/rmp_src_to_dst_conserv_*) ; do + $NLN $file $DATA/ + done + else + echo 'FATAL ERROR : No rmp precomputed nc files found for wave model' + exit 4 + fi + fi + source $SCRIPTDIR/parsing_namelists_WW3.sh + WW3_namelists } WW3_out() { @@ -737,23 +731,39 @@ WW3_out() { CPL_out() { echo "SUB ${FUNCNAME[0]}: Copying output data for general cpl fields" if [ $esmf_profile = ".true." ]; then - $NCP $DATA/ESMF_Profile.summary $ROTDIR/$CDUMP.$PDY/$cyc/ + ${NCP} "${DATA}/ESMF_Profile.summary" "${COM_ATMOS_HISTORY}/ESMF_Profile.summary" fi } MOM6_postdet() { echo "SUB ${FUNCNAME[0]}: MOM6 after run type determination" - OCNRES=${OCNRES:-"025"} - # Copy MOM6 ICs - $NCP -pf $ICSDIR/$CDATE/ocn/MOM*nc $DATA/INPUT/ + ${NLN} "${COM_OCEAN_RESTART_PREV}/${PDY}.${cyc}0000.MOM.res.nc" "${DATA}/INPUT/MOM.res.nc" + case $OCNRES in + "025") + for nn in $(seq 1 4); do + if [[ -f "${COM_OCEAN_RESTART_PREV}/${PDY}.${cyc}0000.MOM.res_${nn}.nc" ]]; then + ${NLN} "${COM_OCEAN_RESTART_PREV}/${PDY}.${cyc}0000.MOM.res_${nn}.nc" "${DATA}/INPUT/MOM.res_${nn}.nc" + fi + done + ;; + esac + + # Link increment + if [[ "${DO_JEDIOCNVAR:-NO}" = "YES" ]]; then + if [[ ! -f "${COM_OCEAN_ANALYSIS}/${RUN}.t${cyc}z.ocninc.nc" ]]; then + echo "FATAL ERROR: Ocean increment not found, ABORT!" + exit 111 + fi + ${NLN} "${COM_OCEAN_ANALYSIS}/${RUN}.t${cyc}z.ocninc.nc" "${DATA}/INPUT/mom6_increment.nc" + fi # Copy MOM6 fixed files $NCP -pf $FIXmom/$OCNRES/* $DATA/INPUT/ # Copy coupled grid_spec - spec_file="$FIX_DIR/fix_cpl/a${CASE}o${OCNRES}/grid_spec.nc" + spec_file="$FIX_DIR/cpl/a${CASE}o${OCNRES}/grid_spec.nc" if [ -s $spec_file ]; then $NCP -pf $spec_file $DATA/INPUT/ else @@ -761,12 +771,29 @@ MOM6_postdet() { exit 3 fi - # Copy mediator restart files to RUNDIR - if [ $warm_start = ".true." -o $RERUN = "YES" ]; then - $NCP $ROTDIR/$CDUMP.$PDY/$cyc/med/ufs.cpld*.nc $DATA/ - $NCP $ROTDIR/$CDUMP.$PDY/$cyc/med/rpointer.cpl $DATA/ + # Copy mediator restart files to RUNDIR # TODO: mediator should have its own CMEPS_postdet() function + if [[ $warm_start = ".true." ]]; then + local mediator_file="${COM_MED_RESTART}/${PDY}.${cyc}0000.ufs.cpld.cpl.r.nc" + if [[ -f "${mediator_file}" ]]; then + ${NCP} "${mediator_file}" "${DATA}/ufs.cpld.cpl.r.nc" + rm -f "${DATA}/rpointer.cpl" + touch "${DATA}/rpointer.cpl" + echo "ufs.cpld.cpl.r.nc" >> "${DATA}/rpointer.cpl" + else + # We have a choice to make here. + # Either we can FATAL ERROR out, or we can let the coupling fields initialize from zero + # cmeps_run_type is determined based on the availability of the mediator restart file + echo "WARNING: ${mediator_file} does not exist for warm_start = .true., initializing!" 
+ #echo "FATAL ERROR: ${mediator_file} must exist for warm_start = .true. and does not, ABORT!" + #exit 4 + fi + else + # This is a cold start, so initialize the coupling fields from zero + export cmeps_run_type="startup" fi + # If using stochatic parameterizations, create a seed that does not exceed the + # largest signed integer if [ $DO_OCN_SPPT = "YES" -o $DO_OCN_PERT_EPBL = "YES" ]; then if [ ${SET_STP_SEED:-"YES"} = "YES" ]; then ISEED_OCNSPPT=$(( (CDATE*1000 + MEMBER*10 + 6) % 2147483647 )) @@ -776,56 +803,117 @@ MOM6_postdet() { fi fi + # Create COMOUTocean + [[ ! -d ${COM_OCEAN_HISTORY} ]] && mkdir -p "${COM_OCEAN_HISTORY}" + # Link output files + if [[ "${RUN}" =~ "gfs" ]]; then + # Link output files for RUN = gfs - export ENSMEM=${ENSMEM:-01} - export IDATE=$CDATE + # TODO: get requirements on what files need to be written out and what these dates here are and what they mean + export ENSMEM=${ENSMEM:-01} + export IDATE=$CDATE - [[ ! -d $COMOUTocean ]] && mkdir -p $COMOUTocean + fhrlst=${OUTPUT_FH} + if [[ ! -d ${COM_OCEAN_HISTORY} ]]; then mkdir -p ${COM_OCEAN_HISTORY}; fi - fhrlst=$OUTPUT_FH + for fhr in $fhrlst; do + if [ $fhr = 'anl' ]; then # Looking at OUTPUT_FH, this is never true, TODO: remove this block + continue + fi + if [ -z ${last_fhr:-} ]; then + last_fhr=$fhr + continue + fi + (( interval = fhr - last_fhr )) + (( midpoint = last_fhr + interval/2 )) + VDATE=$($NDATE $fhr $IDATE) + YYYY=$(echo $VDATE | cut -c1-4) + MM=$(echo $VDATE | cut -c5-6) + DD=$(echo $VDATE | cut -c7-8) + HH=$(echo $VDATE | cut -c9-10) + SS=$((10#$HH*3600)) + + VDATE_MID=$($NDATE $midpoint $IDATE) + YYYY_MID=$(echo $VDATE_MID | cut -c1-4) + MM_MID=$(echo $VDATE_MID | cut -c5-6) + DD_MID=$(echo $VDATE_MID | cut -c7-8) + HH_MID=$(echo $VDATE_MID | cut -c9-10) + SS_MID=$((10#$HH_MID*3600)) + + source_file="ocn_${YYYY_MID}_${MM_MID}_${DD_MID}_${HH_MID}.nc" + dest_file="ocn${VDATE}.${ENSMEM}.${IDATE}.nc" + ${NLN} ${COM_OCEAN_HISTORY}/${dest_file} ${DATA}/${source_file} + + source_file="ocn_daily_${YYYY}_${MM}_${DD}.nc" + dest_file=${source_file} + if [ ! -a "${DATA}/${source_file}" ]; then + $NLN ${COM_OCEAN_HISTORY}/${dest_file} ${DATA}/${source_file} + fi - for fhr in $fhrlst; do - if [ $fhr = 'anl' ]; then - continue - fi - if [ -z $last_fhr ]; then last_fhr=$fhr - continue - fi - (( interval = fhr - last_fhr )) - (( midpoint = last_fhr + interval/2 )) - VDATE=$($NDATE $fhr $IDATE) - YYYY=$(echo $VDATE | cut -c1-4) - MM=$(echo $VDATE | cut -c5-6) - DD=$(echo $VDATE | cut -c7-8) - HH=$(echo $VDATE | cut -c9-10) - SS=$((10#$HH*3600)) + done - VDATE_MID=$($NDATE $midpoint $IDATE) - YYYY_MID=$(echo $VDATE_MID | cut -c1-4) - MM_MID=$(echo $VDATE_MID | cut -c5-6) - DD_MID=$(echo $VDATE_MID | cut -c7-8) - HH_MID=$(echo $VDATE_MID | cut -c9-10) - SS_MID=$((10#$HH_MID*3600)) - - source_file="ocn_${YYYY_MID}_${MM_MID}_${DD_MID}_${HH_MID}.nc" - dest_file="ocn${VDATE}.${ENSMEM}.${IDATE}.nc" - ${NLN} ${COMOUTocean}/${dest_file} ${DATA}/${source_file} - - source_file="wavocn_${YYYY_MID}_${MM_MID}_${DD_MID}_${HH_MID}.nc" - dest_file=${source_file} - ${NLN} ${COMOUTocean}/${dest_file} ${DATA}/${source_file} - - source_file="ocn_daily_${YYYY}_${MM}_${DD}.nc" - dest_file=${source_file} - if [ ! 
-a "${DATA}/${source_file}" ]; then - $NLN ${COMOUTocean}/${dest_file} ${DATA}/${source_file} - fi + elif [[ "${RUN}" =~ "gdas" ]]; then + # Link output files for RUN = gdas + + # Save MOM6 backgrounds + for fhr in ${OUTPUT_FH}; do + local idatestr=$(date -d "${CDATE:0:8} ${CDATE:8:2} + ${fhr} hours" +%Y_%m_%d_%H) + local fhr3=$(printf %03i "${fhr}") + $NLN "${COM_OCEAN_HISTORY}/${RUN}.t${cyc}z.ocnf${fhr3}.nc" "${DATA}/ocn_da_${idatestr}.nc" + done + fi + + mkdir -p "${COM_OCEAN_RESTART}" - last_fhr=$fhr + # end point restart does not have a timestamp, calculate + local rdate=$(date -d "${CDATE:0:8} ${CDATE:8:2} + ${FHMAX} hours" +%Y%m%d%H) + + # Link ocean restarts from DATA to COM + # Coarser than 1/2 degree has a single MOM restart + $NLN "${COM_OCEAN_RESTART}/${rdate:0:8}.${rdate:8:2}0000.MOM.res.nc" "${DATA}/MOM6_RESTART/" + # 1/4 degree resolution has 4 additional restarts + case ${OCNRES} in + "025") + for nn in $(seq 1 4); do + $NLN "${COM_OCEAN_RESTART}/${rdate:0:8}.${rdate:8:2}0000.MOM.res_${nn}.nc" "${DATA}/MOM6_RESTART/" + done + ;; + *) + ;; + esac + + # Loop over restart_interval frequency and link restarts from DATA to COM + local res_int=$(echo $restart_interval | cut -d' ' -f1) # If this is a list, get the frequency. # This is bound to break w/ IAU + local idate=$(date -d "${CDATE:0:8} ${CDATE:8:2} + ${res_int} hours" +%Y%m%d%H) + while [[ $idate -lt $rdate ]]; do + local idatestr=$(date +%Y-%m-%d-%H -d "${idate:0:8} ${idate:8:2}") + $NLN "${COM_OCEAN_RESTART}/${idate:0:8}.${idate:8:2}0000.MOM.res.nc" "${DATA}/MOM6_RESTART/" + case ${OCNRES} in + "025") + for nn in $(seq 1 4); do + $NLN "${COM_OCEAN_RESTART}/${idate:0:8}.${idate:8:2}0000.MOM.res_${nn}.nc" "${DATA}/MOM6_RESTART/" + done + ;; + esac + local idate=$(date -d "${idate:0:8} ${idate:8:2} + ${res_int} hours" +%Y%m%d%H) done - $NLN ${COMOUTocean}/MOM_input $DATA/INPUT/MOM_input + + # TODO: mediator should have its own CMEPS_postdet() function + # Link mediator restarts from DATA to COM + # DANGER DANGER DANGER - Linking mediator restarts to COM causes the model to fail with a message like this below: + # Abort with message NetCDF: File exists && NC_NOCLOBBER in file pio-2.5.7/src/clib/pioc_support.c at line 2173 + # Instead of linking, copy the mediator files after the model finishes + #local COMOUTmed="${ROTDIR}/${RUN}.${PDY}/${cyc}/med" + #mkdir -p "${COMOUTmed}/RESTART" + #local idate=$(date -d "${CDATE:0:8} ${CDATE:8:2} + ${res_int} hours" +%Y%m%d%H) + #while [[ $idate -le $rdate ]]; do + # local seconds=$(to_seconds ${idate:8:2}0000) # use function to_seconds from forecast_predet.sh to convert HHMMSS to seconds + # local idatestr="${idate:0:4}-${idate:4:2}-${idate:6:2}-${seconds}" + # $NLN "${COMOUTmed}/RESTART/${idate:0:8}.${idate:8:2}0000.ufs.cpld.cpl.r.nc" "${DATA}/RESTART/ufs.cpld.cpl.r.${idatestr}.nc" + # local idate=$(date -d "${idate:0:8} ${idate:8:2} + ${res_int} hours" +%Y%m%d%H) + #done echo "SUB ${FUNCNAME[0]}: MOM6 input data linked/copied" @@ -839,24 +927,57 @@ MOM6_nml() { MOM6_out() { echo "SUB ${FUNCNAME[0]}: Copying output data for MOM6" + + # Copy MOM_input from DATA to COM_OCEAN_INPUT after the forecast is run (and successfull) + if [[ ! -d ${COM_OCEAN_INPUT} ]]; then mkdir -p "${COM_OCEAN_INPUT}"; fi + ${NCP} "${DATA}/INPUT/MOM_input" "${COM_OCEAN_INPUT}/" + + # TODO: mediator should have its own CMEPS_out() function + # Copy mediator restarts from DATA to COM + # Linking mediator restarts to COM causes the model to fail with a message. 
+ # See MOM6_postdet() function for error message + mkdir -p "${COM_MED_RESTART}" + local res_int=$(echo $restart_interval | cut -d' ' -f1) # If this is a list, get the frequency. # This is bound to break w/ IAU + local idate=$(date -d "${CDATE:0:8} ${CDATE:8:2} + ${res_int} hours" +%Y%m%d%H) + local rdate=$(date -d "${CDATE:0:8} ${CDATE:8:2} + ${FHMAX} hours" +%Y%m%d%H) + while [[ $idate -le $rdate ]]; do + local seconds=$(to_seconds ${idate:8:2}0000) # use function to_seconds from forecast_predet.sh to convert HHMMSS to seconds + local idatestr="${idate:0:4}-${idate:4:2}-${idate:6:2}-${seconds}" + local mediator_file="${DATA}/RESTART/ufs.cpld.cpl.r.${idatestr}.nc" + if [[ -f ${mediator_file} ]]; then + $NCP "${DATA}/RESTART/ufs.cpld.cpl.r.${idatestr}.nc" "${COM_MED_RESTART}/${idate:0:8}.${idate:8:2}0000.ufs.cpld.cpl.r.nc" + else + echo "Mediator restart ${mediator_file} not found." + fi + local idate=$(date -d "${idate:0:8} ${idate:8:2} + ${res_int} hours" +%Y%m%d%H) + done } CICE_postdet() { echo "SUB ${FUNCNAME[0]}: CICE after run type determination" + # TODO: move configuration settings to config.ice + + # TODO: These need to be calculated in the parsing_namelists_CICE.sh script CICE_namelists() function and set as local year=$(echo $CDATE|cut -c 1-4) month=$(echo $CDATE|cut -c 5-6) day=$(echo $CDATE|cut -c 7-8) - sec=$(echo $CDATE|cut -c 9-10) + sec=$(echo $CDATE|cut -c 9-10) stepsperhr=$((3600/$ICETIM)) nhours=$($NHOUR $CDATE ${year}010100) steps=$((nhours*stepsperhr)) npt=$((FHMAX*$stepsperhr)) # Need this in order for dump_last to work + # TODO: These settings should be elevated to config.ice histfreq_n=${histfreq_n:-6} - dumpfreq_n=${dumpfreq_n:-3024000} # restart write interval in seconds, default 35 days - dumpfreq=${dumpfreq:-"s"} # "s" or "d" or "m" for restarts at intervals of "seconds", "days" or "months" - cice_hist_avg=${cice_hist_avg:-".true."} + dumpfreq_n=${dumpfreq_n:-1000} # Set this to a really large value, as cice, mom6 and cmeps restart interval is controlled by nems.configure + dumpfreq=${dumpfreq:-"y"} # "h","d","m" or "y" for restarts at intervals of "hours", "days", "months" or "years" + + if [[ "${RUN}" =~ "gdas" ]]; then + cice_hist_avg=".false." # DA needs instantaneous + elif [[ "${RUN}" =~ "gfs" ]]; then + cice_hist_avg=".true." # P8 wants averaged over histfreq_n + fi FRAZIL_FWSALT=${FRAZIL_FWSALT:-".true."} ktherm=${ktherm:-2} @@ -866,68 +987,100 @@ CICE_postdet() { # restart_pond_lvl (if tr_pond_lvl=true): # -- if true, initialize the level ponds from restart (if runtype=continue) # -- if false, re-initialize level ponds to zero (if runtype=initial or continue) - - #TODO: Determine the proper way to determine if it's a 'hot start' or not - #note this is not mediator cold start or not - #if [ hotstart ]; then - # #continuing run "hot start" - # RUNTYPE='continue' - # USE_RESTART_TIME='.true.' - #fi - RUNTYPE='initial' - USE_RESTART_TIME='.false.' 
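      # Worked example of the step bookkeeping computed above (illustrative values
      # only; ICETIM=900 s, CDATE=2021040106 and FHMAX=120 are assumptions):
      #   stepsperhr = 3600/900                       = 4
      #   nhours     = hours from 2021010100 to CDATE = 2166
      #   steps      = 2166*4                         = 8664  (steps since 00Z 1 Jan)
      #   npt        = 120*4                          = 480   (steps in this forecast)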
restart_pond_lvl=${restart_pond_lvl:-".false."} - ICERES=${ICERES:-"025"} - if [ $ICERES = '025' ]; then - ICERESdec="0.25" - fi - if [ $ICERES = '050' ]; then - ICERESdec="0.50" - fi - if [ $ICERES = '100' ]; then - ICERESdec="1.00" - fi + ICERES=${ICERES:-"025"} # TODO: similar to MOM_out, lift this higher ice_grid_file=${ice_grid_file:-"grid_cice_NEMS_mx${ICERES}.nc"} ice_kmt_file=${ice_kmt_file:-"kmtu_cice_NEMS_mx${ICERES}.nc"} export MESH_OCN_ICE=${MESH_OCN_ICE:-"mesh.mx${ICERES}.nc"} - iceic="cice_model.res_$CDATE.nc" - - # Copy CICE IC - $NCP -p $ICSDIR/$CDATE/ice/cice_model_${ICERESdec}.res_$CDATE.nc $DATA/$iceic + # Copy/link CICE IC to DATA + if [[ "${warm_start}" = ".true." ]]; then + cice_ana="${COM_ICE_RESTART}/${PDY}.${cyc}0000.cice_model_anl.res.nc" + if [[ -e ${cice_ana} ]]; then + ${NLN} "${cice_ana}" "${DATA}/cice_model.res.nc" + else + ${NLN} "${COM_ICE_RESTART_PREV}/${PDY}.${cyc}0000.cice_model.res.nc" "${DATA}/cice_model.res.nc" + fi + else # cold start are typically SIS2 restarts obtained from somewhere else e.g. CPC + $NLN "${COM_ICE_RESTART}/${PDY}.${cyc}0000.cice_model.res.nc" "${DATA}/cice_model.res.nc" + fi + # TODO: add a check for the restarts to exist, if not, exit eloquently + rm -f "${DATA}/ice.restart_file" + touch "${DATA}/ice.restart_file" + echo "${DATA}/cice_model.res.nc" >> "${DATA}/ice.restart_file" echo "Link CICE fixed files" $NLN -sf $FIXcice/$ICERES/${ice_grid_file} $DATA/ $NLN -sf $FIXcice/$ICERES/${ice_kmt_file} $DATA/ $NLN -sf $FIXcice/$ICERES/$MESH_OCN_ICE $DATA/ - # Link output files - export ENSMEM=${ENSMEM:-01} - export IDATE=$CDATE - [[ ! -d $COMOUTice ]] && mkdir -p $COMOUTice - $NLN $COMOUTice/ice_in $DATA/ice_in - fhrlst=$OUTPUT_FH + # Link CICE output files + if [[ ! -d "${COM_ICE_HISTORY}" ]]; then mkdir -p "${COM_ICE_HISTORY}"; fi + mkdir -p ${COM_ICE_RESTART} - for fhr in $fhrlst; do - if [ $fhr = 'anl' ]; then - continue - fi - VDATE=$($NDATE $fhr $IDATE) - YYYY=$(echo $VDATE | cut -c1-4) - MM=$(echo $VDATE | cut -c5-6) - DD=$(echo $VDATE | cut -c7-8) - HH=$(echo $VDATE | cut -c9-10) - SS=$((10#$HH*3600)) + if [[ "${RUN}" =~ "gfs" ]]; then + # Link output files for RUN = gfs - if [[ 10#$fhr -eq 0 ]]; then - $NLN $COMOUTice/iceic$VDATE.$ENSMEM.$IDATE.nc $DATA/history/iceh_ic.${YYYY}-${MM}-${DD}-$(printf "%5.5d" ${SS}).nc - else - (( interval = fhr - last_fhr )) - $NLN $COMOUTice/ice$VDATE.$ENSMEM.$IDATE.nc $DATA/history/iceh_$(printf "%0.2d" $interval)h.${YYYY}-${MM}-${DD}-$(printf "%5.5d" ${SS}).nc - fi - last_fhr=$fhr + # TODO: make these forecast output files consistent w/ GFS output + # TODO: Work w/ NB to determine appropriate naming convention for these files + + export ENSMEM=${ENSMEM:-01} + export IDATE=$CDATE + + fhrlst=$OUTPUT_FH + + # TODO: consult w/ NB on how to improve on this. Gather requirements and more information on what these files are and how they are used to properly catalog them + for fhr in $fhrlst; do + if [ $fhr = 'anl' ]; then # Looking at OUTPUT_FH, this is never true. TODO: remove this block + continue + fi + VDATE=$($NDATE $fhr $IDATE) + YYYY=$(echo $VDATE | cut -c1-4) + MM=$(echo $VDATE | cut -c5-6) + DD=$(echo $VDATE | cut -c7-8) + HH=$(echo $VDATE | cut -c9-10) + SS=$((10#$HH*3600)) + + if [[ 10#$fhr -eq 0 ]]; then + ${NLN} "${COM_ICE_HISTORY}/iceic${VDATE}.${ENSMEM}.${IDATE}.nc" "${DATA}/CICE_OUTPUT/iceh_ic.${YYYY}-${MM}-${DD}-$(printf "%5.5d" ${SS}).nc" + else + (( interval = fhr - last_fhr )) # Umm.. isn't this histfreq_n? 
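            # Example of the name mapping done by the link just below (illustrative
            # values; assumes IDATE=2021040100, ENSMEM=01, fhr=12, last_fhr=6):
            #   interval = 12 - 6 = 6,  VDATE = 2021040112,  SS = 12*3600 = 43200
            #   ${DATA}/CICE_OUTPUT/iceh_06h.2021-04-01-43200.nc
            #     -> ${COM_ICE_HISTORY}/ice2021040112.01.2021040100.nc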
+ ${NLN} "${COM_ICE_HISTORY}/ice${VDATE}.${ENSMEM}.${IDATE}.nc" "${DATA}/CICE_OUTPUT/iceh_$(printf "%0.2d" $interval)h.${YYYY}-${MM}-${DD}-$(printf "%5.5d" ${SS}).nc" + fi + last_fhr=$fhr + done + + elif [[ "${RUN}" =~ "gdas" ]]; then + + # Link CICE generated initial condition file from DATA/CICE_OUTPUT to COMOUTice + # This can be thought of as the f000 output from the CICE model + local seconds=$(to_seconds ${CDATE:8:2}0000) # convert HHMMSS to seconds + $NLN "${COM_ICE_HISTORY}/${RUN}.t${cyc}z.iceic.nc" "${DATA}/CICE_OUTPUT/iceh_ic.${CDATE:0:4}-${CDATE:4:2}-${CDATE:6:2}-${seconds}.nc" + + # Link instantaneous CICE forecast output files from DATA/CICE_OUTPUT to COMOUTice + local fhr="${FHOUT}" + while [[ "${fhr}" -le "${FHMAX}" ]]; do + local idate=$(date -d "${CDATE:0:8} ${CDATE:8:2} + ${fhr} hours" +%Y%m%d%H) + local seconds=$(to_seconds ${idate:8:2}0000) # convert HHMMSS to seconds + local fhr3=$(printf %03i ${fhr}) + $NLN "${COM_ICE_HISTORY}/${RUN}.t${cyc}z.icef${fhr3}.nc" "${DATA}/CICE_OUTPUT/iceh_inst.${idate:0:4}-${idate:4:2}-${idate:6:2}-${seconds}.nc" + local fhr=$((fhr + FHOUT)) + done + + fi + + # Link CICE restarts from CICE_RESTART to COMOUTice/RESTART + # Loop over restart_interval and link restarts from DATA to COM + local res_int=$(echo ${restart_interval} | cut -d' ' -f1) # If this is a list, get the frequency. # This is bound to break w/ IAU + local rdate=$(date -d "${CDATE:0:8} ${CDATE:8:2} + ${FHMAX} hours" +%Y%m%d%H) + local idate=$(date -d "${CDATE:0:8} ${CDATE:8:2} + ${res_int} hours" +%Y%m%d%H) + while [[ ${idate} -le ${rdate} ]]; do + local seconds=$(to_seconds ${idate:8:2}0000) # convert HHMMSS to seconds + local idatestr="${idate:0:4}-${idate:4:2}-${idate:6:2}-${seconds}" + $NLN "${COM_ICE_RESTART}/${idate:0:8}.${idate:8:2}0000.cice_model.res.nc" "${DATA}/CICE_RESTART/cice_model.res.${idatestr}.nc" + local idate=$(date -d "${idate:0:8} ${idate:8:2} + ${res_int} hours" +%Y%m%d%H) done } @@ -939,6 +1092,10 @@ CICE_nml() { CICE_out() { echo "SUB ${FUNCNAME[0]}: Copying output data for CICE" + + # Copy ice_in namelist from DATA to COMOUTice after the forecast is run (and successfull) + if [[ ! -d "${COM_ICE_INPUT}" ]]; then mkdir -p "${COM_ICE_INPUT}"; fi + ${NCP} "${DATA}/ice_in" "${COM_ICE_INPUT}/ice_in" } GOCART_rc() { @@ -966,7 +1123,8 @@ GOCART_rc() { cat ${AERO_CONFIG_DIR}/ExtData.${AERO_EMIS_FIRE:-none} ; \ echo "%%" ; \ } > $DATA/AERO_ExtData.rc - [[ $status -ne 0 ]] && exit $status + status=$? + if (( status != 0 )); then exit $status; fi fi fi } @@ -974,7 +1132,7 @@ GOCART_rc() { GOCART_postdet() { echo "SUB ${FUNCNAME[0]}: Linking output data for GOCART" - [[ ! -d $COMOUTaero ]] && mkdir -p $COMOUTaero + if [[ ! 
-d "${COM_CHEM_HISTORY}" ]]; then mkdir -p "${COM_CHEM_HISTORY}"; fi fhrlst=$OUTPUT_FH for fhr in $fhrlst; do @@ -988,6 +1146,13 @@ GOCART_postdet() { HH=$(echo $VDATE | cut -c9-10) SS=$((10#$HH*3600)) - $NLN $COMOUTaero/gocart.inst_aod.${YYYY}${MM}${DD}_${HH}00z.nc4 $DATA/gocart.inst_aod.${YYYY}${MM}${DD}_${HH}00z.nc4 + # + # Temporarily delete existing files due to noclobber in GOCART + # + if [[ -e "${COM_CHEM_HISTORY}/gocart.inst_aod.${YYYY}${MM}${DD}_${HH}00z.nc4" ]]; then + rm "${COM_CHEM_HISTORY}/gocart.inst_aod.${YYYY}${MM}${DD}_${HH}00z.nc4" + fi + + ${NLN} "${COM_CHEM_HISTORY}/gocart.inst_aod.${YYYY}${MM}${DD}_${HH}00z.nc4" "${DATA}/gocart.inst_aod.${YYYY}${MM}${DD}_${HH}00z.nc4" done } diff --git a/ush/forecast_predet.sh b/ush/forecast_predet.sh index 2f9ed863ca..334eacedef 100755 --- a/ush/forecast_predet.sh +++ b/ush/forecast_predet.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#! /usr/bin/env bash ##### ## "forecast_def.sh" @@ -8,19 +8,40 @@ ## This script is a definition of functions. ##### - # For all non-evironment variables # Cycling and forecast hour specific parameters + +to_seconds() { + # Function to convert HHMMSS to seconds since 00Z + local hhmmss=${1:?} + local hh=${hhmmss:0:2} + local mm=${hhmmss:2:2} + local ss=${hhmmss:4:2} + local seconds=$((10#${hh}*3600+10#${mm}*60+10#${ss})) + local padded_seconds=$(printf "%05d" ${seconds}) + echo ${padded_seconds} +} + +middle_date(){ + # Function to calculate mid-point date in YYYYMMDDHH between two dates also in YYYYMMDDHH + local date1=${1:?} + local date2=${2:?} + local date1s=$(date -d "${date1:0:8} ${date1:8:2}" +%s) + local date2s=$(date -d "${date2:0:8} ${date2:8:2}" +%s) + local dtsecsby2=$(( $((date2s - date1s)) / 2 )) + local mid_date=$(date -d "${date1:0:8} ${date1:8:2} + ${dtsecsby2} seconds" +%Y%m%d%H%M%S) + echo ${mid_date:0:10} +} + common_predet(){ echo "SUB ${FUNCNAME[0]}: Defining variables for shared through models" pwd=$(pwd) - machine=${machine:-"WCOSS_C"} + machine=${machine:-"WCOSS2"} machine=$(echo $machine | tr '[a-z]' '[A-Z]') CASE=${CASE:-C768} CDATE=${CDATE:-2017032500} DATA=${DATA:-$pwd/fv3tmp$$} # temporary running directory ROTDIR=${ROTDIR:-$pwd} # rotating archive directory - ICSDIR=${ICSDIR:-$pwd} # cold start initial conditions } DATM_predet(){ @@ -39,7 +60,6 @@ DATM_predet(){ FV3_GFS_predet(){ echo "SUB ${FUNCNAME[0]}: Defining variables for FV3GFS" CDUMP=${CDUMP:-gdas} - CDUMPwave="${CDUMP}wave" FHMIN=${FHMIN:-0} FHMAX=${FHMAX:-9} FHOUT=${FHOUT:-3} @@ -72,16 +92,14 @@ FV3_GFS_predet(){ # Directories. 
pwd=$(pwd) - NWPROD=${NWPROD:-${NWROOT:-$pwd}} - HOMEgfs=${HOMEgfs:-$NWPROD} + HOMEgfs=${HOMEgfs:-${PACKAGEROOT:-$pwd}} FIX_DIR=${FIX_DIR:-$HOMEgfs/fix} - FIX_AM=${FIX_AM:-$FIX_DIR/fix_am} - FIX_AER=${FIX_AER:-$FIX_DIR/fix_aer} - FIX_LUT=${FIX_LUT:-$FIX_DIR/fix_lut} - FIXfv3=${FIXfv3:-$FIX_DIR/fix_fv3_gmted2010} + FIX_AM=${FIX_AM:-$FIX_DIR/am} + FIX_AER=${FIX_AER:-$FIX_DIR/aer} + FIX_LUT=${FIX_LUT:-$FIX_DIR/lut} + FIXfv3=${FIXfv3:-$FIX_DIR/orog} DATA=${DATA:-$pwd/fv3tmp$$} # temporary running directory ROTDIR=${ROTDIR:-$pwd} # rotating archive directory - ICSDIR=${ICSDIR:-$pwd} # cold start initial conditions DMPDIR=${DMPDIR:-$pwd} # global dumps for seaice, snow and sst analysis # Model resolution specific parameters @@ -110,52 +128,24 @@ FV3_GFS_predet(){ IAU_OFFSET=${IAU_OFFSET:-0} # Model specific stuff - FCSTEXECDIR=${FCSTEXECDIR:-$HOMEgfs/sorc/ufs_model.fd/build} - FCSTEXEC=${FCSTEXEC:-ufs_model} + FCSTEXECDIR=${FCSTEXECDIR:-$HOMEgfs/exec} + FCSTEXEC=${FCSTEXEC:-ufs_model.x} PARM_FV3DIAG=${PARM_FV3DIAG:-$HOMEgfs/parm/parm_fv3diag} PARM_POST=${PARM_POST:-$HOMEgfs/parm/post} # Model config options - APRUN_FV3=${APRUN_FV3:-${APRUN_FCST:-${APRUN:-""}}} - #the following NTHREAD_FV3 line is commented out because NTHREAD_FCST is not defined - #and because NTHREADS_FV3 gets overwritten by what is in the env/${macine}.env - #file and the value of npe_node_fcst is not correctly defined when using more than - #one thread and sets NTHREADS_FV3=1 even when the number of threads is appropraitely >1 - #NTHREADS_FV3=${NTHREADS_FV3:-${NTHREADS_FCST:-${nth_fv3:-1}}} - cores_per_node=${cores_per_node:-${npe_node_fcst:-24}} ntiles=${ntiles:-6} - if [ $MEMBER -lt 0 ]; then - NTASKS_TOT=${NTASKS_TOT:-$npe_fcst_gfs} - else - NTASKS_TOT=${NTASKS_TOT:-$npe_efcs} - fi TYPE=${TYPE:-"nh"} # choices: nh, hydro MONO=${MONO:-"non-mono"} # choices: mono, non-mono QUILTING=${QUILTING:-".true."} OUTPUT_GRID=${OUTPUT_GRID:-"gaussian_grid"} - OUTPUT_FILE=${OUTPUT_FILE:-"nemsio"} WRITE_NEMSIOFLIP=${WRITE_NEMSIOFLIP:-".true."} WRITE_FSYNCFLAG=${WRITE_FSYNCFLAG:-".true."} - affix="nemsio" - [[ "$OUTPUT_FILE" = "netcdf" ]] && affix="nc" rCDUMP=${rCDUMP:-$CDUMP} - #------------------------------------------------------------------ - # setup the runtime environment - if [ $machine = "WCOSS_C" ] ; then - HUGEPAGES=${HUGEPAGES:-hugepages4M} - . $MODULESHOME/init/sh 2>/dev/null - module load iobuf craype-$HUGEPAGES 2>/dev/null - export MPICH_GNI_COLL_OPT_OFF=${MPICH_GNI_COLL_OPT_OFF:-MPI_Alltoallv} - export MKL_CBWR=AVX2 - export WRTIOBUF=${WRTIOBUF:-"4M"} - export NC_BLKSZ=${NC_BLKSZ:-"4M"} - export IOBUF_PARAMS="*nemsio:verbose:size=${WRTIOBUF},*:verbose:size=${NC_BLKSZ}" - fi - #------------------------------------------------------- if [ ! -d $ROTDIR ]; then mkdir -p $ROTDIR; fi mkdata=NO @@ -223,10 +213,9 @@ FV3_GFS_predet(){ print_freq=${print_freq:-6} #------------------------------------------------------- - if [ $CDUMP = "gfs" -a $rst_invt1 -gt 0 ]; then - RSTDIR_ATM=${RSTDIR:-$ROTDIR}/${CDUMP}.${PDY}/${cyc}/atmos/RERUN_RESTART - if [ ! -d $RSTDIR_ATM ]; then mkdir -p $RSTDIR_ATM ; fi - $NLN $RSTDIR_ATM RESTART + if [[ ${RUN} =~ "gfs" || ${RUN} = "gefs" ]] && (( rst_invt1 > 0 )); then + if [[ ! 
-d ${COM_ATMOS_RESTART} ]]; then mkdir -p "${COM_ATMOS_RESTART}" ; fi + ${NLN} "${COM_ATMOS_RESTART}" RESTART # The final restart written at the end doesn't include the valid date # Create links that keep the same name pattern for these files VDATE=$($NDATE +$FHMAX_GFS $CDATE) @@ -238,39 +227,19 @@ FV3_GFS_predet(){ files="${files} ${base}.tile${tile}.nc" done done - for file in $files; do - $NLN $RSTDIR_ATM/$file $RSTDIR_ATM/${vPDY}.${vcyc}0000.$file + for file in ${files}; do + ${NLN} "${COM_ATMOS_RESTART}/${file}" "${COM_ATMOS_RESTART}/${vPDY}.${vcyc}0000.${file}" done else mkdir -p $DATA/RESTART fi - #------------------------------------------------------- - # member directory - if [ $MEMBER -lt 0 ]; then - prefix=$CDUMP - rprefix=$rCDUMP - memchar="" - else - prefix=enkf$CDUMP - rprefix=enkf$rCDUMP - memchar=mem$(printf %03i $MEMBER) - fi - memdir=$ROTDIR/${prefix}.$PDY/$cyc/atmos/$memchar - if [ ! -d $memdir ]; then mkdir -p $memdir; fi - - GDATE=$($NDATE -$assim_freq $CDATE) - gPDY=$(echo $GDATE | cut -c1-8) - gcyc=$(echo $GDATE | cut -c9-10) - gmemdir=$ROTDIR/${rprefix}.$gPDY/$gcyc/atmos/$memchar - sCDATE=$($NDATE -3 $CDATE) - if [[ "$DOIAU" = "YES" ]]; then sCDATE=$($NDATE -3 $CDATE) sPDY=$(echo $sCDATE | cut -c1-8) scyc=$(echo $sCDATE | cut -c9-10) - tPDY=$gPDY - tcyc=$gcyc + tPDY=${gPDY} + tcyc=${gcyc} else sCDATE=$CDATE sPDY=$PDY @@ -284,36 +253,18 @@ FV3_GFS_predet(){ WW3_predet(){ echo "SUB ${FUNCNAME[0]}: Defining variables for WW3" - if [ $CDUMP = "gdas" ]; then - export RSTDIR_WAVE=$ROTDIR/${CDUMP}.${PDY}/${cyc}/wave/restart - else - export RSTDIR_WAVE=${RSTDIR_WAVE:-$ROTDIR/${CDUMP}.${PDY}/${cyc}/wave/restart} - fi - if [ ! -d $RSTDIR_WAVE ]; then mkdir -p $RSTDIR_WAVE ; fi - $NLN $RSTDIR_WAVE restart_wave + if [[ ! -d "${COM_WAVE_RESTART}" ]]; then mkdir -p "${COM_WAVE_RESTART}" ; fi + ${NLN} "${COM_WAVE_RESTART}" "restart_wave" } CICE_predet(){ echo "SUB ${FUNCNAME[0]}: CICE before run type determination" - if [ ! -d $ROTDIR ]; then mkdir -p $ROTDIR; fi - if [ ! -d $DATA ]; then mkdir -p $DATA; fi - if [ ! -d $DATA/RESTART ]; then mkdir -p $DATA/RESTART; fi - if [ ! -d $DATA/INPUT ]; then mkdir -p $DATA/INPUT; fi - if [ ! -d $DATA/restart ]; then mkdir -p $DATA/restart; fi - if [ ! -d $DATA/history ]; then mkdir -p $DATA/history; fi - if [ ! -d $DATA/OUTPUT ]; then mkdir -p $DATA/OUTPUT; fi + if [ ! -d $DATA/CICE_OUTPUT ]; then mkdir -p $DATA/CICE_OUTPUT; fi + if [ ! -d $DATA/CICE_RESTART ]; then mkdir -p $DATA/CICE_RESTART; fi } MOM6_predet(){ echo "SUB ${FUNCNAME[0]}: MOM6 before run type determination" - if [ ! -d $ROTDIR ]; then mkdir -p $ROTDIR; fi - if [ ! -d $DATA ]; then mkdir -p $DATA; fi - if [ ! -d $DATA/RESTART ]; then mkdir -p $DATA/RESTART; fi - if [ ! -d $DATA/INPUT ]; then mkdir -p $DATA/INPUT; fi - if [ ! -d $DATA/restart ]; then mkdir -p $DATA/restart; fi - if [ ! -d $DATA/history ]; then mkdir -p $DATA/history; fi - if [ ! -d $DATA/OUTPUT ]; then mkdir -p $DATA/OUTPUT; fi if [ ! -d $DATA/MOM6_OUTPUT ]; then mkdir -p $DATA/MOM6_OUTPUT; fi if [ ! -d $DATA/MOM6_RESTART ]; then mkdir -p $DATA/MOM6_RESTART; fi - cd $DATA || exit 8 } diff --git a/ush/fv3gfs_downstream_nems.sh b/ush/fv3gfs_downstream_nems.sh index 3138fe75f0..48aacf0f07 100755 --- a/ush/fv3gfs_downstream_nems.sh +++ b/ush/fv3gfs_downstream_nems.sh @@ -1,5 +1,4 @@ -#!/bin/ksh -set -x +#! /usr/bin/env bash #----------------------------------------------------------------------- #-Hui-Ya Chuang, January 2014: First version. @@ -31,15 +30,14 @@ set -x # 1. 
Modify sea icea cover via land-sea mask. #----------------------------------------------------------------------- - -echo "!!!!!CREATING $RUN DOWNSTREAM PRODUCTS FOR FH = $FH !!!!!!" +source "$HOMEgfs/ush/preamble.sh" "$FH" export downset=${downset:-1} export DATA=${DATA:-/ptmpd2/$LOGNAME/test} -export CNVGRIB=${CNVGRIB:-${NWPROD:-/nwprod}/util/exec/cnvgrib21} -export COPYGB2=${COPYGB2:-${NWPROD:-/nwprod}/util/exec/copygb2} -export WGRIB2=${WGRIB2:-${NWPROD:-/nwprod}/util/exec/wgrib2} -export GRBINDEX=${GRBINDEX:-${NWPROD:-nwprod}/util/exec/grbindex} +export CNVGRIB=${CNVGRIB:-${grib_util_ROOT}/bin/cnvgrib} +export COPYGB2=${COPYGB2:-${grib_util_ROOT}/bin/copygb} +export WGRIB2=${WGRIB2:-${wgrib2_ROOT}/bin/wgrib2} +export GRBINDEX=${GRBINDEX:-${wgrib2_ROOT}/bin/grbindex} export RUN=${RUN:-"gfs"} export cycn=$(echo $CDATE |cut -c 9-10) export TCYC=${TCYC:-".t${cycn}z."} @@ -79,10 +77,8 @@ elif [ $FH -eq 0 ] ; then else export paramlist=${paramlist:-$PARMpost/global_1x1_paramlist_g2} export paramlistb=${paramlistb:-$PARMpost/global_master-catchup_parmlist_g2} - export fhr3=$(expr $FH + 0 ) - if [ $fhr3 -lt 100 ]; then export fhr3="0$fhr3"; fi - if [ $fhr3 -lt 10 ]; then export fhr3="0$fhr3"; fi - if [ $fhr3%${FHOUT_PGB} -eq 0 ]; then + export fhr3=$(printf "%03d" ${FH}) + if (( FH%FHOUT_PGB == 0 )); then export PGBS=YES fi fi @@ -90,15 +86,11 @@ fi $WGRIB2 $PGBOUT2 | grep -F -f $paramlist | $WGRIB2 -i -grib tmpfile1_$fhr3 $PGBOUT2 export err=$?; err_chk -#if [ $machine = WCOSS -o $machine = WCOSS_C -a $downset = 2 ]; then if [ $downset = 2 ]; then $WGRIB2 $PGBOUT2 | grep -F -f $paramlistb | $WGRIB2 -i -grib tmpfile2_$fhr3 $PGBOUT2 export err=$?; err_chk fi -#----------------------------------------------------- -#----------------------------------------------------- -#if [ $machine = WCOSS -o $machine = WCOSS_C -o $machine = WCOSS_DELL_P3 ]; then #----------------------------------------------------- #----------------------------------------------------- export nset=1 @@ -134,16 +126,27 @@ while [ $nset -le $totalset ]; do # if final record of each piece is ugrd, add vgrd # copygb will only interpolate u and v together #$WGRIB2 -d $end $tmpfile |grep -i ugrd - $WGRIB2 -d $end $tmpfile |egrep -i "ugrd|ustm|uflx|u-gwd" + # grep returns 1 if no match is found, so temporarily turn off exit on non-zero rc + set +e + $WGRIB2 -d $end $tmpfile | egrep -i "ugrd|ustm|uflx|u-gwd" export rc=$? + set_strict if [[ $rc -eq 0 ]] ; then export end=$(expr ${end} + 1) + elif [[ $rc -gt 1 ]]; then + echo "FATAL: WGRIB2 failed with error code ${rc}" + exit $rc fi # if final record is land, add next record icec - $WGRIB2 -d $end $tmpfile |egrep -i "land" + set +e + $WGRIB2 -d $end $tmpfile | egrep -i "land" export rc=$? 
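  # rc semantics for the LAND check (mirrors the ugrd check above):
  #   0  -> final record is LAND, so extend the range to also pull in the ICEC record
  #   1  -> no match, nothing extra to add
  #   >1 -> wgrib2/grep itself failed; treated as fatal below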
+ set_strict if [[ $rc -eq 0 ]] ; then export end=$(expr ${end} + 1) + elif [[ $rc -gt 1 ]]; then + echo "FATAL: WGRIB2 failed with error code ${rc}" + exit $rc fi if [ $iproc -eq $nproc ]; then export end=$ncount @@ -170,16 +173,17 @@ while [ $nset -le $totalset ]; do export MP_PGMMODEL=mpmd export MP_CMDFILE=$DATA/poescript launcher=${APRUN_DWN:-"aprun -j 1 -n 24 -N 24 -d 1 cfp"} - if [ $machine = WCOSS_C -o $machine = WCOSS_DELL_P3 -o $machine = WCOSS2 ] ; then + if [ $machine = WCOSS2 ] ; then $launcher $MP_CMDFILE elif [ $machine = HERA -o $machine = ORION -o $machine = JET -o $machine = S4 ] ; then if [ -s $DATA/poescript_srun ]; then rm -f $DATA/poescript_srun; fi touch $DATA/poescript_srun nm=0 cat $DATA/poescript | while read line; do - echo "$nm $line" >> $DATA/poescript_srun + echo "$nm $line" >> $DATA/poescript_srun nm=$((nm+1)) done + nm=$(wc -l < $DATA/poescript_srun) ${launcher:-"srun --export=ALL"} -n $nm --multi-prog $DATA/poescript_srun else $launcher @@ -218,8 +222,8 @@ while [ $nset -le $totalset ]; do # $WGRIB2 land.grb -set_grib_type same -new_grid_interpolation bilinear -new_grid_winds earth -new_grid $grid0p25 newland.grb # $WGRIB2 newland.grb -set_byte 4 11 218 -grib newnewland.grb # cat ./newnewland.grb >> pgb2file_${fhr3}_0p25 - # $CNVGRIB -g21 newnewland.grb newnewland.grb1 - # cat ./newnewland.grb1 >> pgbfile_${fhr3}_0p25 + # $CNVGRIB -g21 newnewland.grb newnewland.grb1 + # cat ./newnewland.grb1 >> pgbfile_${fhr3}_0p25 ##0p5 degree # rm -f newland.grb newnewland.grb newnewland.grb1 # $WGRIB2 land.grb -set_grib_type same -new_grid_interpolation bilinear -new_grid_winds earth -new_grid $grid0p5 newland.grb @@ -236,50 +240,50 @@ while [ $nset -le $totalset ]; do if [ $nset = 1 ]; then if [ $fhr3 = anl ]; then - cp pgb2file_${fhr3}_0p25 $COMOUT/${PREFIX}pgrb2.0p25.anl - $WGRIB2 -s pgb2file_${fhr3}_0p25 > $COMOUT/${PREFIX}pgrb2.0p25.anl.idx + cp "pgb2file_${fhr3}_0p25" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2.0p25.anl" + ${WGRIB2} -s "pgb2file_${fhr3}_0p25" > "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2.0p25.anl.idx" if [ "$PGBS" = "YES" ]; then - cp pgb2file_${fhr3}_0p5 $COMOUT/${PREFIX}pgrb2.0p50.anl - cp pgb2file_${fhr3}_1p0 $COMOUT/${PREFIX}pgrb2.1p00.anl - $WGRIB2 -s pgb2file_${fhr3}_0p5 > $COMOUT/${PREFIX}pgrb2.0p50.anl.idx - $WGRIB2 -s pgb2file_${fhr3}_1p0 > $COMOUT/${PREFIX}pgrb2.1p00.anl.idx - if [ "$PGB1F" = 'YES' ]; then - cp pgbfile_${fhr3}_1p0 $COMOUT/${PREFIX}pgrb.1p00.anl - $GRBINDEX $COMOUT/${PREFIX}pgrb.1p00.anl $COMOUT/${PREFIX}pgrb.1p00.anl.idx + cp "pgb2file_${fhr3}_0p5" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2.0p50.anl" + cp "pgb2file_${fhr3}_1p0" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.anl" + ${WGRIB2} -s "pgb2file_${fhr3}_0p5" > "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2.0p50.anl.idx" + ${WGRIB2} -s "pgb2file_${fhr3}_1p0" > "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.anl.idx" + if [ "$PGB1F" = 'YES' ]; then + cp "pgbfile_${fhr3}_1p0" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb.1p00.anl" + ${GRBINDEX} "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb.1p00.anl" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb.1p00.anl.idx" fi fi else - cp pgb2file_${fhr3}_0p25 $COMOUT/${PREFIX}pgrb2.0p25.f${fhr3} - $WGRIB2 -s pgb2file_${fhr3}_0p25 > $COMOUT/${PREFIX}pgrb2.0p25.f${fhr3}.idx + cp "pgb2file_${fhr3}_0p25" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2.0p25.f${fhr3}" + ${WGRIB2} -s "pgb2file_${fhr3}_0p25" > "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2.0p25.f${fhr3}.idx" if [ "$PGBS" = "YES" ]; then - cp pgb2file_${fhr3}_0p5 $COMOUT/${PREFIX}pgrb2.0p50.f${fhr3} - cp pgb2file_${fhr3}_1p0 
$COMOUT/${PREFIX}pgrb2.1p00.f${fhr3} - $WGRIB2 -s pgb2file_${fhr3}_0p5 > $COMOUT/${PREFIX}pgrb2.0p50.f${fhr3}.idx - $WGRIB2 -s pgb2file_${fhr3}_1p0 > $COMOUT/${PREFIX}pgrb2.1p00.f${fhr3}.idx + cp "pgb2file_${fhr3}_0p5" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2.0p50.f${fhr3}" + cp "pgb2file_${fhr3}_1p0" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.f${fhr3}" + ${WGRIB2} -s "pgb2file_${fhr3}_0p5" > "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2.0p50.f${fhr3}.idx" + ${WGRIB2} -s "pgb2file_${fhr3}_1p0" > "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2.1p00.f${fhr3}.idx" if [ "$PGB1F" = 'YES' ]; then - cp pgbfile_${fhr3}_1p0 $COMOUT/${PREFIX}pgrb.1p00.f${fhr3} - $GRBINDEX $COMOUT/${PREFIX}pgrb.1p00.f${fhr3} $COMOUT/${PREFIX}pgrb.1p00.f${fhr3}.idx + cp "pgbfile_${fhr3}_1p0" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb.1p00.f${fhr3}" + ${GRBINDEX} "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb.1p00.f${fhr3}" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb.1p00.f${fhr3}.idx" fi fi fi elif [ $nset = 2 ]; then if [ $fhr3 = anl ]; then - cp pgb2bfile_${fhr3}_0p25 $COMOUT/${PREFIX}pgrb2b.0p25.anl - $WGRIB2 -s pgb2bfile_${fhr3}_0p25 > $COMOUT/${PREFIX}pgrb2b.0p25.anl.idx + cp "pgb2bfile_${fhr3}_0p25" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2b.0p25.anl" + ${WGRIB2} -s "pgb2bfile_${fhr3}_0p25" > "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2b.0p25.anl.idx" if [ "$PGBS" = "YES" ]; then - cp pgb2bfile_${fhr3}_0p5 $COMOUT/${PREFIX}pgrb2b.0p50.anl - cp pgb2bfile_${fhr3}_1p0 $COMOUT/${PREFIX}pgrb2b.1p00.anl - $WGRIB2 -s pgb2bfile_${fhr3}_0p5 > $COMOUT/${PREFIX}pgrb2b.0p50.anl.idx - $WGRIB2 -s pgb2bfile_${fhr3}_1p0 > $COMOUT/${PREFIX}pgrb2b.1p00.anl.idx + cp "pgb2bfile_${fhr3}_0p5" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2b.0p50.anl" + cp "pgb2bfile_${fhr3}_1p0" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2b.1p00.anl" + ${WGRIB2} -s "pgb2bfile_${fhr3}_0p5" > "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2b.0p50.anl.idx" + ${WGRIB2} -s "pgb2bfile_${fhr3}_1p0" > "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2b.1p00.anl.idx" fi else - cp pgb2bfile_${fhr3}_0p25 $COMOUT/${PREFIX}pgrb2b.0p25.f${fhr3} - $WGRIB2 -s pgb2bfile_${fhr3}_0p25 > $COMOUT/${PREFIX}pgrb2b.0p25.f${fhr3}.idx + cp "pgb2bfile_${fhr3}_0p25" "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2b.0p25.f${fhr3}" + ${WGRIB2} -s "pgb2bfile_${fhr3}_0p25" > "${COM_ATMOS_GRIB_0p25}/${PREFIX}pgrb2b.0p25.f${fhr3}.idx" if [ "$PGBS" = "YES" ]; then - cp pgb2bfile_${fhr3}_0p5 $COMOUT/${PREFIX}pgrb2b.0p50.f${fhr3} - cp pgb2bfile_${fhr3}_1p0 $COMOUT/${PREFIX}pgrb2b.1p00.f${fhr3} - $WGRIB2 -s pgb2bfile_${fhr3}_0p5 > $COMOUT/${PREFIX}pgrb2b.0p50.f${fhr3}.idx - $WGRIB2 -s pgb2bfile_${fhr3}_1p0 > $COMOUT/${PREFIX}pgrb2b.1p00.f${fhr3}.idx + cp "pgb2bfile_${fhr3}_0p5" "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2b.0p50.f${fhr3}" + cp "pgb2bfile_${fhr3}_1p0" "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2b.1p00.f${fhr3}" + ${WGRIB2} -s "pgb2bfile_${fhr3}_0p5" > "${COM_ATMOS_GRIB_0p50}/${PREFIX}pgrb2b.0p50.f${fhr3}.idx" + ${WGRIB2} -s "pgb2bfile_${fhr3}_1p0" > "${COM_ATMOS_GRIB_1p00}/${PREFIX}pgrb2b.1p00.f${fhr3}.idx" fi fi fi @@ -287,8 +291,4 @@ while [ $nset -le $totalset ]; do export nset=$(expr $nset + 1 ) done -echo "!!!!!!CREATION OF SELECT $RUN DOWNSTREAM PRODUCTS COMPLETED FOR FHR = $FH !!!!!!!" -#--------------------------------------------------------------- - - exit 0 diff --git a/ush/fv3gfs_dwn_nems.sh b/ush/fv3gfs_dwn_nems.sh index b49daee45c..aa908df07c 100755 --- a/ush/fv3gfs_dwn_nems.sh +++ b/ush/fv3gfs_dwn_nems.sh @@ -1,5 +1,4 @@ -#!/bin/ksh -set -x +#! 
/usr/bin/env bash # this script generates 0.25/0.5/1/2.5 deg pgb files for each small Grib file # Hui-Ya Chuang 01/2014: First Version @@ -12,14 +11,16 @@ set -x # Wen Meng 10/2019: Use bilinear interpolation for LAND, It can trancate land-sea mask as 0 or 1. # Wen Meng 11/2019: Teak sea ice cover via land-sea mask. +source "$HOMEgfs/ush/preamble.sh" + export tmpfile=$1 export fhr3=$2 export iproc=$3 export nset=$4 -export CNVGRIB=${CNVGRIB:-$${NWPROD:-/nwprod}/util/exec/cnvgrib21} -export COPYGB2=${COPYGB2:-$${NWPROD:-/nwprod}/util/exec/copygb2} -export WGRIB2=${WGRIB2:-${NWPROD:-/nwprod}/util/exec/wgrib2} +export CNVGRIB=${CNVGRIB:-${grib_util_ROOT}/bin/cnvgrib} +export COPYGB2=${COPYGB2:-${grib_util_ROOT}/bin/copygb} +export WGRIB2=${WGRIB2:-${wgrib2_ROOT}/bin/wgrib2} export TRIMRH=${TRIMRH:-$USHgfs/trim_rh.sh} export MODICEC=${MODICEC:-$USHgfs/mod_icec.sh} @@ -32,9 +33,9 @@ export opt25=":(APCP|ACPCP|PRATE|CPRAT):" export opt26=' -set_grib_max_bits 25 -fi -if ' export opt27=":(APCP|ACPCP|PRATE|CPRAT|DZDT):" export opt28=' -new_grid_interpolation budget -fi ' -if [ $machine = "S4" ]; then +#JKHif [ $machine = "S4" ]; then export optncpu=' -ncpu 1 ' -fi +#JKHfi export grid0p25="latlon 0:1440:0.25 90:721:-0.25" export grid0p5="latlon 0:720:0.5 90:361:-0.5" export grid1p0="latlon 0:360:1.0 90:181:-1.0" @@ -42,6 +43,7 @@ export grid2p5="latlon 0:144:2.5 90:73:-2.5" export PGB1F=${PGB1F:-"NO"} export PGBS=${PGBS:-"NO"} +optncpu=${optncpu:-} if [ $nset = 1 ]; then if [ "$PGBS" = "YES" ]; then @@ -105,5 +107,3 @@ fi # $CNVGRIB -g21 pgb2file_${fhr3}_${iproc}_1p0 pgbfile_${fhr3}_${iproc}_1p0 # $CNVGRIB -g21 pgb2file_${fhr3}_${iproc}_2p5 pgbfile_${fhr3}_${iproc}_2p5 #---------------------------------------------------------------------------------------------- - -exit 0 diff --git a/ush/fv3gfs_nc2nemsio.sh b/ush/fv3gfs_nc2nemsio.sh deleted file mode 100755 index 4b239e18a3..0000000000 --- a/ush/fv3gfs_nc2nemsio.sh +++ /dev/null @@ -1,71 +0,0 @@ -#!/bin/ksh -set -x -#---------------------------------------------------------------------------- -#--Fanglin Yang, October 2016: convert FV3 NetCDF files to NEMSIO format. -# Note FV3 lat-lon grid is located at the center of each grid box, -# starting from south to north and from east to west. -# For example, for a 0.5-deg uniform grid, nlon=720, nlat=360 -# X(1,1)=[0.25E,89.75S], X(nlon,nlat)=[359.75E,89.75N] -#--------------------------------------------------------------------------- - -export CDATE=${CDATE:-"2016100300"} -export GG=${master_grid:-"0p25deg"} # 1deg 0p5deg 0p25deg 0p125deg -export FHZER=${FHZER:-6} # accumulation bucket in hours -export fdiag=${fdiag:-"none"} # specified forecast output hours - -pwd=$(pwd) -export DATA=${DATA:-$pwd} -export NWPROD=${NWPROD:-$pwd} -export HOMEgfs=${HOMEgfs:-$NWPROD} -export NC2NEMSIOEXE=${NC2NEMSIOEXE:-$HOMEgfs/exec/fv3nc2nemsio.x} - -cycn=$(echo $CDATE | cut -c 9-10) -export TCYC=${TCYC:-".t${cycn}z."} -export CDUMP=${CDUMP:-gfs} - -export PREFIX=${PREFIX:-${CDUMP}${TCYC}} -export SUFFIX=${SUFFIX:-".nemsio"} - -#-------------------------------------------------- -cd $DATA || exit 8 - -input_dir=$DATA -output_dir=$DATA - -in_3d=${PREFIX}nggps3d.${GG}.nc -in_2d=${PREFIX}nggps2d.${GG}.nc -if [ ! -s $in_3d -o ! -s $in_2d ]; then - echo "$in_3d and $in_2d are missing. exit" - exit 1 -fi - -#--check if the output is from non-hydrostatic case -nhrun=$(ncdump -c $in_3d | grep nhpres) -nhcase=$? 
- -# If no information on the time interval is given, deduce from the netCDF file -[[ $fdiag = "none" ]] && fdiag=$(ncks -H -s "%g " -C -v time $in_3d) - -#--------------------------------------------------- -nt=0 -err=0 -for fhour in $(echo $fdiag | sed "s/,/ /g"); do - nt=$((nt+1)) - ifhour=$(printf "%09d" $fhour) # convert to integer - fhzh=$(( (ifhour/FHZER-1)*FHZER )) # bucket accumulation starting hour - [[ $fhzh -lt 0 ]] && fhzh=0 - - fhr=$(printf "%03d" $fhour) - outfile=${PREFIX}atmf${fhr}${SUFFIX} - - $NC2NEMSIOEXE $CDATE $nt $fhzh $fhour $input_dir $in_2d $in_3d $output_dir $outfile $nhcase - rc=$? - ((err+=rc)) - - [[ ! -f $outfile ]] && ((err+=1)) - -done - -#--------------------------------------------------- -echo $(date) EXITING $0 with return code $err >&2 -exit $err diff --git a/ush/fv3gfs_regrid_nemsio.sh b/ush/fv3gfs_regrid_nemsio.sh deleted file mode 100755 index d43f5a5503..0000000000 --- a/ush/fv3gfs_regrid_nemsio.sh +++ /dev/null @@ -1,128 +0,0 @@ -#!/bin/sh - -################################################################################ -# UNIX Script Documentation Block -# Script name: fv3gfs_regrid_nemsio.sh -# Script description: Remap FV3 forecasts on six tile in NetCDF to global Gaussian -# grid with NEMSIO output -# -# $Id$ -# -# Author: Fanglin Yang Org: NCEP/EMC Date: 2016-12-01 -# Abstract: regrid_nemsio.fd provided by Jeffrey.S.Whitaker OAR/ESRL -# -# Script history log: -# 2016-12-01 Fanglin Yang -# 2017-02-13 Rahul Mahajan -# -# Attributes: -# Language: Portable Operating System Interface (POSIX) Shell -# Machine: WCOSS-CRAY, Theia -################################################################################ - -# Set environment. -VERBOSE=${VERBOSE:-"YES"} -if [ $VERBOSE = YES ] ; then - echo $(date) EXECUTING $0 $* >&2 - set -x -fi - -#------------------------------------------------------- -# Directories and paths -pwd=$(pwd) -DATA=${DATA:-$pwd} -NWPROD=${NWPROD:-$pwd} -HOMEgfs=${HOMEgfs:-$NWPROD} -FIX_DIR=${FIX_DIR:-$HOMEgfs/fix} -FIX_AM=${FIX_AM:-$FIX_DIR/fix_am} -FIXfv3=${FIXfv3:-$FIX_DIR/fix_fv3_gmted2010} -REGRID_NEMSIO_EXEC=${REGRID_NEMSIO_EXEC:-$HOMEgfs/exec/regrid_nemsio} -REGRID_NEMSIO_TBL=${REGRID_NEMSIO_TBL:-$HOMEgfs/parm/parm_fv3diag/variable_table.txt} - -CDATE=${CDATE:-2017011500} -CDUMP=${CDUMP:-"gdas"} -CASE=${CASE:-C768} -LEVS=${LEVS:-65} -GG=${GG:-gaussian} # gaussian or regular lat-lon -res=$(echo $CASE | cut -c2-) -JCAP=${JCAP:-$((res*2-2))} -LATB=${LATB:-$((res*2))} -LONB=${LONB:-$((res*4))} - -NEMSIO_OUT2DNAME=${NEMSIO_OUT2DNAME:-sfc.$CDATE} -NEMSIO_OUT3DNAME=${NEMSIO_OUT3DNAME:-atm.$CDATE} -DEBUG=${REGRID_NEMSIO_DEBUG:-".true."} - -APRUN_REGRID_NEMSIO=${APRUN_REGRID_NEMSIO:-${APRUN:-""}} -NTHREADS_REGRID_NEMSIO=${NTHREADS_REGRID_NEMSIO:-${NTHREADS:-1}} - -NMV=${NMV:-"/bin/mv"} - -#------------------------------------------------------- -# IO specific parameters and error traps -ERRSCRIPT=${ERRSCRIPT:-'eval [[ $err = 0 ]]'} - -#-------------------------------------------------- -# ESMF regrid weights and output variable table -weight_bilinear=${weight_bilinear:-$FIXfv3/$CASE/fv3_SCRIP_${CASE}_GRIDSPEC_lon${LONB}_lat${LATB}.${GG}.bilinear.nc} -weight_neareststod=${weight_neareststod:-$FIXfv3/$CASE/fv3_SCRIP_${CASE}_GRIDSPEC_lon${LONB}_lat${LATB}.${GG}.neareststod.nc} - -#------------------------------------------------------- -# Go to the directory where the history files are -cd $DATA || exit 8 - -#------------------------------------------------------- -# Create namelist -rm -f regrid-nemsio.input - -cat > 
regrid-nemsio.input << EOF -&share - debug=$DEBUG, - ntrunc=$JCAP, - nlons=$LONB, - nlats=$LATB, - datapathout2d='$NEMSIO_OUT2DNAME', - datapathout3d='$NEMSIO_OUT3DNAME', - analysis_filename='fv3_history.tile1.nc','fv3_history.tile2.nc','fv3_history.tile3.nc','fv3_history.tile4.nc','fv3_history.tile5.nc','fv3_history.tile6.nc', - analysis_filename2d='fv3_history2d.tile1.nc','fv3_history2d.tile2.nc','fv3_history2d.tile3.nc','fv3_history2d.tile4.nc','fv3_history2d.tile5.nc','fv3_history2d.tile6.nc', - forecast_timestamp='${CDATE}', - variable_table='$REGRID_NEMSIO_TBL', - nemsio_opt3d='bin4', - nemsio_opt2d='bin4' -/ - -&interpio - esmf_bilinear_filename='$weight_bilinear', - esmf_neareststod_filename='$weight_neareststod', - gfs_hyblevs_filename='$FIX_AM/global_hyblev.l$LEVS.txt' -/ -EOF - -#------------------------------------------------------------------ -export OMP_NUM_THREADS=$NTHREADS_REGRID_NEMSIO -$APRUN_REGRID_NEMSIO $REGRID_NEMSIO_EXEC - -export ERR=$? -export err=$ERR -$ERRSCRIPT || exit $err - -rm -f regrid-nemsio.input - -#------------------------------------------------------------------ -PDY=$(echo $CDATE | cut -c1-8) -cyc=$(echo $CDATE | cut -c9-10) -PREFIX=${PREFIX:-"${CDUMP}.t${cyc}z."} -SUFFIX=${SUFFIX:-".nemsio"} -for ftype in atm sfc; do - for file in $(ls -1 ${ftype}.${CDATE}.fhr*); do - fhrchar=$(echo $file | cut -d. -f3 | cut -c4-) - $NMV $file ${PREFIX}${ftype}f${fhrchar}${SUFFIX} - done -done - -#------------------------------------------------------------------ -set +x -if [ $VERBOSE = "YES" ] ; then - echo $(date) EXITING $0 with return code $err >&2 -fi -exit $err diff --git a/ush/fv3gfs_remap.sh b/ush/fv3gfs_remap.sh index d5258e0975..430e96c868 100755 --- a/ush/fv3gfs_remap.sh +++ b/ush/fv3gfs_remap.sh @@ -1,21 +1,21 @@ -#!/bin/ksh -set -ax +#! /usr/bin/env bash #-------------------------------------- #-- remap FV3 6 tiles to global array #-- Fanglin Yang, October 2016 #-------------------------------------- +source "$HOMEgfs/ush/preamble.sh" + export CDATE=${CDATE:-"2016100300"} export CASE=${CASE:-"C192"} # C48 C96 C192 C384 C768 C1152 C3072 export GG=${master_grid:-"0p25deg"} # 1deg 0p5deg 0p25deg 0p125deg pwd=$(pwd) export DATA=${DATA:-$pwd} -export NWPROD=${NWPROD:-$pwd} -export HOMEgfs=${HOMEgfs:-$NWPROD} +export HOMEgfs=${HOMEgfs:-$PACKAGEROOT} export FIX_DIR=${FIX_DIR:-$HOMEgfs/fix} -export FIXfv3=${FIXfv3:-$FIX_DIR/fix_fv3_gmted2010} +export FIXfv3=${FIXfv3:-$FIX_DIR/orog} export REMAPEXE=${REMAPEXE:-$HOMEgfs/exec/fregrid_parallel} export IPD4=${IPD4:-"YES"} @@ -114,6 +114,5 @@ for type in atmos_4xdaily nggps2d nggps3d ; do done -echo $(date) EXITING $0 with return code $err >&2 exit $err diff --git a/ush/fv3gfs_remap_weights.sh b/ush/fv3gfs_remap_weights.sh index a4140a7c88..15dfc73e3f 100755 --- a/ush/fv3gfs_remap_weights.sh +++ b/ush/fv3gfs_remap_weights.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#! /usr/bin/env bash #BSUB -L /bin/sh #BSUB -P FV3GFS-T2O #BSUB -oo log.weights diff --git a/ush/gaussian_sfcanl.sh b/ush/gaussian_sfcanl.sh index 866cd6d255..1e68ca0186 100755 --- a/ush/gaussian_sfcanl.sh +++ b/ush/gaussian_sfcanl.sh @@ -1,4 +1,5 @@ -#!/bin/ksh +#! /usr/bin/env bash + ################################################################################ #### UNIX Script Documentation Block # . . @@ -27,9 +28,9 @@ # HOMEgfs Directory for gfs version. Default is # $BASEDIR/gfs_ver.v15.0.0} # FIXam Directory for the global fixed climatology files. 
-# Defaults to $HOMEgfs/fix/fix_am +# Defaults to $HOMEgfs/fix/am # FIXfv3 Directory for the model grid and orography netcdf -# files. Defaults to $HOMEgfs/fix/fix_fv3_gmted2010 +# files. Defaults to $HOMEgfs/fix/orog # FIXWGTS Weight file to use for interpolation # EXECgfs Directory of the program executable. Defaults to # $HOMEgfs/exec @@ -41,7 +42,7 @@ # defaults to current working directory # XC Suffix to add to executables. Defaults to none. # GAUSFCANLEXE Program executable. -# Defaults to $EXECgfs/gaussian_sfcanl.exe +# Defaults to $EXECgfs/gaussian_sfcanl.x # INISCRIPT Preprocessing script. Defaults to none. # LOGSCRIPT Log posting script. Defaults to none. # ERRSCRIPT Error processing script @@ -86,11 +87,11 @@ # $FIXWGTS # $FIXam/global_hyblev.l65.txt # -# input data : $COMOUT/RESTART/${PDY}.${cyc}0000.sfcanl_data.tile*.nc +# input data : ${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile*.nc # # output data: $PGMOUT # $PGMERR -# $COMOUT/${APREFIX}sfcanl${ASUFFIX} +# $COMOUT/${APREFIX}sfcanl.nc # # Remarks: # @@ -109,12 +110,7 @@ # ################################################################################ -# Set environment. -VERBOSE=${VERBOSE:-"NO"} -if [[ "$VERBOSE" = "YES" ]] ; then - echo $(date) EXECUTING $0 $* >&2 - set -x -fi +source "$HOMEgfs/ush/preamble.sh" CASE=${CASE:-C768} res=$(echo $CASE | cut -c2-) @@ -125,27 +121,19 @@ LATB_SFC=${LATB_SFC:-$LATB_CASE} DONST=${DONST:-"NO"} LEVS=${LEVS:-64} LEVSP1=$(($LEVS+1)) -OUTPUT_FILE=${OUTPUT_FILE:-"nemsio"} -if [ $OUTPUT_FILE = "netcdf" ]; then - export NETCDF_OUT=".true." -else - export NETCDF_OUT=".false." -fi - # Directories. -gfs_ver=${gfs_ver:-v15.0.0} -BASEDIR=${BASEDIR:-${NWROOT:-/nwprod2}} -HOMEgfs=${HOMEgfs:-$BASEDIR/gfs_ver.${gfs_ver}} +gfs_ver=${gfs_ver:-v16.3.0} +BASEDIR=${BASEDIR:-${PACKAGEROOT:-/lfs/h1/ops/prod/packages}} +HOMEgfs=${HOMEgfs:-$BASEDIR/gfs.${gfs_ver}} EXECgfs=${EXECgfs:-$HOMEgfs/exec} -FIXfv3=${FIXfv3:-$HOMEgfs/fix/fix_fv3_gmted2010} -FIXam=${FIXam:-$HOMEgfs/fix/fix_am} +FIXfv3=${FIXfv3:-$HOMEgfs/fix/orog} +FIXam=${FIXam:-$HOMEgfs/fix/am} FIXWGTS=${FIXWGTS:-$FIXfv3/$CASE/fv3_SCRIP_${CASE}_GRIDSPEC_lon${LONB_SFC}_lat${LATB_SFC}.gaussian.neareststod.nc} DATA=${DATA:-$(pwd)} -COMOUT=${COMOUT:-$(pwd)} # Filenames. 
-XC=${XC} -GAUSFCANLEXE=${GAUSFCANLEXE:-$EXECgfs/gaussian_sfcanl.exe} +XC=${XC:-} +GAUSFCANLEXE=${GAUSFCANLEXE:-$EXECgfs/gaussian_sfcanl.x} SIGLEVEL=${SIGLEVEL:-$FIXam/global_hyblev.l${LEVSP1}.txt} CDATE=${CDATE:?} @@ -160,7 +148,7 @@ export REDERR=${REDERR:-'2>'} # Set defaults ################################################################################ # Preprocessing -$INISCRIPT +${INISCRIPT:-} pwd=$(pwd) if [[ -d $DATA ]] then @@ -170,7 +158,8 @@ else mkdata=YES fi cd $DATA||exit 99 -[[ -d $COMOUT ]]||mkdir -p $COMOUT +[[ -d "${COM_ATMOS_ANALYSIS}" ]] || mkdir -p "${COM_ATMOS_ANALYSIS}" +[[ -d "${COM_ATMOS_RESTART}" ]] || mkdir -p "${COM_ATMOS_RESTART}" cd $DATA ################################################################################ @@ -179,12 +168,10 @@ export PGM=$GAUSFCANLEXE export pgm=$PGM $LOGSCRIPT -PDY=$(echo $CDATE | cut -c1-8) -cyc=$(echo $CDATE | cut -c9-10) -iy=$(echo $CDATE | cut -c1-4) -im=$(echo $CDATE | cut -c5-6) -id=$(echo $CDATE | cut -c7-8) -ih=$(echo $CDATE | cut -c9-10) +iy=${PDY:0:4} +im=${PDY:4:2} +id=${PDY:6:2} +ih=${cyc} export OMP_NUM_THREADS=${OMP_NUM_THREADS_SFC:-1} @@ -192,12 +179,12 @@ export OMP_NUM_THREADS=${OMP_NUM_THREADS_SFC:-1} $NLN $FIXWGTS ./weights.nc # input analysis tiles (with nst records) -$NLN $COMOUT/RESTART/${PDY}.${cyc}0000.sfcanl_data.tile1.nc ./anal.tile1.nc -$NLN $COMOUT/RESTART/${PDY}.${cyc}0000.sfcanl_data.tile2.nc ./anal.tile2.nc -$NLN $COMOUT/RESTART/${PDY}.${cyc}0000.sfcanl_data.tile3.nc ./anal.tile3.nc -$NLN $COMOUT/RESTART/${PDY}.${cyc}0000.sfcanl_data.tile4.nc ./anal.tile4.nc -$NLN $COMOUT/RESTART/${PDY}.${cyc}0000.sfcanl_data.tile5.nc ./anal.tile5.nc -$NLN $COMOUT/RESTART/${PDY}.${cyc}0000.sfcanl_data.tile6.nc ./anal.tile6.nc +${NLN} "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile1.nc" "./anal.tile1.nc" +${NLN} "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile2.nc" "./anal.tile2.nc" +${NLN} "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile3.nc" "./anal.tile3.nc" +${NLN} "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile4.nc" "./anal.tile4.nc" +${NLN} "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile5.nc" "./anal.tile5.nc" +${NLN} "${COM_ATMOS_RESTART}/${PDY}.${cyc}0000.sfcanl_data.tile6.nc" "./anal.tile6.nc" # input orography tiles $NLN $FIXfv3/$CASE/${CASE}_oro_data.tile1.nc ./orog.tile1.nc @@ -210,19 +197,21 @@ $NLN $FIXfv3/$CASE/${CASE}_oro_data.tile6.nc ./orog.tile6.nc $NLN $SIGLEVEL ./vcoord.txt # output gaussian global surface analysis files -$NLN $COMOUT/${APREFIX}sfcanl${ASUFFIX} ./sfc.gaussian.analysis.file +${NLN} "${COM_ATMOS_ANALYSIS}/${APREFIX}sfcanl.nc" "./sfc.gaussian.analysis.file" + +# Namelist uses booleans now +if [[ ${DONST} == "YES" ]]; then do_nst='.true.'; else do_nst='.false.'; fi # Executable namelist cat < fort.41 &setup - yy=$iy, - mm=$im, - dd=$id, - hh=$ih, - igaus=$LONB_SFC, - jgaus=$LATB_SFC, - donst=$DONST, - netcdf_out=$NETCDF_OUT + yy=${iy}, + mm=${im}, + dd=${id}, + hh=${ih}, + igaus=${LONB_SFC}, + jgaus=${LATB_SFC}, + donst=${do_nst}, / EOF @@ -236,10 +225,5 @@ $ERRSCRIPT||exit 2 # Postprocessing cd $pwd [[ $mkdata = YES ]]&&rmdir $DATA -$ENDSCRIPT -set +x -if [[ "$VERBOSE" = "YES" ]] -then - echo $(date) EXITING $0 with return code $err >&2 -fi -exit $err + +exit ${err} diff --git a/ush/getdump.sh b/ush/getdump.sh index a9142f1a51..462ca5e755 100755 --- a/ush/getdump.sh +++ b/ush/getdump.sh @@ -1,12 +1,13 @@ -#!/bin/ksh -set -x +#! 
/usr/bin/env bash -export COMPONENT=${COMPONENT:-atmos} +source "$HOMEgfs/ush/preamble.sh" + +COMPONENT=${COMPONENT:-atmos} CDATE=${1:-""} CDUMP=${2:-""} -SOURCE_DIR=${3:-$DMPDIR/${CDUMP}${DUMP_SUFFIX}.${PDY}/${cyc}} -TARGET_DIR=${4:-$ROTDIR/${CDUMP}.${PDY}/$cyc/$COMPONENT} +SOURCE_DIR=${3:-$DMPDIR/${CDUMP}${DUMP_SUFFIX}.${PDY}/${cyc}/${COMPONENT}} +TARGET_DIR=${4:-$ROTDIR/${CDUMP}.${PDY}/${cyc}/${COMPONENT}} DUMP_SUFFIX=${DUMP_SUFFIX:-""} @@ -38,6 +39,3 @@ else fi exit 0 - - - diff --git a/ush/getges.sh b/ush/getges.sh index 62ce0eea3f..2fb54fccc7 100755 --- a/ush/getges.sh +++ b/ush/getges.sh @@ -1,4 +1,5 @@ -#!/bin/ksh +#! /usr/bin/env bash + ################################################################################ # # Name: getges.sh Author: Mark Iredell @@ -74,6 +75,9 @@ # ################################################################################ #------------------------------------------------------------------------------- + +source "$HOMEgfs/ush/preamble.sh" + # Set some default parameters. fhbeg=03 # hour to begin searching backward for guess fhinc=03 # hour to increment backward in search @@ -151,7 +155,7 @@ if [[ $gfile = '?' || $# -gt 1 || $err -ne 0 || -z $valid ||\ fi exit 1 fi -[[ $quiet = NO ]]&&set -x + if [[ $envir != prod && $envir != test && $envir != para && $envir != dump && $envir != pr? && $envir != dev ]];then netwk=$envir envir=prod @@ -1345,8 +1349,9 @@ while [[ $fh -le $fhend ]];do ghp2=$fhp2;[[ $ghp2 -lt 100 ]]&&ghp2=0$ghp2 ghp3=$fhp3;[[ $ghp3 -lt 100 ]]&&ghp3=0$ghp3 id=$($NDATE -$fh $valid) - typeset -L8 day=$id - typeset -R2 cyc=$id + + day=$(echo $id | xargs | cut -c8) + cyc=$(echo $id | xargs | rev | cut -c1-2 | rev) eval list=\$getlist$fh [[ -z "$list" ]]&&list=${geslist} for ges_var in $list;do @@ -1369,8 +1374,10 @@ fi # Either copy guess to a file or write guess name to standard output. if [[ -z "$gfile" ]];then echo $ges - exit $? + err=$? else cp $ges $gfile - exit $? + err=$? 
fi + +exit ${err} diff --git a/ush/getgfsnctime b/ush/getgfsnctime new file mode 100755 index 0000000000..d493339292 --- /dev/null +++ b/ush/getgfsnctime @@ -0,0 +1,34 @@ +#!/usr/bin/env python +# getgfsnctime +# cory.r.martin@noaa.gov +# 2019-10-17 +# script to return initial and valid time +# for specified netCDF file +import argparse +import gsi_utils +import datetime + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description='Get initial/valid time information from a FV3GFS netCDF file') + parser.add_argument('ncfile', help='path to input netCDF file', + type=str) + parser.add_argument('-i','--init', action='store_true', default=False, + help='option to print out initialized time YYYYMMDDHHMM') + parser.add_argument('-v','--valid', action='store_true', default=False, + help='option to print out valid time YYYYMMDDHHMM') + parser.add_argument('-f','--fhour', action='store_true', default=False, + help='option to print out forecast hour') + args = parser.parse_args() + inittime, validtime, nfhour = gsi_utils.get_timeinfo(args.ncfile) + if args.init: + print(inittime.strftime("%Y%m%d%H%M")) + elif args.valid: + print(validtime.strftime("%Y%m%d%H%M")) + elif args.fhour: + print(nfhour) + else: + print(args.ncfile) + print('Initial time: '+inittime.strftime("%Y-%m-%d %H:%M")) + print('Valid time: '+validtime.strftime("%Y-%m-%d %H:%M")) + print('Forecast hour: '+str(nfhour)) diff --git a/ush/getncdimlen b/ush/getncdimlen new file mode 100755 index 0000000000..5d230f6cc3 --- /dev/null +++ b/ush/getncdimlen @@ -0,0 +1,17 @@ +#!/usr/bin/env python +# getncdimlen +# cory.r.martin@noaa.gov +# 2019-10-17 +# script to return length of requested dimension +# for specified netCDF file +import argparse +import gsi_utils + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description='Get length of dimension specified from a FV3GFS netCDF file') + parser.add_argument('ncfile', help='path to input netCDF file', type=str) + parser.add_argument('dimname', help='name of dimension (ex: grid_xt)', type=str) + args = parser.parse_args() + FileDims = gsi_utils.get_ncdims(args.ncfile) + print(FileDims[args.dimname]) diff --git a/ush/gfs_bfr2gpk.sh b/ush/gfs_bfr2gpk.sh index 5971817f00..add68536ec 100755 --- a/ush/gfs_bfr2gpk.sh +++ b/ush/gfs_bfr2gpk.sh @@ -1,4 +1,5 @@ -#!/bin/sh +#! /usr/bin/env bash + ######################################################################### # # # Script: gfs_bfr2gpk # @@ -9,7 +10,7 @@ # Log: # # K. Brill/HPC 04/12/05 # ######################################################################### -set -x +source "${HOMEgfs}/ush/preamble.sh" # Set GEMPAK paths. @@ -17,32 +18,19 @@ set -x # Go to a working directory. -cd $DATA - -# Set input directory name. - -#BPATH=$COMIN/bufr.t${cyc}z -BPATH=$COMOUT/bufr.t${cyc}z -export BPATH +cd "${DATA}" || exit 2 # Set output directory: - -COMAWP=${COMAWP:-$COMOUT/gempak} -OUTDIR=$COMAWP -if [ ! -d $OUTDIR ]; then mkdir -p $OUTDIR; fi +if [[ ! -d "${COM_ATMOS_GEMPAK}" ]]; then mkdir -p "${COM_ATMOS_GEMPAK}"; fi outfilbase=gfs_${PDY}${cyc} # Get the list of individual station files. 
date -##filelist=$(/bin/ls -1 $BPATH | grep bufr) -##rm -f bufr.combined -##for file in $filelist; do -## cat $BPATH/$file >> bufr.combined -##done - cat $BPATH/bufr.*.${PDY}${cyc} > bufr.combined +cat "${COM_ATMOS_BUFR}/bufr."*".${PDY}${cyc}" > bufr.combined date + namsnd << EOF > /dev/null SNBUFR = bufr.combined SNOUTF = ${outfilbase}.snd @@ -54,20 +42,20 @@ r ex EOF + date -/bin/rm *.nts +/bin/rm ./*.nts snd=${outfilbase}.snd sfc=${outfilbase}.sfc -cp $snd $OUTDIR/.$snd -cp $sfc $OUTDIR/.$sfc -mv $OUTDIR/.$snd $OUTDIR/$snd -mv $OUTDIR/.$sfc $OUTDIR/$sfc - -if [ $SENDDBN = "YES" ] -then - $DBNROOT/bin/dbn_alert MODEL GFS_PTYP_SFC $job $OUTDIR/$sfc - $DBNROOT/bin/dbn_alert MODEL GFS_PTYP_SND $job $OUTDIR/$snd +cp "${snd}" "${COM_ATMOS_GEMPAK}/.${snd}" +cp "${sfc}" "${COM_ATMOS_GEMPAK}/.${sfc}" +mv "${COM_ATMOS_GEMPAK}/.${snd}" "${COM_ATMOS_GEMPAK}/${snd}" +mv "${COM_ATMOS_GEMPAK}/.${sfc}" "${COM_ATMOS_GEMPAK}/${sfc}" + +if [[ ${SENDDBN} == "YES" ]]; then + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PTYP_SFC "${job}" "${COM_ATMOS_GEMPAK}/${sfc}" + "${DBNROOT}/bin/dbn_alert" MODEL GFS_PTYP_SND "${job}" "${COM_ATMOS_GEMPAK}/${snd}" fi -echo done > $DATA/gembufr.done +echo "done" > "${DATA}/gembufr.done" diff --git a/ush/gfs_bufr.sh b/ush/gfs_bufr.sh index 5788bbcc43..b782c707c9 100755 --- a/ush/gfs_bufr.sh +++ b/ush/gfs_bufr.sh @@ -1,4 +1,5 @@ -#!/bin/ksh +#! /usr/bin/env bash + # # UTILITY SCRIPT NAME : gfsbufr.sh # AUTHOR : Hua-Lu Pan @@ -16,33 +17,20 @@ # 2018-05-22 Guang Ping Lou: Making it work for both GFS and FV3GFS # 2018-05-30 Guang Ping Lou: Make sure all files are available. # 2019-10-10 Guang Ping Lou: Read in NetCDF files -echo "History: February 2003 - First implementation of this utility script" +# echo "History: February 2003 - First implementation of this utility script" # +source "${HOMEgfs:?}/ush/preamble.sh" -set -ax - -if test "$F00FLAG" = "YES" -then +if [[ "${F00FLAG}" == "YES" ]]; then f00flag=".true." else f00flag=".false." fi -hh=$FSTART -while test $hh -le $FEND -do - hh=$( expr $hh + $FINT ) - if test $hh -lt 10 - then - hh=0$hh - fi -done - -export pgm=gfs_bufr +export pgm="gfs_bufr.x" #. prep_step -if test "$MAKEBUFR" = "YES" -then +if [[ "${MAKEBUFR}" == "YES" ]]; then bufrflag=".true." else bufrflag=".false." @@ -51,65 +39,55 @@ fi ##fformat="nc" ##fformat="nemsio" - CLASS="class1fv3" +CLASS="class1fv3" cat << EOF > gfsparm &NAMMET - levs=$LEVS,makebufr=$bufrflag, - dird="$COMOUT/bufr.${cycle}/bufr", - nstart=$FSTART,nend=$FEND,nint=$FINT, - nend1=$NEND1,nint1=$NINT1,nint3=$NINT3, - nsfc=80,f00=$f00flag,fformat=$fformat,np1=0 + levs=${LEVS},makebufr=${bufrflag}, + dird="${COM_ATMOS_BUFR}/bufr", + nstart=${FSTART},nend=${FEND},nint=${FINT}, + nend1=${NEND1},nint1=${NINT1},nint3=${NINT3}, + nsfc=80,f00=${f00flag},fformat=${fformat},np1=0 / EOF -hh=$FSTART - if test $hh -lt 100 - then - hh1=$(echo "${hh#"${hh%??}"}") - hh=$hh1 - fi -while test $hh -le $FEND -do - if test $hh -lt 100 - then - hh2=0$hh - else - hh2=$hh - fi +for (( hr = 10#${FSTART}; hr <= 10#${FEND}; hr = hr + 10#${FINT} )); do + hh2=$(printf %02i "${hr}") + hh3=$(printf %03i "${hr}") -#--------------------------------------------------------- -# Make sure all files are available: + #--------------------------------------------------------- + # Make sure all files are available: ic=0 - while [ $ic -lt 1000 ] - do - if [ ! -f $COMIN/${RUN}.${cycle}.logf${hh2}.${logfm} ] - then + while (( ic < 1000 )); do + if [[ ! 
-f "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.logf${hh3}.${logfm}" ]]; then sleep 10 - ic=$(expr $ic + 1) + ic=$((ic + 1)) else break fi - if [ $ic -ge 360 ] - then - err_exit "COULD NOT LOCATE logf${hh2} file AFTER 1 HOUR" + if (( ic >= 360 )); then + echo "FATAL: COULD NOT LOCATE logf${hh3} file AFTER 1 HOUR" + exit 2 fi done -#------------------------------------------------------------------ - ln -sf $COMIN/${RUN}.${cycle}.atmf${hh2}.${atmfm} sigf${hh} - ln -sf $COMIN/${RUN}.${cycle}.sfcf${hh2}.${atmfm} flxf${hh} - - hh=$( expr $hh + $FINT ) - if test $hh -lt 10 - then - hh=0$hh - fi -done + #------------------------------------------------------------------ + ln -sf "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atmf${hh3}.${atmfm}" "sigf${hh2}" + ln -sf "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.sfcf${hh3}.${atmfm}" "flxf${hh2}" +done # define input BUFR table file. -ln -sf $PARMbufrsnd/bufr_gfs_${CLASS}.tbl fort.1 -ln -sf ${STNLIST:-$PARMbufrsnd/bufr_stalist.meteo.gfs} fort.8 -ln -sf $PARMbufrsnd/bufr_ij13km.txt fort.7 +ln -sf "${PARMbufrsnd}/bufr_gfs_${CLASS}.tbl" fort.1 +ln -sf "${STNLIST:-${PARMbufrsnd}/bufr_stalist.meteo.gfs}" fort.8 +ln -sf "${PARMbufrsnd}/bufr_ij13km.txt" fort.7 + +${APRUN_POSTSND} "${EXECbufrsnd}/${pgm}" < gfsparm > "out_gfs_bufr_${FEND}" +export err=$? + +if [ $err -ne 0 ]; then + echo "GFS postsnd job error, Please check files " + echo "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.atmf${hh2}.${atmfm}" + echo "${COM_ATMOS_HISTORY}/${RUN}.${cycle}.sfcf${hh2}.${atmfm}" + err_chk +fi -${APRUN_POSTSND} $EXECbufrsnd/gfs_bufr < gfsparm > out_gfs_bufr_$FEND -export err=$?;err_chk +exit ${err} diff --git a/ush/gfs_bufr_netcdf.sh b/ush/gfs_bufr_netcdf.sh index 9733e02c3d..b358c6b69a 100755 --- a/ush/gfs_bufr_netcdf.sh +++ b/ush/gfs_bufr_netcdf.sh @@ -1,4 +1,5 @@ -#!/bin/ksh +#! /usr/bin/env bash + # # UTILITY SCRIPT NAME : gfsbufr.sh # AUTHOR : Hua-Lu Pan @@ -16,10 +17,9 @@ # 2018-05-22 Guang Ping Lou: Making it work for both GFS and FV3GFS # 2018-05-30 Guang Ping Lou: Make sure all files are available. # 2019-10-10 Guang Ping Lou: Read in NetCDF files -echo "History: February 2003 - First implementation of this utility script" +# echo "History: February 2003 - First implementation of this utility script" # - -set -ax +source "$HOMEgfs/ush/preamble.sh" if test "$F00FLAG" = "YES" then @@ -38,7 +38,7 @@ do fi done -export pgm=gfs_bufr +export pgm="gfs_bufr.x" #. prep_step if test "$MAKEBUFR" = "YES" @@ -48,10 +48,8 @@ else bufrflag=".false." fi -fformat="nc" - - SFCF="sfc" - CLASS="class1fv3" +SFCF="sfc" +CLASS="class1fv3" cat << EOF > gfsparm &NAMMET levs=$LEVS,makebufr=$bufrflag, @@ -82,7 +80,7 @@ do ic=0 while [ $ic -lt 1000 ] do - if [ ! -f $COMIN/${RUN}.${cycle}.logf${hh2}.${fformat} ] + if [ ! 
-f $COMIN/${RUN}.${cycle}.logf${hh2}.txt ] then sleep 10 ic=$(expr $ic + 1) @@ -96,8 +94,8 @@ do fi done #------------------------------------------------------------------ - ln -sf $COMIN/${RUN}.${cycle}.atmf${hh2}.${fformat} sigf${hh} - ln -sf $COMIN/${RUN}.${cycle}.${SFCF}f${hh2}.${fformat} flxf${hh} + ln -sf $COMIN/${RUN}.${cycle}.atmf${hh2}.nc sigf${hh} + ln -sf $COMIN/${RUN}.${cycle}.${SFCF}f${hh2}.nc flxf${hh} hh=$( expr $hh + $FINT ) if test $hh -lt 10 @@ -111,5 +109,7 @@ ln -sf $PARMbufrsnd/bufr_gfs_${CLASS}.tbl fort.1 ln -sf ${STNLIST:-$PARMbufrsnd/bufr_stalist.meteo.gfs} fort.8 ln -sf $PARMbufrsnd/bufr_ij13km.txt fort.7 -${APRUN_POSTSND} $EXECbufrsnd/gfs_bufr < gfsparm > out_gfs_bufr_$FEND -export err=$?;err_chk +${APRUN_POSTSND} "${EXECbufrsnd}/${pgm}" < gfsparm > "out_gfs_bufr_${FEND}" +export err=$? + +exit ${err} diff --git a/ush/gfs_post.sh b/ush/gfs_post.sh new file mode 100755 index 0000000000..01161acf52 --- /dev/null +++ b/ush/gfs_post.sh @@ -0,0 +1,416 @@ +#! /usr/bin/env bash + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: gfs_post.sh +# Script description: Posts the global pressure GRIB file +# +# Author: Mark Iredell Org: NP23 Date: 1999-05-01 +# +# Abstract: This script reads a single global GFS IO file and (optionally) +# a global flux file and creates a global pressure GRIB file. +# The resolution and generating code of the output GRIB file can also +# be set in the argument list. +# +# Script history log: +# 1999-05-01 Mark Iredell +# 2007-04-04 Huiya Chuang: Modify the script to run unified post +# 2012-06-04 Jun Wang: add grib2 option +# 2015-03-20 Lin Gan: add Perl for Post XML performance upgrade +# 2016-02-08 Lin Gan: Modify to use Vertical Structure +# 2018-02-05 Wen Meng: For EE2 standard, create gfs_post.sh based +# global_post.sh and change EXECglobal to EXECgfs; +# Remove legacy setting for reading non-nemsio model output +# and generating grib1 data +# 2019-06-02 Wen Meng: Remove the links of gfs fix files. +# 2021-06-11 Yali Mao: Instead of err_chk, 'exit $err' for wafsfile +# if POSTGPEXEC fails +# +# Usage: global_postgp.sh SIGINP FLXINP FLXIOUT PGBOUT PGIOUT IGEN +# +# Input script positional parameters: +# 1 Input sigma file +# defaults to $SIGINP +# 2 Input flux file +# defaults to $FLXINP +# 3 Output flux index file +# defaults to $FLXIOUT +# 4 Output pressure GRIB file +# defaults to $PGBOUT +# 5 Output pressure GRIB index file +# defaults to $PGIOUT, then to none +# 8 Model generating code, +# defaults to $IGEN, then to input sigma generating code +# +# Imported Shell Variables: +# SIGINP Input sigma file +# overridden by $1 +# FLXINP Input flux file +# overridden by $2 +# FLXIOUT Output flux index file +# overridden by $3 +# PGBOUT Output pressure GRIB file +# overridden by $4. 
If not defined, +# post will use the filename specified in +# the control file +# PGIOUT Output pressure GRIB index file +# overridden by $5; defaults to none +# IGEN Model generating code +# overridden by $8; defaults to input sigma generating code +##### Moorthi: Add new imported shell variable for running chgres +# CHGRESSH optional: the script to run chgres +# default to to ${USHglobal}/global_chgres.sh +# SIGLEVEL optional: the coordinate text file +# default to to /nwprod/fix/global_hyblev.l${LEVS}.txt +##### Chuang: Add new imported Shell Variable for post +# OUTTYP Output file type read in by post +# 1: if user has a sigma file and needs post to run chgres to convert to gfs io file +# 2: if user already has a gfs io file +# 3: if user uses post to read sigma file directly +# 0: if user wishes to generate both gfsio and sigma files +# 4: if user uses post to read nemsio file directly +# VDATE Verifying date 10 digits yyyymmddhh +# GFSOUT Optional, output file name from chgres which is input file name to post +# if model already runs gfs io, make sure GFSOUT is linked to the gfsio file +# CTLFILE Optional, Your version of control file if not using operational one +# OVERPARMEXEC Optional, the executable for changing Grib KPDS ID +# default to to ${EXECglobal}/overparm_grib +# CHGRESTHREAD Optional, speed up chgres by using multiple threads +# default to 1 +# FILTER Optional, set to 1 to filter SLP and 500 mb height using copygb +# D3DINP Optional, Inout D3D file, if not defined, post will run +# without processing D3D file +# D3DOUT Optional, output D3D file, if not defined, post will +# use the file name specified in the control file +# IPVOUT Optional, output IPV file, if not defined, post will +# use the file name specified in the control file +# GENPSICHI Optional, set to YES will generate psi and chi and +# append it to the end of PGBOUT. Default to NO +# GENPSICHIEXE Optional, specify where executable is for generating +# psi and chi. 
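To make the long variable list above easier to digest, here is a minimal, hypothetical caller for the netCDF read path (OUTTYP=4). Only the variables shown are illustrated; the file names and date are made up, and everything else (PostFlatFile, POSTGRB2TBL, executables, fix files, etc.) is assumed to come from the job environment as usual.

```bash
export OUTTYP=4                            # read the model history file directly
export MODEL_OUT_FORM=netcdfpara           # script default for OUTTYP=4
export VDATE=2021062506                    # verifying date, YYYYMMDDHH (hypothetical)
export NEMSINP="gfs.t00z.atmf006.nc"       # model history file (hypothetical)
export FLXINP="gfs.t00z.sfcf006.nc"        # matching flux file (hypothetical)
export PGBOUT="gfs.t00z.master.grb2f006"   # output pressure GRIB file
export GRIBVERSION=grib2
"${HOMEgfs}/ush/gfs_post.sh"
```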
+######################################################################## +# EXECUTIL Directory for utility executables +# defaults to /nwprod/util/exec +# USHUTIL Directory for utility scripts +# defaults to /nwprod/util/ush +# EXECglobal Directory for global executables +# defaults to /nwprod/exec +# USHglobal Directory for global scripts +# defaults to /nwprod/ush +# DATA working directory +# (if nonexistent will be made, used and deleted) +# defaults to current working directory +# MP Multi-processing type ("p" or "s") +# defaults to "p", or "s" if LOADL_STEP_TYPE is not PARALLEL +# XC Suffix to add to executables +# defaults to none +# POSTGPEXEC Global post executable +# defaults to ${EXECglobal}/upp.x +# GRBINDEX GRIB index maker +# defaults to ${EXECUTIL}/grbindex$XC +# POSTGPLIST File containing further namelist inputs +# defaults to /dev/null +# INISCRIPT Preprocessing script +# defaults to none +# LOGSCRIPT Log posting script +# defaults to none +# ERRSCRIPT Error processing script +# defaults to 'eval [[ $err = 0 ]]' +# ENDSCRIPT Postprocessing script +# defaults to none +# POSTGPVARS Other namelist inputs to the global post executable +# such as IDRT,KO,PO,KTT,KT,PT,KZZ,ZZ, +# NCPUS,MXBIT,IDS,POB,POT,MOO,MOOA,MOW,MOWA, +# ICEN,ICEN2,IENST,IENSI +# defaults to none set +# NTHREADS Number of threads +# defaults to 1 +# NTHSTACK Size of stack per thread +# defaults to 64000000 +# VERBOSE Verbose flag (YES or NO) +# defaults to NO +# PGMOUT Executable standard output +# defaults to $pgmout, then to '&1' +# PGMERR Executable standard error +# defaults to $pgmerr, then to '&1' +# pgmout Executable standard output default +# pgmerr Executable standard error default +# REDOUT standard output redirect ('1>' or '1>>') +# defaults to '1>', or to '1>>' to append if $PGMOUT is a file +# REDERR standard error redirect ('2>' or '2>>') +# defaults to '2>', or to '2>>' to append if $PGMERR is a file +# +# Exported Shell Variables: +# PGM Current program name +# pgm +# ERR Last return code +# err +# +# Modules and files referenced: +# scripts : $INISCRIPT +# $LOGSCRIPT +# $ERRSCRIPT +# $ENDSCRIPT +# +# programs : $POSTGPEXEC +# $GRBINDEX +# +# input data : $1 or $SIGINP +# $2 or $SFCINP +# $POSTGPLIST +# +# output data: $3 or $FLXIOUT +# $4 or $PGBOUT +# $5 or $PGIOUT +# $PGMOUT +# $PGMERR +# +# scratch : ${DATA}/postgp.inp.sig +# ${DATA}/postgp.inp.flx +# ${DATA}/postgp.out.pgb +# +# Remarks: +# +# Condition codes +# 0 - no problem encountered +# >0 - some problem encountered +# +# Control variable resolution priority +# 1 Command line argument. +# 2 Environment variable. +# 3 Inline default. +# +# Attributes: +# Language: POSIX shell +# Machine: IBM SP +# +#### +################################################################################ +# Set environment. +source "${HOMEgfs}/ush/preamble.sh" + +# Command line arguments. +export SIGINP=${1:-${SIGINP:-}} +export FLXINP=${2:-${FLXINP:-}} +export FLXIOUT=${3:-${FLXIOUT:-}} +export PGBOUT=${4:-${PGBOUT:-}} +#export PGIOUT=${5:-${PGIOUT}} +export PGIOUT=${PGIOUT:-pgb.idx} +export IO=${6:-${IO:-0}} +export JO=${7:-${JO:-0}} +export IGEN=${8:-${IGEN:-0}} +# Directories. +export NWPROD=${NWPROD:-/nwprod} +#export EXECUTIL=${EXECUTIL:-${NWPROD}/util/exec} +export USHUTIL=${USHUTIL:-${NWPROD}/util/ush} +export EXECgfs=${EXECgfs:-${NWPROD}/exec} +export USHgfs=${USHgfs:-${NWPROD}/ush} +export DATA=${DATA:-$(pwd)} +# Filenames. 
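The REDOUT/REDERR defaults documented above are derived from the first character of PGMOUT/PGMERR by the small test that appears a little further below. A sketch of how the logic resolves, with an assumed value:

```bash
PGMOUT='postgp.out'                        # assumed value, for illustration
l=$(echo "${PGMOUT}" | xargs | cut -c1)    # first non-blank character
[[ ${l} = '&' ]] && a='' || a='>'
REDOUT="1>${a}"                            # '1>>' here (append to the log file);
                                           # PGMOUT='&1' would give '1>' instead
echo "${REDOUT}"
```

REDERR follows the same rule using PGMERR and '2>' versus '2>>'.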
+export MP=${MP:-$([[ ${LOADL_STEP_TYPE:-SERIAL} = PARALLEL ]]&&echo "p"||echo "s")} +export XC=${XC:-} +export POSTGPEXEC=${POSTGPEXEC:-${EXECgfs}/upp.x} +export OVERPARMEXEC=${OVERPARMEXEC:-${EXECgfs}/overparm_grib} +export POSTGPLIST=${POSTGPLIST:-/dev/null} +export INISCRIPT=${INISCRIPT:-} +# Ignore warning about single quote not subtituting now +# shellcheck disable=SC2016 +export ERRSCRIPT=${ERRSCRIPT:-'eval (( err == 0 ))'} +# shellcheck disable= +export LOGSCRIPT=${LOGSCRIPT:-} +export ENDSCRIPT=${ENDSCRIPT:-} +export GFSOUT=${GFSOUT:-gfsout} +export CTLFILE=${CTLFILE:-${NWPROD}/parm/gfs_cntrl.parm} +export GRIBVERSION=${GRIBVERSION:-'grib1'} +# Other variables. +export POSTGPVARS=${POSTGPVARS} +export NTHREADS=${NTHREADS:-1} +export NTHSTACK=${NTHSTACK:-64000000} +export PGMOUT=${PGMOUT:-${pgmout:-'&1'}} +export PGMERR=${PGMERR:-${pgmerr:-'&2'}} +export CHGRESTHREAD=${CHGRESTHREAD:-1} +export FILTER=${FILTER:-0} +export GENPSICHI=${GENPSICHI:-NO} +export GENPSICHIEXE=${GENPSICHIEXE:-${EXECgfs}/genpsiandchi} +export ens=${ens:-NO} +#export D3DINP=${D3DINP:-/dev/null} +l="$(echo "${PGMOUT}" | xargs | cut -c1)" +[[ ${l} = '&' ]]&&a=''||a='>' +export REDOUT=${REDOUT:-'1>'${a}} +l="$(echo "${PGMERR}" | xargs | cut -c1)" +[[ ${l} = '&' ]]&&a=''||a='>' +export REDERR=${REDERR:-'2>'${a}} +################################################################################ + +# Chuang: Run chgres if OUTTYP=1 or 0 + +export APRUN=${APRUNP:-${APRUN:-""}} + +# exit if NEMSINP does not exist +if (( OUTTYP == 4 )) ; then + if [ ! -s "${NEMSINP}" ] || [ ! -s "${FLXINP}" ] ; then + echo "model files not found, exitting" + exit 111 + fi +fi + +export SIGHDR=${SIGHDR:-${NWPROD}/exec/global_sighdr} +export IDRT=${IDRT:-4} + +# run post to read file if OUTTYP=4 +if (( OUTTYP == 4 )) ; then + export MODEL_OUT_FORM=${MODEL_OUT_FORM:-netcdfpara} + export GFSOUT=${NEMSINP} +fi + +# allow threads to use threading in Jim's sp lib +# but set default to 1 +export OMP_NUM_THREADS=${OMP_NUM_THREADS:-1} + +pwd=$(pwd) +if [[ -d "${DATA}" ]]; then + mkdata=NO +else + mkdir -p "${DATA}" + mkdata=YES +fi +cd "${DATA}" || exit 99 +################################################################################ +# Post GRIB +export PGM=${POSTGPEXEC} +export pgm=${PGM} +${LOGSCRIPT} +cat <<-EOF >postgp.inp.nml$$ + &NAMPGB + ${POSTGPVARS} +EOF + +cat <<-EOF >>postgp.inp.nml$$ + / +EOF + +if [[ "${VERBOSE}" = "YES" ]]; then + cat postgp.inp.nml$$ +fi + +# making the time stamp format for ncep post +YY=$(echo "${VDATE}" | cut -c1-4) +MM=$(echo "${VDATE}" | cut -c5-6) +DD=$(echo "${VDATE}" | cut -c7-8) +HH=$(echo "${VDATE}" | cut -c9-10) +export YY MM DD HH + +cat > itag <<-EOF + &model_inputs + fileName='${GFSOUT}' + IOFORM='${MODEL_OUT_FORM}' + grib='${GRIBVERSION}' + DateStr='${YY}-${MM}-${DD}_${HH}:00:00' + MODELNAME='GFS' + fileNameFlux='${FLXINP}' + / +EOF + +cat postgp.inp.nml$$ >> itag + +cat itag + +rm -f fort.* + +#ln -sf $SIGINP postgp.inp.sig$$ +#ln -sf $FLXINP postgp.inp.flx$$ +#ln -sf $PGBOUT postgp.out.pgb$$ + +# change model generating Grib number +if [ "${GRIBVERSION}" = "grib2" ]; then + cp "${POSTGRB2TBL}" . 
+ cp "${PostFlatFile}" ./postxconfig-NT.txt + if [ "${ens}" = "YES" ] ; then + sed < "${PostFlatFile}" -e "s#negatively_pert_fcst#${ens_pert_type}#" > ./postxconfig-NT.txt + fi + # cp ${CTLFILE} postcntrl.xml +fi +CTL=$(basename "${CTLFILE}") +export CTL + +ln -sf griddef.out fort.110 +cp "${PARMpost}/nam_micro_lookup.dat" ./eta_micro_lookup.dat + +echo "gfs_post.sh OMP_NUM_THREADS= ${OMP_NUM_THREADS}" +${APRUN:-mpirun.lsf} "${POSTGPEXEC}" < itag > "outpost_gfs_${VDATE}_${CTL}" + +export ERR=$? +export err=${ERR} + +if (( err != 0 )) ; then + if [ "${PGBOUT}" = "wafsfile" ] ; then + exit "${err}" + fi +fi +${ERRSCRIPT} || exit 2 + +if [ "${FILTER}" = "1" ] ; then + # Filter SLP and 500 mb height using copygb, change GRIB ID, and then + # cat the filtered fields to the pressure GRIB file, from Iredell + + if [ "${GRIBVERSION}" = "grib2" ]; then + if [ "${ens}" = "YES" ] ; then + "${COPYGB2}" -x -i'4,0,80' -k'1 3 0 7*-9999 101 0 0' "${PGBOUT}" tfile + export err=$?; err_chk + else + "${COPYGB2}" -x -i'4,0,80' -k'0 3 0 7*-9999 101 0 0' "${PGBOUT}" tfile + export err=$?; err_chk + fi + ${WGRIB2} tfile -set_byte 4 11 1 -grib prmsl + export err=$?; err_chk + if [ "${ens}" = "YES" ] ; then + "${COPYGB2}" -x -i'4,1,5' -k'1 3 5 7*-9999 100 0 50000' "${PGBOUT}" tfile + export err=$?; err_chk + else + "${COPYGB2}" -x -i'4,1,5' -k'0 3 5 7*-9999 100 0 50000' "${PGBOUT}" tfile + export err=$?; err_chk + fi + ${WGRIB2} tfile -set_byte 4 11 193 -grib h5wav + export err=$?; err_chk + + #cat $PGBOUT prmsl h5wav >> $PGBOUT + #wm + # cat prmsl h5wav >> $PGBOUT + [[ -f prmsl ]] && rm prmsl + [[ -f h5wav ]] && rm h5wav + [[ -f tfile ]] && rm tfile + fi +fi + +################################################################################ +# Make GRIB index file +if [[ -n "${PGIOUT}" ]]; then + if [ "${GRIBVERSION}" = "grib2" ]; then + ${GRB2INDEX} "${PGBOUT}" "${PGIOUT}" + fi +fi +if [[ -r ${FLXINP} && -n ${FLXIOUT} && ${OUTTYP} -le 3 ]]; then + ${GRBINDEX} "${FLXINP}" "${FLXIOUT}" +fi +################################################################################ +# generate psi and chi +echo "GENPSICHI = ${GENPSICHI}" +if [ "${GENPSICHI}" = "YES" ] ; then + #echo "PGBOUT PGIOUT=" $PGBOUT $PGIOUT + #echo "YY MM=" $YY $MM + export psichifile=./psichi.grb + ${GENPSICHIEXE} < postgp.inp.nml$$ + rc=$? + if (( rc != 0 )); then + echo "Nonzero return code rc=${rc}" + exit 3 + fi + cat ./psichi.grb >> "${PGBOUT}" +fi +################################################################################ +# Postprocessing +cd "${pwd}" || exit 2 +[[ "${mkdata}" = "YES" ]] && rmdir "${DATA}" + +exit "${err}" diff --git a/ush/gfs_sndp.sh b/ush/gfs_sndp.sh index 53bc6fd9d6..579dd5ae25 100755 --- a/ush/gfs_sndp.sh +++ b/ush/gfs_sndp.sh @@ -1,4 +1,5 @@ -#!/bin/ksh +#! /usr/bin/env bash + ################################################################ # Script Name: gfs_sndp.sh # Script Description: Format GFS BUFR sounding files for AWIPS @@ -6,7 +7,7 @@ # 1) 2004-09-10 Steve Gilbert First Implementation ################################################################ -set -x +source "$HOMEgfs/ush/preamble.sh" # Create "collectives" consisting of groupings of the soundings # into files designated by geographical region. Each input @@ -16,7 +17,6 @@ export m=$1 mkdir $DATA/$m cd $DATA/$m cp $FIXbufrsnd/gfs_collective${m}.list $DATA/$m/. 
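For orientation, gfs_sndp.sh (whose changes begin above) is written to be run once per regional "collective", with the collective number as its only argument; it reads gfs_collective${m}.list and builds gfs_collective${m}.fil. A hypothetical driver loop is sketched below; the real caller and the actual number of collectives are defined elsewhere in the workflow.

```bash
# Hypothetical driver; collective count and invocation path are assumptions
for m in 1 2 3; do
    "${HOMEgfs}/ush/gfs_sndp.sh" "${m}"
done
```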
-set +x CCCC=KWBC file_list=gfs_collective${m}.list @@ -32,14 +32,12 @@ set +x for stn in $(cat $file_list) do - cp ${COMOUT}/bufr.${cycle}/bufr.$stn.$PDY$cyc $DATA/${m}/bufrin - export pgm=tocsbufr + cp "${COM_ATMOS_BUFR}/bufr.${stn}.${PDY}${cyc}" "${DATA}/${m}/bufrin" + export pgm=tocsbufr.x #. prep_step export FORT11=$DATA/${m}/bufrin export FORT51=./bufrout - # JY - Turn off the startmsg to reduce the update on jlogfile in this loop - # startmsg - $EXECbufrsnd/tocsbufr << EOF + ${EXECbufrsnd}/${pgm} << EOF &INPUT BULHED="$WMOHEAD",KWBX="$CCCC", NCEP2STD=.TRUE., @@ -47,12 +45,11 @@ set +x MAXFILESIZE=600000 / EOF - # JY export err=$?; err_chk - export err=$?; #err_chk - if [ $err -ne 0 ] - then - echo "ERROR in $pgm" + export err=$?; + if (( err != 0 )); then + echo "FATAL ERROR in ${pgm}" err_chk + exit 3 fi cat $DATA/${m}/bufrout >> $DATA/${m}/gfs_collective$m.fil @@ -60,19 +57,12 @@ EOF rm $DATA/${m}/bufrout done -set -x -# if test $SENDCOM = 'NO' - if test $SENDCOM = 'YES' - then - if [ $SENDDBN = 'YES' ] ; then - cp $DATA/${m}/gfs_collective$m.fil $pcom/gfs_collective$m.postsnd_$cyc - $DBNROOT/bin/dbn_alert NTC_LOW BUFR $job $pcom/gfs_collective$m.postsnd_$cyc + if [[ ${SENDCOM} == 'YES' ]]; then + if [[ ${SENDDBN} == 'YES' ]] ; then + cp "${DATA}/${m}/gfs_collective${m}.fil" "${COM_ATMOS_WMO}/gfs_collective${m}.postsnd_${cyc}" + "${DBNROOT}/bin/dbn_alert" NTC_LOW BUFR "${job}" \ + "${COM_ATMOS_WMO}/gfs_collective${m}.postsnd_${cyc}" fi - cp $DATA/${m}/gfs_collective$m.fil ${COMOUT}/bufr.${cycle}/. + cp "${DATA}/${m}/gfs_collective${m}.fil" "${COM_ATMOS_BUFR}/." fi -## let "m=m+1" - -## done - -#exit diff --git a/ush/gfs_transfer.sh b/ush/gfs_transfer.sh index f8b00ea623..9d23ec849a 100755 --- a/ush/gfs_transfer.sh +++ b/ush/gfs_transfer.sh @@ -1,21 +1,22 @@ -#!/bin/ksh +#! /usr/bin/env bash ##################################################################### -echo "-----------------------------------------------------" -echo " Script: gfs_transfer.sh" -echo " " -echo " Purpose - Copy GFS Posts to /nwges and /com" -echo " Alert posted files to DBNet" -echo " " -echo " History - " -echo " Cooke - 04/21/05 - Inital version, based off of" -echo " global_transfer.sh" -echo " Meng - 01/04/18 - Remove writing data file to /nwges." -echo " Meng - 09/14/20 - Update model output format to netcdf for GFS V16" -echo "-----------------------------------------------------" +# echo "-----------------------------------------------------" +# echo " Script: gfs_transfer.sh" +# echo " " +# echo " Purpose - Copy GFS Posts to /nwges and /com" +# echo " Alert posted files to DBNet" +# echo " " +# echo " History - " +# echo " Cooke - 04/21/05 - Inital version, based off of" +# echo " global_transfer.sh" +# echo " Meng - 01/04/18 - Remove writing data file to /nwges." +# echo " Meng - 09/14/20 - Update model output format to netcdf for GFS V16" +# echo "-----------------------------------------------------" ##################################################################### -set -xa - + +source "$HOMEgfs/ush/preamble.sh" + # export CNVGRIB=/nwprod/util/exec/cnvgrib # export GRB2INDX=/nwprod/util/exec/grb2index # export WGRIB2=/nwprod/util/exec/wgrib2 diff --git a/ush/gfs_truncate_enkf.sh b/ush/gfs_truncate_enkf.sh index 8d9e2b959c..0a7d6fc0dd 100755 --- a/ush/gfs_truncate_enkf.sh +++ b/ush/gfs_truncate_enkf.sh @@ -1,6 +1,6 @@ -#!/bin/ksh +#! 
/usr/bin/env bash -set -x +source "$HOMEgfs/ush/preamble.sh" member=$1 export SIGINP=$2 @@ -14,7 +14,7 @@ mkdir -p $DATATMP cd $DATATMP export LEVS=${LEVS_LORES:-64} -export FIXam=${FIXam:-$HOMEgfs/fix/fix_am} +export FIXam=${FIXam:-$HOMEgfs/fix/am} export CHGRESSH=${CHGRESSH:-${USHgfs}/global_chgres.sh} export CHGRESEXEC=${CHGRESEXEC-${EXECgfs}/global_chgres} @@ -45,12 +45,11 @@ export APRUNC=${APRUNC:-""} export VERBOSE=YES echo "execute $CHGRESSH for $member" -eval "$CHGRESSH" +$CHGRESSH rc=$? export ERR=$rc export err=$ERR -echo EXITING $0 with return code $err exit $err diff --git a/ush/global_extrkr.sh b/ush/global_extrkr.sh deleted file mode 100755 index 67624a9898..0000000000 --- a/ush/global_extrkr.sh +++ /dev/null @@ -1,1703 +0,0 @@ -#!/bin/ksh - -#module load ics -export PS4='+t+$SECONDS extrkr.sh:$LINENO -- ' - -userid=$LOGNAME - -set +x -############################################################################## -cat<${DATA}/tmpsynvit.${atcfout}.${PDY}${CYL} - grep "${current_str}" ${synvitdir}/${synvitfile} \ - >>${DATA}/tmpsynvit.${atcfout}.${PDY}${CYL} - grep "${future_str}" ${synvitfuture_dir}/${synvitfuture_file} \ - >>${DATA}/tmpsynvit.${atcfout}.${PDY}${CYL} -else - set +x - echo " " - echo " There is no (synthetic) TC vitals file for ${CYL}z in ${synvitdir}," - echo " nor is there a TC vitals file for ${old_hh}z in ${synvitold_dir}." - echo " nor is there a TC vitals file for ${future_hh}z in ${synvitfuture_dir}," - echo " Checking the raw TC Vitals file ....." - echo " " - set -x -fi - -# Take the vitals from Steve Lord's /com/gfs/prod tcvitals file, -# and cat them with the NHC-only vitals from the raw, original -# /com/arch/prod/synda_tcvitals file. Do this because the nwprod -# tcvitals file is the original tcvitals file, and Steve runs a -# program that ignores the vitals for a storm that's over land or -# even just too close to land, and for tracking purposes for the -# US regional models, we need these locations. Only include these -# "inland" storm vitals for NHC (we're not going to track inland -# storms that are outside of NHC's domain of responsibility -- we -# don't need that info). -# UPDATE 5/12/98 MARCHOK: awk logic is added to screen NHC -# vitals such as "89E TEST", since TPC -# does not want tracks for such storms. - -grep "${old_str}" ${archsyndir}/syndat_tcvitals.${CENT}${syy} | \ - grep -v TEST | awk 'substr($0,6,1) !~ /8/ {print $0}' \ - >${DATA}/tmprawvit.${atcfout}.${PDY}${CYL} -grep "${current_str}" ${archsyndir}/syndat_tcvitals.${CENT}${syy} | \ - grep -v TEST | awk 'substr($0,6,1) !~ /8/ {print $0}' \ - >>${DATA}/tmprawvit.${atcfout}.${PDY}${CYL} -grep "${future_str}" ${archsyndir}/syndat_tcvitals.${CENT}${syy} | \ - grep -v TEST | awk 'substr($0,6,1) !~ /8/ {print $0}' \ - >>${DATA}/tmprawvit.${atcfout}.${PDY}${CYL} - - -# IMPORTANT: When "cat-ing" these files, make sure that the vitals -# files from the "raw" TC vitals files are first in order and Steve's -# TC vitals files second. This is because Steve's vitals file has -# been error-checked, so if we have a duplicate tc vitals record in -# these 2 files (very likely), program supvit.x below will -# only take the last vitals record listed for a particular storm in -# the vitals file (all previous duplicates are ignored, and Steve's -# error-checked vitals records are kept). 
- -cat ${DATA}/tmprawvit.${atcfout}.${PDY}${CYL} ${DATA}/tmpsynvit.${atcfout}.${PDY}${CYL} \ - >${DATA}/vitals.${atcfout}.${PDY}${CYL} - -#--------------------------------------------------------------# -# Now run a fortran program that will read all the TC vitals -# records for the current dtg and the dtg from 6h ago, and -# sort out any duplicates. If the program finds a storm that -# was included in the vitals file 6h ago but not for the current -# dtg, this program updates the 6h-old first guess position -# and puts these updated records as well as the records from -# the current dtg into a temporary vitals file. It is this -# temporary vitals file that is then used as the input for the -# tracking program. -#--------------------------------------------------------------# - -oldymdh=$( ${NDATE:?} -${vit_incr} ${PDY}${CYL}) -oldyy=${oldymdh:2:2} -oldmm=${oldymdh:4:2} -olddd=${oldymdh:6:2} -oldhh=${oldymdh:8:2} -oldymd=${oldyy}${oldmm}${olddd} - -futureymdh=$( ${NDATE:?} 6 ${PDY}${CYL}) -futureyy=${futureymdh:2:2} -futuremm=${futureymdh:4:2} -futuredd=${futureymdh:6:2} -futurehh=${futureymdh:8:2} -futureymd=${futureyy}${futuremm}${futuredd} - -cat<${DATA}/suv_input.${atcfout}.${PDY}${CYL} -&datenowin dnow%yy=${syy}, dnow%mm=${smm}, - dnow%dd=${sdd}, dnow%hh=${CYL}/ -&dateoldin dold%yy=${oldyy}, dold%mm=${oldmm}, - dold%dd=${olddd}, dold%hh=${oldhh}/ -&datefuturein dfuture%yy=${futureyy}, dfuture%mm=${futuremm}, - dfuture%dd=${futuredd}, dfuture%hh=${futurehh}/ -&hourinfo vit_hr_incr=${vit_incr}/ -EOF - - -numvitrecs=$(cat ${DATA}/vitals.${atcfout}.${PDY}${CYL} | wc -l) -if [ ${numvitrecs} -eq 0 ] -then - - if [ ${trkrtype} = 'tracker' ] - then - set +x - echo " " - echo "!!! NOTE -- There are no vitals records for this time period." - echo "!!! File ${DATA}/vitals.${atcfout}.${PDY}${CYL} is empty." - echo "!!! It could just be that there are no storms for the current" - echo "!!! time. Please check the dates and submit this job again...." - echo " " - set -x - exit 1 - fi - -fi - -# For tcgen cases, filter to use only vitals from the ocean -# basin of interest.... - -if [ ${trkrtype} = 'tcgen' ] - then - - if [ ${numvitrecs} -gt 0 ] - then - - fullvitfile=${DATA}/vitals.${atcfout}.${PDY}${CYL} - cp $fullvitfile ${DATA}/vitals.all_basins.${atcfout}.${PDY}${CYL} - basin=$( echo $regtype | cut -c1-2) - - if [ ${basin} = 'al' ]; then - cat $fullvitfile | awk '{if (substr($0,8,1) == "L") print $0}' \ - >${DATA}/vitals.tcgen_al_only.${atcfout}.${PDY}${CYL} - cp ${DATA}/vitals.tcgen_al_only.${atcfout}.${PDY}${CYL} \ - ${DATA}/vitals.${atcfout}.${PDY}${CYL} - fi - if [ ${basin} = 'ep' ]; then - cat $fullvitfile | awk '{if (substr($0,8,1) == "E") print $0}' \ - >${DATA}/vitals.tcgen_ep_only.${atcfout}.${PDY}${CYL} - cp ${DATA}/vitals.tcgen_ep_only.${atcfout}.${PDY}${CYL} \ - ${DATA}/vitals.${atcfout}.${PDY}${CYL} - fi - if [ ${basin} = 'wp' ]; then - cat $fullvitfile | awk '{if (substr($0,8,1) == "W") print $0}' \ - >${DATA}/vitals.tcgen_wp_only.${atcfout}.${PDY}${CYL} - cp ${DATA}/vitals.tcgen_wp_only.${atcfout}.${PDY}${CYL} \ - ${DATA}/vitals.${atcfout}.${PDY}${CYL} - fi - - cat ${DATA}/vitals.${atcfout}.${PDY}${CYL} - - fi - -fi - -# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -# Before running the program to read, sort and update the vitals, -# first run the vitals through some awk logic, the purpose of -# which is to convert all the 2-digit years into 4-digit years. 
-# We need this logic to ensure that all the vitals going -# into supvit.f have uniform, 4-digit years in their records. -# -# 1/8/2000: sed code added by Tim Marchok due to the fact that -# some of the vitals were getting past the syndata/qctropcy -# error-checking with a colon in them; the colon appeared -# in the character immediately to the left of the date, which -# was messing up the "(length($4) == 8)" statement logic. -# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -sed -e "s/\:/ /g" ${DATA}/vitals.${atcfout}.${PDY}${CYL} > ${DATA}/tempvit -mv ${DATA}/tempvit ${DATA}/vitals.${atcfout}.${PDY}${CYL} - -awk ' -{ - yycheck = substr($0,20,2) - if ((yycheck == 20 || yycheck == 19) && (length($4) == 8)) { - printf ("%s\n",$0) - } - else { - if (yycheck >= 0 && yycheck <= 50) { - printf ("%s20%s\n",substr($0,1,19),substr($0,20)) - } - else { - printf ("%s19%s\n",substr($0,1,19),substr($0,20)) - } - } -} ' ${DATA}/vitals.${atcfout}.${PDY}${CYL} >${DATA}/vitals.${atcfout}.${PDY}${CYL}.y4 - -mv ${DATA}/vitals.${atcfout}.${PDY}${CYL}.y4 ${DATA}/vitals.${atcfout}.${PDY}${CYL} - -if [ ${numvitrecs} -gt 0 ] -then - - export pgm=supvit - . $prep_step - - ln -s -f ${DATA}/vitals.${atcfout}.${PDY}${CYL} fort.31 - ln -s -f ${DATA}/vitals.upd.${atcfout}.${PDY}${CYL} fort.51 - - msg="$pgm start for $atcfout at ${CYL}z" - $postmsg "$jlogfile" "$msg" - - ${exectrkdir}/supvit <${DATA}/suv_input.${atcfout}.${PDY}${CYL} - suvrcc=$? - - if [ ${suvrcc} -eq 0 ] - then - msg="$pgm end for $atcfout at ${CYL}z completed normally" - $postmsg "$jlogfile" "$msg" - else - set +x - echo " " - echo "!!! ERROR -- An error occurred while running supvit.x, " - echo "!!! which is the program that updates the TC Vitals file." - echo "!!! Return code from supvit.x = ${suvrcc}" - echo "!!! model= ${atcfout}, forecast initial time = ${PDY}${CYL}" - echo "!!! Exiting...." - echo " " - set -x - err_exit " FAILED ${jobid} - ERROR RUNNING SUPVIT IN TRACKER SCRIPT- ABNORMAL EXIT" - fi - -else - - touch ${DATA}/vitals.upd.${atcfout}.${PDY}${CYL} - -fi - -#----------------------------------------------------------------- -# In this section, check to see if the user requested the use of -# operational TC vitals records for the initial time only. This -# option might be used for a retrospective medium range forecast -# in which the user wants to initialize with the storms that are -# currently there, but then let the model do its own thing for -# the next 10 or 14 days.... - -#------------------------------------------------------------------# -# Now select all storms to be processed, that is, process every -# storm that's listed in the updated vitals file for the current -# forecast hour. If there are no storms for the current time, -# then exit. -#------------------------------------------------------------------# - -numvitrecs=$(cat ${DATA}/vitals.upd.${atcfout}.${PDY}${CYL} | wc -l) -if [ ${numvitrecs} -eq 0 ] -then - if [ ${trkrtype} = 'tracker' ] - then - set +x - echo " " - echo "!!! NOTE -- There are no vitals records for this time period " - echo "!!! in the UPDATED vitals file." - echo "!!! It could just be that there are no storms for the current" - echo "!!! time. Please check the dates and submit this job again...." 
- echo " " - set -x - exit 1 - fi -fi - -set +x -echo " " -echo " *--------------------------------*" -echo " | STORM SELECTION |" -echo " *--------------------------------*" -echo " " -set -x - -ict=1 -while [ $ict -le 15 ] -do - stormflag[${ict}]=3 - let ict=ict+1 -done - -dtg_current="${symd} ${CYL}00" -stormmax=$( grep "${dtg_current}" ${DATA}/vitals.upd.${atcfout}.${PDY}${CYL} | wc -l) - -if [ ${stormmax} -gt 15 ] -then - stormmax=15 -fi - -sct=1 -while [ ${sct} -le ${stormmax} ] -do - stormflag[${sct}]=1 - let sct=sct+1 -done - - -#---------------------------------------------------------------# -# -# -------- "Genesis" Vitals processing -------- -# -# May 2006: This entire genesis tracking system is being -# upgraded to more comprehensively track and categorize storms. -# One thing that has been missing from the tracking system is -# the ability to keep track of storms from one analysis cycle -# to the next. That is, the current system has been very -# effective at tracking systems within a forecast, but we have -# no methods in place for keeping track of storms across -# difference initial times. For example, if we are running -# the tracker on today's 00z GFS analysis, we will get a -# position for various storms at the analysis time. But then -# if we go ahead and run again at 06z, we have no way of -# telling the tracker that we know about the 00z position of -# this storm. We now address that problem by creating -# "genesis" vitals, that is, when a storm is found at an -# analysis time, we not only produce "atcfunix" output to -# detail the track & intensity of a found storm, but we also -# produce a vitals record that will be used for the next -# run of the tracker script. These "genesis vitals" records -# will be of the format: -# -# YYYYMMDDHH_AAAH_LLLLX_TYP -# -# Where: -# -# YYYYMMDDHH = Date the storm was FIRST identified -# by the tracker. -# AAA = Abs(Latitude) * 10; integer value -# H = 'N' for norther hem, 'S' for southern hem -# LLLL = Abs(Longitude) * 10; integer value -# X = 'E' for eastern hem, 'W' for western hem -# TYP = Tropical cyclone storm id if this is a -# tropical cyclone (e.g., "12L", or "09W", etc). -# If this is one that the tracker instead "Found -# On the Fly (FOF)", we simply put those three -# "FOF" characters in there. 
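For concreteness, the genesis vitals id format described above works out as follows (all values hypothetical): a storm first identified by the tracker at 2021062500, centered near 25.3N, 72.8W, and matched to tropical cyclone 05L would be recorded as 2021062500_253N_0728W_05L; the same feature found on the fly instead would carry the id 2021062500_253N_0728W_FOF.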
- -d6ago_ymdh=$( ${NDATE:?} -6 ${PDY}${CYL}) -d6ago_4ymd=$( echo ${d6ago_ymdh} | cut -c1-8) -d6ago_ymd=$( echo ${d6ago_ymdh} | cut -c3-8) -d6ago_hh=$( echo ${d6ago_ymdh} | cut -c9-10) -d6ago_str="${d6ago_ymd} ${d6ago_hh}00" - -d6ahead_ymdh=$( ${NDATE:?} 6 ${PDY}${CYL}) -d6ahead_4ymd=$( echo ${d6ahead_ymdh} | cut -c1-8) -d6ahead_ymd=$( echo ${d6ahead_ymdh} | cut -c3-8) -d6ahead_hh=$( echo ${d6ahead_ymdh} | cut -c9-10) -d6ahead_str="${d6ahead_ymd} ${d6ahead_hh}00" - -syyyym6=$( echo ${d6ago_ymdh} | cut -c1-4) -smmm6=$( echo ${d6ago_ymdh} | cut -c5-6) -sddm6=$( echo ${d6ago_ymdh} | cut -c7-8) -shhm6=$( echo ${d6ago_ymdh} | cut -c9-10) - -syyyyp6=$( echo ${d6ahead_ymdh} | cut -c1-4) -smmp6=$( echo ${d6ahead_ymdh} | cut -c5-6) -sddp6=$( echo ${d6ahead_ymdh} | cut -c7-8) -shhp6=$( echo ${d6ahead_ymdh} | cut -c9-10) - -set +x -echo " " -echo " d6ago_str= --->${d6ago_str}<---" -echo " current_str= --->${current_str}<---" -echo " d6ahead_str= --->${d6ahead_str}<---" -echo " " -echo " for the times 6h ago, current and 6h ahead:" -echo " " -echo " " -set -x - - touch ${DATA}/genvitals.upd.${cmodel}.${atcfout}.${PDY}${CYL} - - -#-----------------------------------------------------------------# -# -# ------ CUT APART INPUT GRIB FILES ------- -# -# For the selected model, cut apart the GRIB input files in order -# to pull out only the variables that we need for the tracker. -# Put these selected variables from all forecast hours into 1 big -# GRIB file that we'll use as input for the tracker. -# -#-----------------------------------------------------------------# - -set +x -echo " " -echo " -----------------------------------------" -echo " NOW CUTTING APART INPUT GRIB FILES TO " -echo " CREATE 1 BIG GRIB INPUT FILE " -echo " -----------------------------------------" -echo " " -set -x - -#gix=$NWPROD/util/exec/grbindex -#g2ix=$NWPROD/util/exec/grb2index -#cgb=$NWPROD/util/exec/copygb -#cgb2=$NWPROD/util/exec/copygb2 - -regflag=$(grep NHC ${DATA}/vitals.upd.${atcfout}.${PDY}${CYL} | wc -l) - -# ---------------------------------------------------------------------- -find_gfile() { - # This subroutine finds an input file from a list of possible - # input filenames, and calls err_exit if no file is found. The - # first file found is returned. - - # Calling conventions: - # find_gfile GFS 30 /path/to/file1.master.pgrbq30.grib2 /path/to/file2.master.pgrbq030.grib2 ... - nicename="$1" - nicehour="$2" - shift 2 - gfile=none - echo "Searching for input $nicename data for forecast hour $nicehour" - set -x - now=$( date +%s ) - later=$(( now + wait_max_time )) - # Note: the loop has only one iteration if --wait-max-time is - # unspecified. That is because later=now - while [[ ! ( "$now" -gt "$later" ) ]] ; do - for gfile in "$@" ; do - if [[ ! -e "$gfile" ]] ; then - set +x - echo "$gfile: does not exist" - set -x - gfile=none - elif [[ ! -s "$gfile" ]] ; then - set +x - echo "$gfile: exists, but is empty" - set -x - gfile=none - else - set +x - echo "$gfile: exists, is non-empty, so I will use this file" - set -x - return 0 - fi - done - now=$( date +%s ) - if [[ "$gfile" == none ]] ; then - if [[ ! ( "$now" -lt "$later" ) ]] ; then - set +x - echo " " - echo " " - echo " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" - echo " !!! $nicename missing for hour $nicehour" - echo " !!! Check for the existence of these file:" - for gfile in "$@" ; do - echo " !!! $nicename File: $gfile" - done - echo " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" 
- echo " " - set -x - err_exit "ERROR: mandatory input GFS file for hour $nicehour is missing or empty. Aborting. Checked for these files: $*" - continue - else - set +x - echo " " - echo " !!! Mandatory input $nicename missing for hour $nicehour" - echo " !!! Will retry after $sleep_time second sleep." - echo " !!! Checked these files:" - for gfile in "$@" ; do - echo " !!! $nicename File: $gfile" - done - echo " " - set -x - sleep $sleep_time - fi - fi - done -} - -# -------------------------------------------------- -# Process GFS or GDAS data -# -------------------------------------------------- -if [[ ${model} -eq 1 || $model == 8 ]] ; then - - export nest_type="fixed" - export trkrebd=360.0 - export trkrwbd=0.0 - export trkrnbd=85.0 - export trkrsbd=-85.0 - rundescr="xxxx" - atcfdescr="xxxx" - - - if [ $loopnum -eq 1 ] - then - - if [ -s ${DATA}/gfsgribfile.${PDY}${CYL} ] - then - rm ${DATA}/gfsgribfile.${PDY}${CYL} - fi - - rm ${DATA}/master.gfsgribfile.${PDY}${CYL}.f* - rm ${DATA}/gfsgribfile.${PDY}${CYL}.f* - >${DATA}/gfsgribfile.${PDY}${CYL} - - set +x - echo " " - echo "Time before gfs wgrib loop is $(date)" - echo " " - set -x - - if [[ "$model" -eq 8 ]] ; then - name=gdas - name1=gdas - nicename=GDAS - else # not model 8, so assume GFS - name=gfs - name1=gfs - nicename=GFS - fi - - for fhour in ${fcsthrs} ; do - fhour=$( echo "$fhour" | bc ) - - if [ ${fhour} -eq $bad_hour ] - then - continue - fi - - fhour00=$( printf %02d "$fhour" ) - fhour000=$( printf %03d "$fhour" ) - fhour0000=$( printf %03d "$fhour" ) - - if [[ "$gribver" == 1 ]] ; then - - find_gfile "$nicename" "$fhour" \ - ${gfsdir}/$name1.t${CYL}z.${flag_pgb}$fhour00 \ - ${gfsdir}/$name1.t${CYL}z.${flag_pgb}$fhour000 \ - ${gfsdir}/pgb${flag_pgb}$fhour00.$name.${symdh} \ - ${gfsdir}/pgrb${flag_pgb}$fhour00.$name.${symdh} - ${WGRIB:?} -s $gfile >gfs.ix - - for parm in ${wgrib_parmlist} - do - case ${parm} in - "SurfaceU") grep "UGRD:10 m " gfs.ix ;; - "SurfaceV") grep "VGRD:10 m " gfs.ix ;; - *) grep "${parm}" gfs.ix ;; - esac - done | ${WGRIB:?} -s $gfile -i -grib -append \ - -o ${DATA}/master.gfsgribfile.${PDY}${CYL}.f${fhour000} - - gfs_master_file=${DATA}/master.gfsgribfile.${PDY}${CYL}.f${fhour000} - gfs_converted_file=${DATA}/gfsgribfile.${PDY}${CYL}.f${fhour000} - gfs_cat_file=${DATA}/gfsgribfile.${PDY}${CYL} -# $cgb -g4 -i2 -x ${gfs_master_file} ${gfs_converted_file} -# cat ${gfs_converted_file} >>${gfs_cat_file} - cat ${gfs_master_file} >>${gfs_cat_file} - - else # gribver is not 1, so assume GRIB2 - - find_gfile "$nicename" "$fhour" \ - ${gfsdir}/$name1.t${CYL}z.pgrb2.0p25.f${fhour000} \ - ${gfsdir}/$name1.t${CYL}z.pgrb2.0p25.f${fhour00} \ - ${gfsdir}/pgb${flag_pgb}$fhour00.$name.${symdh}.grib2 \ - ${gfsdir}/pgrb${flag_pgb}${fhour000}.$name.${symdh}.grib2 - ${WGRIB2:?} -s $gfile >gfs.ix - - for parm in ${wgrib_parmlist} - do - case ${parm} in - "SurfaceU") grep "UGRD:10 m " gfs.ix ;; - "SurfaceV") grep "VGRD:10 m " gfs.ix ;; - *) grep "${parm}" gfs.ix ;; - esac - done | ${WGRIB2:?} -i $gfile -append -grib \ - ${DATA}/master.gfsgribfile.${PDY}${CYL}.f${fhour000} - - gfs_master_file=${DATA}/master.gfsgribfile.${PDY}${CYL}.f${fhour000} - gfs_converted_file=${DATA}/gfsgribfile.${PDY}${CYL}.f${fhour000} - gfs_cat_file=${DATA}/gfsgribfile.${PDY}${CYL} - - ${GRB2INDEX:?} ${gfs_master_file} ${gfs_master_file}.ix - - g1=${gfs_master_file} - x1=${gfs_master_file}.ix - -# grid4="0 6 0 0 0 0 0 0 720 361 0 0 90000000 0 48 -90000000 359500000 500000 500000 0" -# $cgb2 -g "${grid4}" ${g1} ${x1} ${gfs_converted_file} -# cat 
${gfs_converted_file} >>${gfs_cat_file} - - cat ${gfs_master_file} >>${gfs_cat_file} - - fi - - done - - if [ ${gribver} -eq 1 ]; then - ${GRBINDEX:?} ${DATA}/gfsgribfile.${PDY}${CYL} ${DATA}/gfsixfile.${PDY}${CYL} - else - ${GRB2INDEX:?} ${DATA}/gfsgribfile.${PDY}${CYL} ${DATA}/gfsixfile.${PDY}${CYL} - fi - -# -------------------------------------------- - - if [[ "$PhaseFlag" == y ]] ; then - - catfile=${DATA}/gfs.${PDY}${CYL}.catfile - >${catfile} - - for fhour in ${fcsthrs} - do - - - fhour=$( echo "$fhour" | bc ) - - if [ ${fhour} -eq $bad_hour ] - then - continue - fi - - fhour00=$( printf %02d "$fhour" ) - fhour000=$( printf %03d "$fhour" ) - fhour0000=$( printf %03d "$fhour" ) - - set +x - echo " " - echo "Date in interpolation for model= $cmodel and fhour= $fhour000 before = $(date)" - echo " " - set -x - - gfile=${DATA}/gfsgribfile.${PDY}${CYL} - ifile=${DATA}/gfsixfile.${PDY}${CYL} - - if [ ${gribver} -eq 1 ]; then - ${GRBINDEX:?} $gfile $ifile - else - ${GRB2INDEX:?} $gfile $ifile - fi - - gparm=7 - namelist=${DATA}/vint_input.${PDY}${CYL}.z - echo "&timein ifcsthour=${fhour000}," >${namelist} - echo " iparm=${gparm}," >>${namelist} - echo " gribver=${gribver}," >>${namelist} - echo " g2_jpdtn=${g2_jpdtn}/" >>${namelist} - - ln -s -f ${gfile} fort.11 - ln -s -f ${FIXRELO}/gfs_hgt_levs.txt fort.16 - ln -s -f ${ifile} fort.31 - ln -s -f ${DATA}/${cmodel}.${PDY}${CYL}.z.f${fhour000} fort.51 - - ${exectrkdir}/vint.x <${namelist} - rcc1=$? - - - gparm=11 - namelist=${DATA}/vint_input.${PDY}${CYL}.t - echo "&timein ifcsthour=${fhour000}," >${namelist} - echo " iparm=${gparm}," >>${namelist} - echo " gribver=${gribver}," >>${namelist} - echo " g2_jpdtn=${g2_jpdtn}/" >>${namelist} - - ln -s -f ${gfile} fort.11 - ln -s -f ${FIXRELO}/gfs_tmp_levs.txt fort.16 - ln -s -f ${ifile} fort.31 - ln -s -f ${DATA}/${cmodel}.${PDY}${CYL}.t.f${fhour000} fort.51 - - ${exectrkdir}/vint.x <${namelist} - rcc2=$? - - namelist=${DATA}/tave_input.${PDY}${CYL} - echo "&timein ifcsthour=${fhour000}," >${namelist} - echo " iparm=${gparm}," >>${namelist} - echo " gribver=${gribver}," >>${namelist} - echo " g2_jpdtn=${g2_jpdtn}/" >>${namelist} - - ffile=${DATA}/${cmodel}.${PDY}${CYL}.t.f${fhour000} - ifile=${DATA}/${cmodel}.${PDY}${CYL}.t.f${fhour000}.i - - if [ ${gribver} -eq 1 ]; then - ${GRBINDEX:?} ${ffile} ${ifile} - else - ${GRB2INDEX:?} ${ffile} ${ifile} - fi - - ln -s -f ${ffile} fort.11 - ln -s -f ${ifile} fort.31 - ln -s -f ${DATA}/${cmodel}.tave.${PDY}${CYL}.f${fhour000} fort.51 - ln -s -f ${DATA}/${cmodel}.tave92.${PDY}${CYL}.f${fhour000} fort.92 - - ${exectrkdir}/tave.x <${namelist} - rcc3=$? 
- - if [ $rcc1 -eq 0 -a $rcc2 -eq 0 -a $rcc3 -eq 0 ]; then - echo " " - else - mailfile=${rundir}/errmail.${cmodel}.${PDY}${CYL} - echo "CPS/WC interp failure for $cmodel ${PDY}${CYL}" >${mailfile} - mail -s "GFS Failure (CPS/WC int) $cmodel ${PDY}${CYL}" ${userid} <${mailfile} - exit 8 - fi - - tavefile=${DATA}/${cmodel}.tave.${PDY}${CYL}.f${fhour000} - zfile=${DATA}/${cmodel}.${PDY}${CYL}.z.f${fhour000} - cat ${zfile} ${tavefile} >>${catfile} -## rm $tavefile $zfile - - set +x - echo " " - echo "Date in interpolation for cmodel= $cmodel and fhour= $fhour000 after = $(date)" - echo " " - set -x - - done - fi # end of "If PhaseFlag is on" - fi # end of "If loopnum is 1" - - gfile=${DATA}/gfsgribfile.${PDY}${CYL} - ifile=${DATA}/gfsixfile.${PDY}${CYL} - - if [[ "$PhaseFlag" == y ]] ; then - cat ${catfile} >>${gfile} - if [ ${gribver} -eq 1 ]; then - ${GRBINDEX:?} ${gfile} ${ifile} - else - ${GRB2INDEX:?} ${gfile} ${ifile} - fi - fi - - # File names for input to tracker: - gribfile=${DATA}/gfsgribfile.${PDY}${CYL} - ixfile=${DATA}/gfsixfile.${PDY}${CYL} -fi - -$postmsg "$jlogfile" "SUCCESS: have all inputs needed to run tracker. Will now run the tracker." - -#------------------------------------------------------------------------# -# Now run the tracker # -#------------------------------------------------------------------------# - -ist=1 -while [ $ist -le 15 ] -do - if [ ${stormflag[${ist}]} -ne 1 ] - then - set +x; echo "Storm number $ist NOT selected for processing"; set -x - else - set +x; echo "Storm number $ist IS selected for processing...."; set -x - fi - let ist=ist+1 -done - -namelist=${DATA}/input.${atcfout}.${PDY}${CYL} -ATCFNAME=$( echo "${atcfname}" | tr '[a-z]' '[A-Z]') - -if [ ${cmodel} = 'sref' ]; then - export atcfymdh=$( ${NDATE:?} -3 ${scc}${syy}${smm}${sdd}${shh}) -else - export atcfymdh=${scc}${syy}${smm}${sdd}${shh} -fi - -contour_interval=100.0 -write_vit=n -want_oci=.TRUE. - -cat < ${namelist} -&datein inp%bcc=${scc},inp%byy=${syy},inp%bmm=${smm}, - inp%bdd=${sdd},inp%bhh=${shh},inp%model=${model}, - inp%modtyp='${modtyp}', - inp%lt_units='${lead_time_units}', - inp%file_seq='${file_sequence}', - inp%nesttyp='${nest_type}'/ -&atcfinfo atcfnum=${atcfnum},atcfname='${ATCFNAME}', - atcfymdh=${atcfymdh},atcffreq=${atcffreq}/ -&trackerinfo trkrinfo%westbd=${trkrwbd}, - trkrinfo%eastbd=${trkrebd}, - trkrinfo%northbd=${trkrnbd}, - trkrinfo%southbd=${trkrsbd}, - trkrinfo%type='${trkrtype}', - trkrinfo%mslpthresh=${mslpthresh}, - trkrinfo%v850thresh=${v850thresh}, - trkrinfo%gridtype='${modtyp}', - trkrinfo%contint=${contour_interval}, - trkrinfo%want_oci=${want_oci}, - trkrinfo%out_vit='${write_vit}', - trkrinfo%gribver=${gribver}, - trkrinfo%g2_jpdtn=${g2_jpdtn}/ -&phaseinfo phaseflag='${PHASEFLAG}', - phasescheme='${PHASE_SCHEME}', - wcore_depth=${WCORE_DEPTH}/ -&structinfo structflag='${STRUCTFLAG}', - ikeflag='${IKEFLAG}'/ -&fnameinfo gmodname='${atcfname}', - rundescr='${rundescr}', - atcfdescr='${atcfdescr}'/ -&verbose verb=3/ -&waitinfo use_waitfor='n', - wait_min_age=10, - wait_min_size=100, - wait_max_wait=1800, - wait_sleeptime=5, - per_fcst_command=''/ -EOF - -export pgm=gettrk -. 
$prep_step - -ln -s -f ${gribfile} fort.11 -ln -s -f ${DATA}/vitals.upd.${atcfout}.${PDY}${shh} fort.12 -ln -s -f ${DATA}/genvitals.upd.${cmodel}.${atcfout}.${PDY}${CYL} fort.14 -ihour=1 -for fhour in ${fcsthrs} ; do - fhour=$( echo "$fhour" | bc ) # strip leading zeros - printf "%4d %5d\n" $ihour $(( fhour * 60 )) - let ihour=ihour+1 -done > leadtimes.txt -ln -s -f leadtimes.txt fort.15 -#ln -s -f ${FIXRELO}/${cmodel}.tracker_leadtimes fort.15 -ln -s -f ${ixfile} fort.31 - -if [[ -z "$atcfout" ]] ; then - err_exit 'ERROR: exgfs_trkr script forgot to set $atcfout variable' -fi - -track_file_path=nowhere - -if [ ${trkrtype} = 'tracker' ]; then - if [ ${atcfout} = 'gfdt' -o ${atcfout} = 'gfdl' -o \ - ${atcfout} = 'hwrf' -o ${atcfout} = 'hwft' ]; then - ln -s -f ${DATA}/trak.${atcfout}.all.${stormenv}.${PDY}${CYL} fort.61 - ln -s -f ${DATA}/trak.${atcfout}.atcf.${stormenv}.${PDY}${CYL} fort.62 - ln -s -f ${DATA}/trak.${atcfout}.radii.${stormenv}.${PDY}${CYL} fort.63 - ln -s -f ${DATA}/trak.${atcfout}.atcf_gen.${stormenv}.${PDY}${CYL} fort.66 - ln -s -f ${DATA}/trak.${atcfout}.atcf_sink.${stormenv}.${PDY}${CYL} fort.68 - ln -s -f ${DATA}/trak.${atcfout}.atcf_hfip.${stormenv}.${PDY}${CYL} fort.69 - track_file_path=${DATA}/trak.${atcfout}.atcfunix.${stormenv}.${PDY}${CYL} - else - ln -s -f ${DATA}/trak.${atcfout}.all.${PDY}${CYL} fort.61 - ln -s -f ${DATA}/trak.${atcfout}.atcf.${PDY}${CYL} fort.62 - ln -s -f ${DATA}/trak.${atcfout}.radii.${PDY}${CYL} fort.63 - ln -s -f ${DATA}/trak.${atcfout}.atcf_gen.${PDY}${CYL} fort.66 - ln -s -f ${DATA}/trak.${atcfout}.atcf_sink.${PDY}${CYL} fort.68 - ln -s -f ${DATA}/trak.${atcfout}.atcf_hfip.${PDY}${CYL} fort.69 - track_file_path=${DATA}/trak.${atcfout}.atcfunix.${PDY}${CYL} - fi -else - ln -s -f ${DATA}/trak.${atcfout}.all.${regtype}.${PDY}${CYL} fort.61 - ln -s -f ${DATA}/trak.${atcfout}.atcf.${regtype}.${PDY}${CYL} fort.62 - ln -s -f ${DATA}/trak.${atcfout}.radii.${regtype}.${PDY}${CYL} fort.63 - ln -s -f ${DATA}/trak.${atcfout}.atcf_gen.${regtype}.${PDY}${CYL} fort.66 - ln -s -f ${DATA}/trak.${atcfout}.atcf_sink.${regtype}.${PDY}${CYL} fort.68 - ln -s -f ${DATA}/trak.${atcfout}.atcf_hfip.${regtype}.${PDY}${CYL} fort.69 - track_file_path=${DATA}/trak.${atcfout}.atcfunix.${regtype}.${PDY}${CYL} -fi - -if [[ "$track_file_path" == nowhere ]] ; then - err_exit 'ERROR: exgfs_trkr script forgot to set $track_file_path variable' -fi - -ln -s -f $track_file_path fort.64 - -if [ ${atcfname} = 'aear' ] -then - ln -s -f ${DATA}/trak.${atcfout}.initvitl.${PDY}${CYL} fort.65 -fi - -if [ ${write_vit} = 'y' ] -then - ln -s -f ${DATA}/output_genvitals.${atcfout}.${PDY}${shh} fort.67 -fi - -if [ ${PHASEFLAG} = 'y' ]; then - if [ ${atcfout} = 'gfdt' -o ${atcfout} = 'gfdl' -o \ - ${atcfout} = 'hwrf' -o ${atcfout} = 'hwft' ]; then - ln -s -f ${DATA}/trak.${atcfout}.cps_parms.${stormenv}.${PDY}${CYL} fort.71 - else - ln -s -f ${DATA}/trak.${atcfout}.cps_parms.${PDY}${CYL} fort.71 - fi -fi - -if [ ${STRUCTFLAG} = 'y' ]; then - if [ ${atcfout} = 'gfdt' -o ${atcfout} = 'gfdl' -o \ - ${atcfout} = 'hwrf' -o ${atcfout} = 'hwft' ]; then - ln -s -f ${DATA}/trak.${atcfout}.structure.${stormenv}.${PDY}${CYL} fort.72 - ln -s -f ${DATA}/trak.${atcfout}.fractwind.${stormenv}.${PDY}${CYL} fort.73 - ln -s -f ${DATA}/trak.${atcfout}.pdfwind.${stormenv}.${PDY}${CYL} fort.76 - else - ln -s -f ${DATA}/trak.${atcfout}.structure.${PDY}${CYL} fort.72 - ln -s -f ${DATA}/trak.${atcfout}.fractwind.${PDY}${CYL} fort.73 - ln -s -f ${DATA}/trak.${atcfout}.pdfwind.${PDY}${CYL} fort.76 - fi -fi - -if [ 
${IKEFLAG} = 'y' ]; then - if [ ${atcfout} = 'gfdt' -o ${atcfout} = 'gfdl' -o \ - ${atcfout} = 'hwrf' -o ${atcfout} = 'hwft' ]; then - ln -s -f ${DATA}/trak.${atcfout}.ike.${stormenv}.${PDY}${CYL} fort.74 - else - ln -s -f ${DATA}/trak.${atcfout}.ike.${PDY}${CYL} fort.74 - fi -fi - -if [ ${trkrtype} = 'midlat' -o ${trkrtype} = 'tcgen' ]; then - ln -s -f ${DATA}/trkrmask.${atcfout}.${regtype}.${PDY}${CYL} fort.77 -fi - - -set +x -echo " " -echo " -----------------------------------------------" -echo " NOW EXECUTING TRACKER......" -echo " -----------------------------------------------" -echo " " -set -x - -msg="$pgm start for $atcfout at ${CYL}z" -$postmsg "$jlogfile" "$msg" - -set +x -echo "+++ TIMING: BEFORE gettrk ---> $(date)" -set -x - -set +x -echo " " -echo "TIMING: Before call to gettrk at $(date)" -echo " " -set -x - -##/usrx/local/bin/getrusage -a /hwrf/save/Qingfu.Liu/trak/para/exec/gettrk <${namelist} - -${exectrkdir}/gettrk <${namelist} | tee gettrk.log -gettrk_rcc=$? - -set +x -echo " " -echo "TIMING: After call to gettrk at $(date)" -echo " " -set -x - -set +x -echo "+++ TIMING: AFTER gettrk ---> $(date)" -set -x - -#--------------------------------------------------------------# -# Send a message to the jlogfile for each storm that used -# tcvitals for hour 0 track/intensity info. -#--------------------------------------------------------------# - -pcount=0 -cat gettrk.log | grep -a 'NOTE: TCVITALS_USED_FOR_ATCF_F00' | \ -while read line -do - echo "line is [$line]" - if [[ ! ( "$pcount" -lt 30 ) ]] ; then - $postmsg "$jlogfile" "Hit maximum number of postmsg commands for tcvitals usage at hour 0. Will stop warning about that, to avoid spamming jlogfile." - break - fi - $postmsg "$jlogfile" "$line" - pcount=$(( pcount + 1 )) -done - -#--------------------------------------------------------------# -# Now copy the output track files to different directories -#--------------------------------------------------------------# - -set +x -echo " " -echo " -----------------------------------------------" -echo " NOW COPYING OUTPUT TRACK FILES TO COM " -echo " -----------------------------------------------" -echo " " -set -x - -if [[ ! -e "$track_file_path" ]] ; then - $postmsg "$jlogfile" "WARNING: tracker output file does not exist. This is probably an error. File: $track_file_path" - $postmsg "$jlogfile" "WARNING: exgfs_trkr will create an empty track file and deliver that." - cat /dev/null > $track_file_path -elif [[ ! -s "$track_file_path" ]] ; then - $postmsg "$jlogfile" "WARNING: tracker output file is empty. That is only an error if there are storms or genesis cases somewhere in the world. File: $track_file_path" -else - $postmsg "$jlogfile" "SUCCESS: Track file exists and is non-empty: $track_file" - if [[ "$PHASEFLAG" == n ]] ; then - echo "Phase information was disabled. I will remove the empty phase information from the track file before delivery." - cp -p $track_file_path $track_file_path.orig - cut -c1-112 < $track_file_path.orig > $track_file_path - if [[ ! -s "$track_file_path" ]] ; then - $postmsg "$jlogfile" "WARNING: Something went wrong with \"cut\" command to remove phase information. Will deliver original file." - /bin/mv -f $track_file_path.orig $track_file_path - else - $postmsg "$jlogfile" "SUCCESS: Removed empty phase information because phase information is disabled." 
- fi - fi -fi - -#mkdir /global/save/Qingfu.Liu/gfspara_track/gfs.${PDY}${CYL} -#cp /ptmpp1/Qingfu.Liu/trakout2/${PDY}${CYL}/gfs/trak.gfso.atcf* /global/save/Qingfu.Liu/gfspara_track/gfs.${PDY}${CYL}/. -#rm -rf /ptmpp1/Qingfu.Liu/trakout2/${PDY}${CYL}/gfs/* - -if [ ${gettrk_rcc} -eq 0 ]; then - - if [ -s ${DATA}/output_genvitals.${atcfout}.${PDY}${shh} ]; then - cat ${DATA}/output_genvitals.${atcfout}.${PDY}${shh} >>${genvitfile} - fi - - if [ ${PARAFLAG} = 'YES' ] - then - - if [[ ! -s "$track_file_path" ]] ; then - $postmsg "$jlogfile" "WARNING: delivering empty track file to rundir." - fi - - cp $track_file_path ../. - cat $track_file_path >> \ - ${rundir}/${cmodel}.atcfunix.${syyyy} - if [ ${cmodel} = 'gfs' ]; then - cat ${rundir}/${cmodel}.atcfunix.${syyyy} | sed -e "s/ GFSO/ AVNO/g" >>${rundir}/avn.atcfunix.${syyyy} - fi -# cp ${DATA}/trak.${atcfout}.atcf_sink.${regtype}.${PDY}${CYL} ../. -# cp ${DATA}/trak.${atcfout}.atcf_gen.${regtype}.${PDY}${CYL} ../. - fi - - msg="$pgm end for $atcfout at ${CYL}z completed normally" - $postmsg "$jlogfile" "$msg" - -# Now copy track files into various archives.... - - if [ ${SENDCOM} = 'YES' ] - then - - if [[ ! -s "$track_file_path" ]] ; then - $postmsg "$jlogfile" "WARNING: delivering an empty track file to COM." - return - fi - - glatuxarch=${glatuxarch:-${gltrkdir}/tracks.atcfunix.${syy}} - - cat $track_file_path >>${glatuxarch} - if [ ${cmodel} = 'gfs' ]; then - cat $track_file_path | sed -e "s/ GFSO/ AVNO/g" >>${glatuxarch} - fi - - if [ ${PARAFLAG} = 'YES' ] - then - echo " " - tmatuxarch=${tmatuxarch:-/gpfs/gd2/emc/hwrf/save/${userid}/trak/prod/tracks.atcfunix.${syy}} - cat $track_file_path >>${tmatuxarch} - if [ ${cmodel} = 'gfs' ]; then - cat $track_file_path | sed -e "s/ GFSO/ AVNO/g" >>${tmatuxarch} - fi - else - - if [ ${cmodel} = 'gfdl' ] - then - cp $track_file_path ${COM}/${stormenv}.${PDY}${CYL}.trackeratcfunix - else - cp $track_file_path ${COM}/${atcfout}.t${CYL}z.cyclone.trackatcfunix - if [ ${cmodel} = 'gfs' ]; then - cat $track_file_path | sed -e "s/ GFSO/ AVNO/g" >${COM}/avn.t${CYL}z.cyclone.trackatcfunix - fi - fi - - tmscrdir=/gpfs/gd2/emc/hwrf/save/${userid}/trak/prod - - tmtrakstat=${tmscrdir}/tracker.prod.status - echo "${atcfout} tracker completed okay for ${PDY}${CYL}" >>${tmtrakstat} - - export SENDDBN=${SENDDBN:-YES} - if [ ${SENDDBN} = 'YES' ] - then - if [ ${cmodel} = 'gfdl' ] - then - $DBNROOT/bin/dbn_alert ATCFUNIX GFS_NAVY $job ${COM}/${stormenv}.${PDY}${CYL}.trackeratcfunix - else - $DBNROOT/bin/dbn_alert ATCFUNIX GFS_NAVY $job ${COM}/${atcfout}.t${CYL}z.cyclone.trackatcfunix - if [ ${cmodel} = 'gfs' ]; then - $DBNROOT/bin/dbn_alert ATCFUNIX GFS_NAVY $job ${COM}/avn.t${CYL}z.cyclone.trackatcfunix - fi - fi - fi - - if [[ "$SENDNHC" == YES ]] ; then - # We need to parse apart the atcfunix file and distribute the forecasts to - # the necessary directories. To do this, first sort the atcfunix records - # by forecast hour (k6), then sort again by ocean basin (k1), storm number (k2) - # and then quadrant radii wind threshold (k12). Once you've got that organized - # file, break the file up by putting all the forecast records for each storm - # into a separate file. Then, for each file, find the corresponding atcfunix - # file in the /nhc/com/prod/atcf directory and dump the atcfunix records for that - # storm in there. 
- - if [ ${cmodel} = 'gfdl' ] - then - auxfile=${COM}/${stormenv}.${PDY}${CYL}.trackeratcfunix - else - auxfile=$track_file_path - fi - - sort -k6 ${auxfile} | sort -k1 -k2 -k12 >atcfunix.sorted - - old_string="XX, XX" - - ict=0 - while read unixrec - do - storm_string=$( echo "${unixrec}" | cut -c1-6) - if [ "${storm_string}" = "${old_string}" ] - then - echo "${unixrec}" >>atcfunix_file.${ict} - else - let ict=ict+1 - echo "${unixrec}" >atcfunix_file.${ict} - old_string="${storm_string}" - fi - done >${ATCFdir}/${at}${NO}${syyyy}/a${at}${NO}${syyyy}.dat - cat atcfunix_file.$mct >>${ATCFdir}/${at}${NO}${syyyy}/a${at}${NO}${syyyy}.dat - cat atcfunix_file.$mct >>${ATCFdir}/${at}${NO}${syyyy}/ncep_a${at}${NO}${syyyy}.dat - if [ ${cmodel} = 'gfs' ]; then - cat atcfunix_file.$mct | sed -e "s/ GFSO/ AVNO/g" >>${ATCFdir}/${at}${NO}${syyyy}/a${at}${NO}${syyyy}.dat - cat atcfunix_file.$mct | sed -e "s/ GFSO/ AVNO/g" >>${ATCFdir}/${at}${NO}${syyyy}/ncep_a${at}${NO}${syyyy}.dat - fi - set +x - echo " " - echo "+++ Adding records to TPC ATCFUNIX directory: /tpcprd/atcf_unix/${at}${NO}${syyyy}" - echo " " - set -x - else - set +x - echo " " - echo "There is no TPC ATCFUNIX directory for: /tpcprd/atcf_unix/${at}${NO}${syyyy}" - set -x - fi - done - fi - fi - fi - - fi - -else - - if [ ${PARAFLAG} = 'YES' ] - then - echo " " - else - tmtrakstat=/gpfs/gd2/emc/hwrf/save/${userid}/trak/prod/tracker.prod.status - echo "ERROR: ${atcfout} tracker FAILED for ${PDY}${CYL}" >>${tmtrakstat} - fi - - set +x - echo " " - echo "!!! ERROR -- An error occurred while running gettrk.x, " - echo "!!! which is the program that actually gets the track." - echo "!!! Return code from gettrk.x = ${gettrk_rcc}" - echo "!!! model= ${atcfout}, forecast initial time = ${PDY}${CYL}" - echo "!!! Exiting...." - echo " " - set -x - err_exit " FAILED ${jobid} - ERROR RUNNING GETTRK IN TRACKER SCRIPT- ABNORMAL EXIT" - -fi diff --git a/ush/global_savefits.sh b/ush/global_savefits.sh index 9efbf778af..f26132dd8a 100755 --- a/ush/global_savefits.sh +++ b/ush/global_savefits.sh @@ -1,9 +1,10 @@ -#!/bin/ksh -set -xeua +#! /usr/bin/env bash ######################################################## # save fit and horiz files for all analysis cycles ######################################################## +source "$HOMEgfs/ush/preamble.sh" + export FIT_DIR=${FIT_DIR:-$COMOUT/fits} export HORZ_DIR=${HORZ_DIR:-$COMOUT/horiz} export fh1=06 diff --git a/ush/gsi_utils.py b/ush/gsi_utils.py new file mode 100644 index 0000000000..97d66e8ace --- /dev/null +++ b/ush/gsi_utils.py @@ -0,0 +1,136 @@ +# gsi_utils.py +# a collection of functions, classes, etc. +# used for the GSI global analysis + +def isTrue(str_in): + """ isTrue(str_in) + - function to translate shell variables to python logical variables + + input: str_in - string (should be like 'YES', 'TRUE', etc.) 
+ returns: status (logical True or False) + + """ + str_in = str_in.upper() + if str_in in ['YES', '.TRUE.']: + status = True + else: + status = False + return status + + +def link_file(from_file, to_file): + """ link_file(from_file, to_file) + - function to check if a path exists, and if not, make a symlink + input: from_file - string path + to_file - string path + """ + import os + if not os.path.exists(to_file): + if not os.path.islink(to_file): + os.symlink(from_file, to_file) + else: + print(to_file + " exists, unlinking.") + os.unlink(to_file) + os.symlink(from_file, to_file) + print("ln -s " + from_file + " " + to_file) + + +def copy_file(from_file, to_file): + import shutil + shutil.copy(from_file, to_file) + print("cp " + from_file + " " + to_file) + + +def make_dir(directory): + import os + os.makedirs(directory) + print("mkdir -p " + directory) + + +def write_nml(nml_dict, nml_file): + """ write_nml(nml_dict, nml_file) + - function to write out namelist dictionary nml_dict to file nml_file + input: nml_dict - dictionary of dictionaries + first dictionary is &nml, second is nmlvar='value' + NOTE: this shoudl be an OrderedDict or else it might fail + nml_file - string path to write namelist file to + """ + nfile = open(nml_file, 'w') + + for nml, nmlvars in nml_dict.items(): + nfile.write('&' + nml + '\n') + for var, val in nmlvars.items(): + nfile.write(' ' + str(var) + ' = ' + str(val) + '\n') + nfile.write('/\n\n') + nfile.close() + + +def get_ncdims(ncfile): + """ get_ncdims(ncfile) + - function to return dictionary of netCDF file dimensions and their lengths + input: ncfile - string to path to netCDF file + output: ncdims - dictionary where key is the name of a dimension and the + value is the length of that dimension + + ex: ncdims['pfull'] = 127 + """ + try: + import netCDF4 as nc + except ImportError as err: + raise ImportError(f"Unable to import netCDF4 module\n{err}") + ncf = nc.Dataset(ncfile) + ncdims = {} + for d in ncf.dimensions.keys(): + ncdims[d] = int(len(ncf.dimensions[d])) + ncf.close() + + return ncdims + + +def get_nemsdims(nemsfile, nemsexe): + """ get_nemsdims(nemsfile,nemsexe) + - function to return dictionary of NEMSIO file dimensions for use + input: nemsfile - string to path nemsio file + nemsexe - string to path nemsio_get executable + output: nemsdims - dictionary where key is the name of a dimension and the + value is the length of that dimension + ex: nemsdims['pfull'] = 127 + """ + import subprocess + ncdims = { + 'dimx': 'grid_xt', + 'dimy': 'grid_yt', + 'dimz': 'pfull', + } + nemsdims = {} + for dim in ['dimx', 'dimy', 'dimz']: + out = subprocess.Popen([nemsexe, nemsfile, dim], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + stdout, stderr = out.communicate() + nemsdims[ncdims[dim]] = int(stdout.split(' ')[-1].rstrip()) + return nemsdims + + +def get_timeinfo(ncfile): + """ get_timeinfo(ncfile) + - function to return datetime objects of initialized time and valid time + input: ncfile - string to path to netCDF file + returns: inittime, validtime - datetime objects + nfhour - integer forecast hour + """ + try: + import netCDF4 as nc + except ImportError as err: + raise ImportError(f"Unable to import netCDF4 module\n{err}") + import datetime as dt + import re + ncf = nc.Dataset(ncfile) + time_units = ncf['time'].units + date_str = time_units.split('since ')[1] + date_str = re.sub("[^0-9]", "", date_str) + initstr = date_str[0:10] + inittime = dt.datetime.strptime(initstr, "%Y%m%d%H") + nfhour = int(ncf['time'][0]) + validtime = inittime + 
dt.timedelta(hours=nfhour) + ncf.close() + + return inittime, validtime, nfhour diff --git a/ush/hpssarch_gen.sh b/ush/hpssarch_gen.sh index ab90f3351a..f1a161c26d 100755 --- a/ush/hpssarch_gen.sh +++ b/ush/hpssarch_gen.sh @@ -1,36 +1,25 @@ -#!/bin/ksh -set -x +#! /usr/bin/env bash ################################################### # Fanglin Yang, 20180318 # --create bunches of files to be archived to HPSS ################################################### - +source "${HOMEgfs}/ush/preamble.sh" type=${1:-gfs} ##gfs, gdas, enkfgdas or enkfggfs -CDATE=${CDATE:-2018010100} -PDY=$(echo $CDATE | cut -c 1-8) -cyc=$(echo $CDATE | cut -c 9-10) -OUTPUT_FILE=${OUTPUT_FILE:-"netcdf"} ARCH_GAUSSIAN=${ARCH_GAUSSIAN:-"YES"} ARCH_GAUSSIAN_FHMAX=${ARCH_GAUSSIAN_FHMAX:-36} ARCH_GAUSSIAN_FHINC=${ARCH_GAUSSIAN_FHINC:-6} -SUFFIX=${SUFFIX:-".nc"} -if [ $SUFFIX = ".nc" ]; then - format="netcdf" -else - format="nemsio" -fi # Set whether to archive downstream products DO_DOWN=${DO_DOWN:-"NO"} -if [ $DO_BUFRSND = "YES" -o $WAFSF = "YES" ]; then +if [[ ${DO_BUFRSND} = "YES" || ${WAFSF} = "YES" ]]; then export DO_DOWN="YES" fi #----------------------------------------------------- -if [ $type = "gfs" ]; then +if [[ ${type} = "gfs" ]]; then #----------------------------------------------------- FHMIN_GFS=${FHMIN_GFS:-0} FHMAX_GFS=${FHMAX_GFS:-384} @@ -45,49 +34,52 @@ if [ $type = "gfs" ]; then touch gfsb.txt touch gfs_restarta.txt - if [ $ARCH_GAUSSIAN = "YES" ]; then + if [[ ${ARCH_GAUSSIAN} = "YES" ]]; then rm -f gfs_pgrb2b.txt - rm -f gfs_${format}b.txt + rm -f gfs_netcdfb.txt rm -f gfs_flux.txt touch gfs_pgrb2b.txt - touch gfs_${format}b.txt + touch gfs_netcdfb.txt touch gfs_flux.txt - if [ $MODE = "cycled" ]; then - rm -f gfs_${format}a.txt - touch gfs_${format}a.txt + if [[ ${MODE} = "cycled" ]]; then + rm -f gfs_netcdfa.txt + touch gfs_netcdfa.txt fi fi - if [ $DO_DOWN = "YES" ]; then + if [[ ${DO_DOWN} = "YES" ]]; then rm -f gfs_downstream.txt touch gfs_downstream.txt fi - dirpath="gfs.${PDY}/${cyc}/atmos/" - dirname="./${dirpath}" - head="gfs.t${cyc}z." 
- if [ $ARCH_GAUSSIAN = "YES" ]; then - echo "${dirname}${head}pgrb2b.0p25.anl " >>gfs_pgrb2b.txt - echo "${dirname}${head}pgrb2b.0p25.anl.idx " >>gfs_pgrb2b.txt - echo "${dirname}${head}pgrb2b.1p00.anl " >>gfs_pgrb2b.txt - echo "${dirname}${head}pgrb2b.1p00.anl.idx " >>gfs_pgrb2b.txt - - if [ $MODE = "cycled" ]; then - echo "${dirname}${head}atmanl${SUFFIX} " >>gfs_${format}a.txt - echo "${dirname}${head}sfcanl${SUFFIX} " >>gfs_${format}a.txt - echo "${dirname}${head}atmi*.nc " >>gfs_${format}a.txt - echo "${dirname}${head}dtfanl.nc " >>gfs_${format}a.txt - echo "${dirname}${head}loginc.txt " >>gfs_${format}a.txt + if [[ ${ARCH_GAUSSIAN} = "YES" ]]; then + { + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2b.0p25.anl" + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2b.0p25.anl.idx" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2b.1p00.anl" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2b.1p00.anl.idx" + } >> gfs_pgrb2b.txt + + if [[ ${MODE} = "cycled" ]]; then + { + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmanl.nc" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}sfcanl.nc" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmi*.nc" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}dtfanl.nc" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}loginc.txt" + } >> gfs_netcdfa.txt fi fh=0 - while [ $fh -le $ARCH_GAUSSIAN_FHMAX ]; do - fhr=$(printf %03i $fh) - echo "${dirname}${head}atmf${fhr}${SUFFIX} " >>gfs_${format}b.txt - echo "${dirname}${head}sfcf${fhr}${SUFFIX} " >>gfs_${format}b.txt + while (( fh <= ARCH_GAUSSIAN_FHMAX )); do + fhr=$(printf %03i "${fh}") + { + echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}atmf${fhr}.nc" + echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}sfcf${fhr}.nc" + } >> gfs_netcdfb.txt fh=$((fh+ARCH_GAUSSIAN_FHINC)) done fi @@ -95,146 +87,166 @@ if [ $type = "gfs" ]; then #.................. 
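The rewritten list-building blocks in this hunk lean on two bash idioms that recur throughout the new hpssarch_gen.sh: stripping the leading "${ROTDIR}/" from a COM path with parameter expansion, and grouping several echo statements so a single redirection appends them all to the tar list. A minimal sketch, assuming a made-up ROTDIR and COM directory layout purely for illustration:

```bash
#! /usr/bin/env bash
ROTDIR="/scratch/ROTDIRS/expid"                                  # hypothetical rotating directory
COM_ATMOS_GRIB_0p25="${ROTDIR}/gfs.20230101/00/products/atmos/grib2/0p25"  # hypothetical COM path

# ${VAR/${ROTDIR}\//} removes the first occurrence of "${ROTDIR}/",
# leaving a path relative to ROTDIR for the HPSS tar file lists.
echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}"
# -> gfs.20230101/00/products/atmos/grib2/0p25

# Grouping the echos lets one >> open the list file once instead of per line.
{
  echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/gfs.t00z.pgrb2.0p25.anl"
  echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/gfs.t00z.pgrb2.0p25.anl.idx"
} >> gfsa.txt
```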
# Exclude the gfsarch.log file, which will change during the tar operation # This uses the bash extended globbing option - echo "./logs/${CDATE}/gfs!(arch).log " >>gfsa.txt - echo "${dirname}input.nml " >>gfsa.txt - if [ $MODE = "cycled" ]; then - echo "${dirname}${head}gsistat " >>gfsa.txt - echo "${dirname}${head}nsstbufr " >>gfsa.txt - echo "${dirname}${head}prepbufr " >>gfsa.txt - echo "${dirname}${head}prepbufr.acft_profiles " >>gfsa.txt - fi - echo "${dirname}${head}pgrb2.0p25.anl " >>gfsa.txt - echo "${dirname}${head}pgrb2.0p25.anl.idx " >>gfsa.txt - #Only generated if there are cyclones to track - cyclone_files=(avno.t${cyc}z.cyclone.trackatcfunix - avnop.t${cyc}z.cyclone.trackatcfunix - trak.gfso.atcfunix.${PDY}${cyc} - trak.gfso.atcfunix.altg.${PDY}${cyc} - storms.gfso.atcf_gen.${PDY}${cyc} - storms.gfso.atcf_gen.altg.${PDY}${cyc}) - - for file in ${cyclone_files[@]}; do - [[ -s $ROTDIR/${dirname}${file} ]] && echo "${dirname}${file}" >>gfsa.txt - done + { + echo "./logs/${PDY}${cyc}/gfs!(arch).log" + echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/input.nml" + + if [[ ${MODE} = "cycled" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}gsistat" + echo "${COM_OBS/${ROTDIR}\//}/${head}nsstbufr" + echo "${COM_OBS/${ROTDIR}\//}/${head}prepbufr" + echo "${COM_OBS/${ROTDIR}\//}/${head}prepbufr.acft_profiles" + fi - if [ $DO_DOWN = "YES" ]; then - if [ $DO_BUFRSND = "YES" ]; then - echo "${dirname}gempak/gfs_${PDY}${cyc}.sfc " >>gfs_downstream.txt - echo "${dirname}gempak/gfs_${PDY}${cyc}.snd " >>gfs_downstream.txt - echo "${dirname}wmo/gfs_collective*.postsnd_${cyc} " >>gfs_downstream.txt - echo "${dirname}bufr.t${cyc}z " >>gfs_downstream.txt - echo "${dirname}gfs.t${cyc}z.bufrsnd.tar.gz " >>gfs_downstream.txt - fi - if [ $WAFSF = "YES" ]; then - echo "${dirname}wafsgfs*.t${cyc}z.gribf*.grib2 " >>gfs_downstream.txt - echo "${dirname}gfs.t${cyc}z.wafs_grb45f*.grib2 " >>gfs_downstream.txt - echo "${dirname}gfs.t${cyc}z.wafs_grb45f*.nouswafs.grib2 " >>gfs_downstream.txt - echo "${dirname}WAFS_blended_${PDY}${cyc}f*.grib2 " >>gfs_downstream.txt - echo "${dirname}gfs.t*z.gcip.f*.grib2 " >>gfs_downstream.txt - echo "${dirname}gfs.t${cyc}z.wafs_0p25.f*.grib2 " >>gfs_downstream.txt - echo "${dirname}gfs.t${cyc}z.wafs_0p25_unblended.f*.grib2" >>gfs_downstream.txt - echo "${dirname}WAFS_0p25_blended_${PDY}${cyc}f*.grib2 " >>gfs_downstream.txt - fi - fi + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.anl" + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.anl.idx" - echo "${dirname}${head}pgrb2.0p50.anl " >>gfsb.txt - echo "${dirname}${head}pgrb2.0p50.anl.idx " >>gfsb.txt - echo "${dirname}${head}pgrb2.1p00.anl " >>gfsb.txt - echo "${dirname}${head}pgrb2.1p00.anl.idx " >>gfsb.txt + #Only generated if there are cyclones to track + cyclone_files=("avno.t${cyc}z.cyclone.trackatcfunix" + "avnop.t${cyc}z.cyclone.trackatcfunix" + "trak.gfso.atcfunix.${PDY}${cyc}" + "trak.gfso.atcfunix.altg.${PDY}${cyc}") + for file in "${cyclone_files[@]}"; do + [[ -s ${COM_ATMOS_TRACK}/${file} ]] && echo "${COM_ATMOS_TRACK/${ROTDIR}\//}/${file}" + done - fh=0 - while [ $fh -le $FHMAX_GFS ]; do - fhr=$(printf %03i $fh) - if [ $ARCH_GAUSSIAN = "YES" ]; then - echo "${dirname}${head}sfluxgrbf${fhr}.grib2 " >>gfs_flux.txt - echo "${dirname}${head}sfluxgrbf${fhr}.grib2.idx " >>gfs_flux.txt - - echo "${dirname}${head}pgrb2b.0p25.f${fhr} " >>gfs_pgrb2b.txt - echo "${dirname}${head}pgrb2b.0p25.f${fhr}.idx " >>gfs_pgrb2b.txt - if [ -s $ROTDIR/${dirpath}${head}pgrb2b.1p00.f${fhr} ]; then - echo 
"${dirname}${head}pgrb2b.1p00.f${fhr} " >>gfs_pgrb2b.txt - echo "${dirname}${head}pgrb2b.1p00.f${fhr}.idx " >>gfs_pgrb2b.txt + genesis_files=("storms.gfso.atcf_gen.${PDY}${cyc}" + "storms.gfso.atcf_gen.altg.${PDY}${cyc}") + for file in "${genesis_files[@]}"; do + [[ -s ${COM_ATMOS_GENESIS}/${file} ]] && echo "${COM_ATMOS_GENESIS/${ROTDIR}\//}/${file}" + done + } >> gfsa.txt + + { + if [[ ${DO_DOWN} = "YES" ]]; then + if [[ ${DO_BUFRSND} = "YES" ]]; then + echo "${COM_ATMOS_GEMPAK/${ROTDIR}\//}/gfs_${PDY}${cyc}.sfc" + echo "${COM_ATMOS_GEMPAK/${ROTDIR}\//}/gfs_${PDY}${cyc}.snd" + echo "${COM_ATMOS_WMO/${ROTDIR}\//}/gfs_collective*.postsnd_${cyc}" + echo "${COM_ATMOS_BUFR/${ROTDIR}\//}/bufr.t${cyc}z" + echo "${COM_ATMOS_BUFR/${ROTDIR}\//}/gfs.t${cyc}z.bufrsnd.tar.gz" + fi + if [[ ${WAFSF} = "YES" ]]; then + echo "${COM_ATMOS_WAFS/${ROTDIR}\//}/wafsgfs*.t${cyc}z.gribf*.grib2" + echo "${COM_ATMOS_WAFS/${ROTDIR}\//}/gfs.t${cyc}z.wafs_grb45f*.grib2" + echo "${COM_ATMOS_WAFS/${ROTDIR}\//}/gfs.t${cyc}z.wafs_grb45f*.nouswafs.grib2" + echo "${COM_ATMOS_WAFS/${ROTDIR}\//}/WAFS_blended_${PDY}${cyc}f*.grib2" + echo "${COM_ATMOS_WAFS/${ROTDIR}\//}/gfs.t*z.gcip.f*.grib2" + echo "${COM_ATMOS_WAFS/${ROTDIR}\//}/gfs.t${cyc}z.wafs_0p25.f*.grib2" + echo "${COM_ATMOS_WAFS/${ROTDIR}\//}/gfs.t${cyc}z.wafs_0p25_unblended.f*.grib2" + echo "${COM_ATMOS_WAFS/${ROTDIR}\//}/WAFS_0p25_blended_${PDY}${cyc}f*.grib2" fi fi + } >> gfs_downstream.txt - echo "${dirname}${head}pgrb2.0p25.f${fhr} " >>gfsa.txt - echo "${dirname}${head}pgrb2.0p25.f${fhr}.idx " >>gfsa.txt - echo "${dirname}${head}logf${fhr}.txt " >>gfsa.txt + { + echo "${COM_ATMOS_GRIB_0p50/${ROTDIR}\//}/${head}pgrb2.0p50.anl" + echo "${COM_ATMOS_GRIB_0p50/${ROTDIR}\//}/${head}pgrb2.0p50.anl.idx" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.anl" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.anl.idx" + } >> gfsb.txt - if [ -s $ROTDIR/${dirpath}${head}pgrb2.0p50.f${fhr} ]; then - echo "${dirname}${head}pgrb2.0p50.f${fhr} " >>gfsb.txt - echo "${dirname}${head}pgrb2.0p50.f${fhr}.idx " >>gfsb.txt - fi - if [ -s $ROTDIR/${dirpath}${head}pgrb2.1p00.f${fhr} ]; then - echo "${dirname}${head}pgrb2.1p00.f${fhr} " >>gfsb.txt - echo "${dirname}${head}pgrb2.1p00.f${fhr}.idx " >>gfsb.txt + + fh=0 + while (( fh <= FHMAX_GFS )); do + fhr=$(printf %03i "${fh}") + if [[ ${ARCH_GAUSSIAN} = "YES" ]]; then + { + echo "${COM_ATMOS_MASTER/${ROTDIR}\//}/${head}sfluxgrbf${fhr}.grib2" + echo "${COM_ATMOS_MASTER/${ROTDIR}\//}/${head}sfluxgrbf${fhr}.grib2.idx" + } >> gfs_flux.txt + + { + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2b.0p25.f${fhr}" + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2b.0p25.f${fhr}.idx" + if [[ -s "${COM_ATMOS_GRIB_1p00}/${head}pgrb2b.1p00.f${fhr}" ]]; then + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/{head}pgrb2b.1p00.f${fhr}" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/{head}pgrb2b.1p00.f${fhr}.idx" + fi + } >> gfs_pgrb2b.txt fi - inc=$FHOUT_GFS - if [ $FHMAX_HF_GFS -gt 0 -a $FHOUT_HF_GFS -gt 0 -a $fh -lt $FHMAX_HF_GFS ]; then - inc=$FHOUT_HF_GFS + { + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.f${fhr}" + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.f${fhr}.idx" + echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}atm.logf${fhr}.txt" + } >> gfsa.txt + + + { + if [[ -s "${COM_ATMOS_GRIB_0p50}/${head}pgrb2.0p50.f${fhr}" ]]; then + echo "${COM_ATMOS_GRIB_0p50/${ROTDIR}\//}/${head}pgrb2.0p50.f${fhr}" + echo "${COM_ATMOS_GRIB_0p50/${ROTDIR}\//}/${head}pgrb2.0p50.f${fhr}.idx" + fi + if [[ -s 
"${COM_ATMOS_GRIB_1p00}/${head}pgrb2.1p00.f${fhr}" ]]; then + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.f${fhr}" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.f${fhr}.idx" + fi + } >> gfsb.txt + + inc=${FHOUT_GFS} + if (( FHMAX_HF_GFS > 0 && FHOUT_HF_GFS > 0 && fh < FHMAX_HF_GFS )); then + inc=${FHOUT_HF_GFS} fi fh=$((fh+inc)) done #.................. - if [ $MODE = "cycled" ]; then - echo "${dirname}RESTART/*0000.sfcanl_data.tile1.nc " >>gfs_restarta.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile2.nc " >>gfs_restarta.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile3.nc " >>gfs_restarta.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile4.nc " >>gfs_restarta.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile5.nc " >>gfs_restarta.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile6.nc " >>gfs_restarta.txt - elif [ $MODE = "forecast-only" ]; then - echo "${dirname}INPUT/gfs_ctrl.nc " >>gfs_restarta.txt - echo "${dirname}INPUT/gfs_data.tile1.nc " >>gfs_restarta.txt - echo "${dirname}INPUT/gfs_data.tile2.nc " >>gfs_restarta.txt - echo "${dirname}INPUT/gfs_data.tile3.nc " >>gfs_restarta.txt - echo "${dirname}INPUT/gfs_data.tile4.nc " >>gfs_restarta.txt - echo "${dirname}INPUT/gfs_data.tile5.nc " >>gfs_restarta.txt - echo "${dirname}INPUT/gfs_data.tile6.nc " >>gfs_restarta.txt - echo "${dirname}INPUT/sfc_data.tile1.nc " >>gfs_restarta.txt - echo "${dirname}INPUT/sfc_data.tile2.nc " >>gfs_restarta.txt - echo "${dirname}INPUT/sfc_data.tile3.nc " >>gfs_restarta.txt - echo "${dirname}INPUT/sfc_data.tile4.nc " >>gfs_restarta.txt - echo "${dirname}INPUT/sfc_data.tile5.nc " >>gfs_restarta.txt - echo "${dirname}INPUT/sfc_data.tile6.nc " >>gfs_restarta.txt - fi + { + if [[ ${MODE} = "cycled" ]]; then + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile1.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile2.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile3.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile4.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile5.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile6.nc" + elif [[ ${MODE} = "forecast-only" ]]; then + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_ctrl.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_data.tile1.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_data.tile2.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_data.tile3.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_data.tile4.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_data.tile5.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/gfs_data.tile6.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile1.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile2.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile3.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile4.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile5.nc" + echo "${COM_ATMOS_INPUT/${ROTDIR}\//}/sfc_data.tile6.nc" + fi + } >> gfs_restarta.txt + #.................. - if [ $DO_WAVE = "YES" ]; then + if [[ ${DO_WAVE} = "YES" ]]; then rm -rf gfswave.txt touch gfswave.txt - dirpath="gfs.${PDY}/${cyc}/wave/" - dirname="./${dirpath}" - head="gfswave.t${cyc}z." #........................... 
- echo "${dirname}rundata/ww3_multi* " >>gfswave.txt - echo "${dirname}gridded/${head}* " >>gfswave.txt - echo "${dirname}station/${head}* " >>gfswave.txt - + { + echo "${COM_WAVE_HISTORY/${ROTDIR}\//}/ww3_multi*" + echo "${COM_WAVE_GRID/${ROTDIR}\//}/${head}*" + echo "${COM_WAVE_STATION/${ROTDIR}\//}/${head}*" + } >> gfswave.txt fi - if [ $DO_OCN = "YES" ]; then - dirpath="gfs.${PDY}/${cyc}/ocean/" - dirname="./${dirpath}" + if [[ ${DO_OCN} = "YES" ]]; then head="gfs.t${cyc}z." rm -f gfs_flux_1p00.txt - rm -f ocn_ice_grib2_0p5.txt + rm -f ocn_ice_grib2_0p5.txt rm -f ocn_ice_grib2_0p25.txt rm -f ocn_2D.txt rm -f ocn_3D.txt rm -f ocn_xsect.txt rm -f ocn_daily.txt - rm -f wavocn.txt touch gfs_flux_1p00.txt touch ocn_ice_grib2_0p5.txt touch ocn_ice_grib2_0p25.txt @@ -242,44 +254,39 @@ if [ $type = "gfs" ]; then touch ocn_3D.txt touch ocn_xsect.txt touch ocn_daily.txt - touch wavocn.txt - echo "${dirname}MOM_input " >>ocn_2D.txt - echo "${dirname}ocn_2D* " >>ocn_2D.txt - echo "${dirname}ocn_3D* " >>ocn_3D.txt - echo "${dirname}ocn*EQ* " >>ocn_xsect.txt - echo "${dirname}ocn_daily* " >>ocn_daily.txt - echo "${dirname}wavocn* " >>wavocn.txt - echo "${dirname}ocn_ice*0p5x0p5.grb2 " >>ocn_ice_grib2_0p5.txt - echo "${dirname}ocn_ice*0p25x0p25.grb2 " >>ocn_ice_grib2_0p25.txt - - dirpath="gfs.${PDY}/${cyc}/atmos/" - dirname="./${dirpath}" - echo "${dirname}${head}flux.1p00.f??? " >>gfs_flux_1p00.txt - echo "${dirname}${head}flux.1p00.f???.idx " >>gfs_flux_1p00.txt + echo "${COM_OCEAN_INPUT/${ROTDIR}\//}/MOM_input" >> ocn_2D.txt + echo "${COM_OCEAN_HISTORY/${ROTDIR}\//}/ocn_2D*" >> ocn_2D.txt + echo "${COM_OCEAN_HISTORY/${ROTDIR}\//}/ocn_3D*" >> ocn_3D.txt + echo "${COM_OCEAN_XSECT/${ROTDIR}\//}/ocn*EQ*" >> ocn_xsect.txt + echo "${COM_OCEAN_DAILY/${ROTDIR}\//}/ocn_daily*" >> ocn_daily.txt + echo "${COM_OCEAN_GRIB_0p50/${ROTDIR}\//}/ocn_ice*0p5x0p5.grb2" >> ocn_ice_grib2_0p5.txt + echo "${COM_OCEAN_GRIB_0p25/${ROTDIR}\//}/ocn_ice*0p25x0p25.grb2" >> ocn_ice_grib2_0p25.txt + + # Also save fluxes from atmosphere + { + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}flux.1p00.f???" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}flux.1p00.f???.idx" + } >> gfs_flux_1p00.txt fi - if [ $DO_ICE = "YES" ]; then - dirpath="gfs.${PDY}/${cyc}/ice/" - dirname="./${dirpath}" - + if [[ ${DO_ICE} = "YES" ]]; then head="gfs.t${cyc}z." rm -f ice.txt touch ice.txt - echo "${dirname}ice_in " >>ice.txt - echo "${dirname}ice*nc " >>ice.txt + { + echo "${COM_ICE_INPUT/${ROTDIR}\//}/ice_in" + echo "${COM_ICE_HISTORY/${ROTDIR}\//}/ice*nc" + } >> ice.txt fi - if [ $DO_AERO = "YES" ]; then - dirpath="gfs.${PDY}/${cyc}/chem" - dirname="./${dirpath}" - + if [[ ${DO_AERO} = "YES" ]]; then head="gocart" rm -f chem.txt touch chem.txt - echo "${dirname}/${head}*" >> chem.txt + echo "${COM_CHEM_HISTORY/${ROTDIR}\//}/${head}*" >> chem.txt fi #----------------------------------------------------- @@ -289,7 +296,7 @@ fi ##end of gfs #----------------------------------------------------- -if [ $type = "gdas" ]; then +if [[ ${type} == "gdas" ]]; then #----------------------------------------------------- rm -f gdas.txt @@ -299,115 +306,159 @@ if [ $type = "gdas" ]; then touch gdas_restarta.txt touch gdas_restartb.txt - dirpath="gdas.${PDY}/${cyc}/atmos/" - dirname="./${dirpath}" head="gdas.t${cyc}z." #.................. 
- echo "${dirname}${head}gsistat " >>gdas.txt - echo "${dirname}${head}pgrb2.0p25.anl " >>gdas.txt - echo "${dirname}${head}pgrb2.0p25.anl.idx " >>gdas.txt - echo "${dirname}${head}pgrb2.1p00.anl " >>gdas.txt - echo "${dirname}${head}pgrb2.1p00.anl.idx " >>gdas.txt - echo "${dirname}${head}atmanl${SUFFIX} " >>gdas.txt - echo "${dirname}${head}sfcanl${SUFFIX} " >>gdas.txt - if [ -s $ROTDIR/${dirpath}${head}atmanl.ensres${SUFFIX} ]; then - echo "${dirname}${head}atmanl.ensres${SUFFIX} " >>gdas.txt - fi - if [ -s $ROTDIR/${dirpath}${head}atma003.ensres${SUFFIX} ]; then - echo "${dirname}${head}atma003.ensres${SUFFIX} " >>gdas.txt - fi - if [ -s $ROTDIR/${dirpath}${head}atma009.ensres${SUFFIX} ]; then - echo "${dirname}${head}atma009.ensres${SUFFIX} " >>gdas.txt - fi - if [ -s $ROTDIR/${dirpath}${head}cnvstat ]; then - echo "${dirname}${head}cnvstat " >>gdas.txt - fi - if [ -s $ROTDIR/${dirpath}${head}oznstat ]; then - echo "${dirname}${head}oznstat " >>gdas.txt - fi - if [ -s $ROTDIR/${dirpath}${head}radstat ]; then - echo "${dirname}${head}radstat " >>gdas.txt - fi - for fstep in prep anal gldas fcst vrfy radmon minmon oznmon; do - if [ -s $ROTDIR/logs/${CDATE}/gdas${fstep}.log ]; then - echo "./logs/${CDATE}/gdas${fstep}.log " >>gdas.txt - fi - done - echo "./logs/${CDATE}/gdaspost*.log " >>gdas.txt - - fh=0 - while [ $fh -le 9 ]; do - fhr=$(printf %03i $fh) - echo "${dirname}${head}sfluxgrbf${fhr}.grib2 " >>gdas.txt - echo "${dirname}${head}sfluxgrbf${fhr}.grib2.idx " >>gdas.txt - echo "${dirname}${head}pgrb2.0p25.f${fhr} " >>gdas.txt - echo "${dirname}${head}pgrb2.0p25.f${fhr}.idx " >>gdas.txt - echo "${dirname}${head}pgrb2.1p00.f${fhr} " >>gdas.txt - echo "${dirname}${head}pgrb2.1p00.f${fhr}.idx " >>gdas.txt - echo "${dirname}${head}logf${fhr}.txt " >>gdas.txt - echo "${dirname}${head}atmf${fhr}${SUFFIX} " >>gdas.txt - echo "${dirname}${head}sfcf${fhr}${SUFFIX} " >>gdas.txt - fh=$((fh+3)) - done - flist="001 002 004 005 007 008" - for fhr in $flist; do - echo "${dirname}${head}sfluxgrbf${fhr}.grib2 " >>gdas.txt - echo "${dirname}${head}sfluxgrbf${fhr}.grib2.idx " >>gdas.txt - done - + { + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}gsistat" + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.anl" + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.anl.idx" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.anl" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.anl.idx" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmanl.nc" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}sfcanl.nc" + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}atmanl.ensres.nc" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmanl.ensres.nc" + fi + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}atma003.ensres.nc" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atma003.ensres.nc" + fi + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}atma009.ensres.nc" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atma009.ensres.nc" + fi + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}cnvstat" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}cnvstat" + fi + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}oznstat" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}oznstat" + fi + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}radstat" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}radstat" + fi + for fstep in prep anal fcst vrfy radmon minmon oznmon; do + if [[ -s "${ROTDIR}/logs/${PDY}${cyc}/gdas${fstep}.log" ]]; then + echo "./logs/${PDY}${cyc}/gdas${fstep}.log" + 
fi + done + echo "./logs/${PDY}${cyc}/gdaspost*.log" + fh=0 + while [[ ${fh} -le 9 ]]; do + fhr=$(printf %03i "${fh}") + echo "${COM_ATMOS_MASTER/${ROTDIR}\//}/${head}sfluxgrbf${fhr}.grib2" + echo "${COM_ATMOS_MASTER/${ROTDIR}\//}/${head}sfluxgrbf${fhr}.grib2.idx" + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.f${fhr}" + echo "${COM_ATMOS_GRIB_0p25/${ROTDIR}\//}/${head}pgrb2.0p25.f${fhr}.idx" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.f${fhr}" + echo "${COM_ATMOS_GRIB_1p00/${ROTDIR}\//}/${head}pgrb2.1p00.f${fhr}.idx" + echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}atm.logf${fhr}.txt" + echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}atmf${fhr}.nc" + echo "${COM_ATMOS_HISTORY/${ROTDIR}\//}/${head}sfcf${fhr}.nc" + fh=$((fh+3)) + done + flist="001 002 004 005 007 008" + for fhr in ${flist}; do + file="${COM_ATMOS_MASTER/${ROTDIR}\//}/${head}sfluxgrbf${fhr}.grib2" + if [[ -s "${file}" ]]; then + echo "${file}" + echo "${file}.idx" + fi + done + } >> gdas.txt #.................. - if [ -s $ROTDIR/${dirpath}${head}cnvstat ]; then - echo "${dirname}${head}cnvstat " >>gdas_restarta.txt + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}cnvstat" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}cnvstat" >> gdas_restarta.txt fi - if [ -s $ROTDIR/${dirpath}${head}radstat ]; then - echo "${dirname}${head}radstat " >>gdas_restarta.txt + if [[ -s "${COM_ATMOS_ANALYSIS}/${head}radstat" ]]; then + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}radstat" >> gdas_restarta.txt fi - echo "${dirname}${head}nsstbufr " >>gdas_restarta.txt - echo "${dirname}${head}prepbufr " >>gdas_restarta.txt - echo "${dirname}${head}prepbufr.acft_profiles " >>gdas_restarta.txt - echo "${dirname}${head}abias " >>gdas_restarta.txt - echo "${dirname}${head}abias_air " >>gdas_restarta.txt - echo "${dirname}${head}abias_int " >>gdas_restarta.txt - echo "${dirname}${head}abias_pc " >>gdas_restarta.txt - echo "${dirname}${head}atmi*nc " >>gdas_restarta.txt - echo "${dirname}${head}dtfanl.nc " >>gdas_restarta.txt - echo "${dirname}${head}loginc.txt " >>gdas_restarta.txt - - echo "${dirname}RESTART/*0000.sfcanl_data.tile1.nc " >>gdas_restarta.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile2.nc " >>gdas_restarta.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile3.nc " >>gdas_restarta.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile4.nc " >>gdas_restarta.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile5.nc " >>gdas_restarta.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile6.nc " >>gdas_restarta.txt + { + echo "${COM_OBS/${ROTDIR}\//}/${head}nsstbufr" + echo "${COM_OBS/${ROTDIR}\//}/${head}prepbufr" + echo "${COM_OBS/${ROTDIR}\//}/${head}prepbufr.acft_profiles" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}abias" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}abias_air" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}abias_int" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}abias_pc" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}atmi*nc" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}dtfanl.nc" + echo "${COM_ATMOS_ANALYSIS/${ROTDIR}\//}/${head}loginc.txt" + + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile1.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile2.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile3.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile4.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile5.nc" + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}/*0000.sfcanl_data.tile6.nc" 
+ } >> gdas_restarta.txt #.................. - echo "${dirname}RESTART " >>gdas_restartb.txt + echo "${COM_ATMOS_RESTART/${ROTDIR}\//}" >> gdas_restartb.txt #.................. - if [ $DO_WAVE = "YES" ]; then + if [[ ${DO_WAVE} = "YES" ]]; then rm -rf gdaswave.txt touch gdaswave.txt rm -rf gdaswave_restart.txt touch gdaswave_restart.txt - dirpath="gdas.${PDY}/${cyc}/wave/" - dirname="./${dirpath}" - head="gdaswave.t${cyc}z." #........................... - echo "${dirname}gridded/${head}* " >>gdaswave.txt - echo "${dirname}station/${head}* " >>gdaswave.txt + { + echo "${COM_WAVE_GRID/${ROTDIR}\//}/${head}*" + echo "${COM_WAVE_STATION/${ROTDIR}\//}/${head}*" + } >> gdaswave.txt - echo "${dirname}restart/* " >>gdaswave_restart.txt + echo "${COM_WAVE_RESTART/${ROTDIR}\//}/*" >> gdaswave_restart.txt fi + #.................. + if [[ ${DO_OCN} = "YES" ]]; then + + rm -rf gdasocean.txt + touch gdasocean.txt + rm -rf gdasocean_restart.txt + touch gdasocean_restart.txt + + head="gdas.t${cyc}z." + + #........................... + { + echo "${COM_OCEAN_HISTORY/${ROTDIR}\//}/${head}*" + echo "${COM_OCEAN_INPUT/${ROTDIR}\//}" + } >> gdasocean.txt + + { + echo "${COM_OCEAN_RESTART/${ROTDIR}\//}/*" + echo "${COM_MED_RESTART/${ROTDIR}\//}/*" + } >> gdasocean_restart.txt + + fi + + if [[ ${DO_ICE} = "YES" ]]; then + + rm -rf gdasice.txt + touch gdasice.txt + rm -rf gdasice_restart.txt + touch gdasice_restart.txt + + head="gdas.t${cyc}z." + + #........................... + { + echo "${COM_ICE_HISTORY/${ROTDIR}\//}/${head}*" + echo "${COM_ICE_INPUT/${ROTDIR}\//}/ice_in" + } >> gdasice.txt + + echo "${COM_ICE_RESTART/${ROTDIR}\//}/*" >> gdasice_restart.txt + + fi + #----------------------------------------------------- fi ##end of gdas @@ -415,167 +466,181 @@ fi ##end of gdas #----------------------------------------------------- -if [ $type = "enkfgdas" -o $type = "enkfgfs" ]; then +if [[ ${type} == "enkfgdas" || ${type} == "enkfgfs" ]]; then #----------------------------------------------------- IAUFHRS_ENKF=${IAUFHRS_ENKF:-6} lobsdiag_forenkf=${lobsdiag_forenkf:-".false."} - nfhrs=$(echo $IAUFHRS_ENKF | sed 's/,/ /g') - NMEM_ENKF=${NMEM_ENKF:-80} + IFS=',' read -ra nfhrs <<< ${IAUFHRS_ENKF} + NMEM_ENS=${NMEM_ENS:-80} NMEM_EARCGRP=${NMEM_EARCGRP:-10} ##number of ens memebers included in each tarball - NTARS=$((NMEM_ENKF/NMEM_EARCGRP)) - [[ $NTARS -eq 0 ]] && NTARS=1 - [[ $((NTARS*NMEM_EARCGRP)) -lt $NMEM_ENKF ]] && NTARS=$((NTARS+1)) -##NTARS2=$((NTARS/2)) # number of earc groups to include analysis/increments - NTARS2=$NTARS + NTARS=$((NMEM_ENS/NMEM_EARCGRP)) + [[ ${NTARS} -eq 0 ]] && NTARS=1 + [[ $((NTARS*NMEM_EARCGRP)) -lt ${NMEM_ENS} ]] && NTARS=$((NTARS+1)) + ##NTARS2=$((NTARS/2)) # number of earc groups to include analysis/increments + NTARS2=${NTARS} - dirpath="enkf${CDUMP}.${PDY}/${cyc}/atmos/" - dirname="./${dirpath}" - head="${CDUMP}.t${cyc}z." + head="${RUN}.t${cyc}z." #.................. 
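One behavioral change worth noting in the hunk above: IAUFHRS_ENKF is now parsed into a proper bash array with `read -ra` instead of a space-separated string produced by sed, so the later loops iterate with `"${nfhrs[@]}"`. A small sketch of the idiom, using an arbitrary example value:

```bash
#! /usr/bin/env bash
IAUFHRS_ENKF="3,6,9"            # arbitrary example value

# Old style: turn commas into spaces and rely on word splitting.
nfhrs_str=$(echo "${IAUFHRS_ENKF}" | sed 's/,/ /g')

# New style: split on commas directly into an array.
IFS=',' read -ra nfhrs <<< "${IAUFHRS_ENKF}"

for FHR in "${nfhrs[@]}"; do
  echo "analysis hour in window: ${FHR}"
done
```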
- rm -f enkf${CDUMP}.txt - touch enkf${CDUMP}.txt - - echo "${dirname}${head}enkfstat " >>enkf${CDUMP}.txt - echo "${dirname}${head}gsistat.ensmean " >>enkf${CDUMP}.txt - if [ -s $ROTDIR/${dirpath}${head}cnvstat.ensmean ]; then - echo "${dirname}${head}cnvstat.ensmean " >>enkf${CDUMP}.txt - fi - if [ -s $ROTDIR/${dirpath}${head}oznstat.ensmean ]; then - echo "${dirname}${head}oznstat.ensmean " >>enkf${CDUMP}.txt - fi - if [ -s $ROTDIR/${dirpath}${head}radstat.ensmean ]; then - echo "${dirname}${head}radstat.ensmean " >>enkf${CDUMP}.txt - fi - for FHR in $nfhrs; do # loop over analysis times in window - if [ $FHR -eq 6 ]; then - if [ -s $ROTDIR/${dirpath}${head}atmanl.ensmean${SUFFIX} ]; then - echo "${dirname}${head}atmanl.ensmean${SUFFIX} " >>enkf${CDUMP}.txt - fi - if [ -s $ROTDIR/${dirpath}${head}atminc.ensmean${SUFFIX} ]; then - echo "${dirname}${head}atminc.ensmean${SUFFIX} " >>enkf${CDUMP}.txt + rm -f "${RUN}.txt" + touch "${RUN}.txt" + + { + echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}enkfstat" + echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}gsistat.ensmean" + if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}cnvstat.ensmean" ]]; then + echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}cnvstat.ensmean" + fi + if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}oznstat.ensmean" ]]; then + echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}oznstat.ensmean" + fi + if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}radstat.ensmean" ]]; then + echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}radstat.ensmean" + fi + for FHR in "${nfhrs[@]}"; do # loop over analysis times in window + if [[ ${FHR} -eq 6 ]]; then + if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}atmanl.ensmean.nc" ]]; then + echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}atmanl.ensmean.nc" fi - else - if [ -s $ROTDIR/${dirpath}${head}atma00${FHR}.ensmean${SUFFIX} ]; then - echo "${dirname}${head}atma00${FHR}.ensmean${SUFFIX} " >>enkf${CDUMP}.txt + if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}atminc.ensmean.nc" ]]; then + echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}atminc.ensmean.nc" fi - if [ -s $ROTDIR/${dirpath}${head}atmi00${FHR}.ensmean${SUFFIX} ]; then - echo "${dirname}${head}atmi00${FHR}.ensmean${SUFFIX} " >>enkf${CDUMP}.txt + else + if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}atma00${FHR}.ensmean.nc" ]]; then + echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}atma00${FHR}.ensmean.nc" fi - fi - done # loop over FHR - for fstep in eobs ecen esfc eupd efcs epos ; do - echo "logs/${CDATE}/${CDUMP}${fstep}*.log " >>enkf${CDUMP}.txt - done - -# eomg* are optional jobs - for log in $ROTDIR/logs/${CDATE}/${CDUMP}eomg*.log; do - if [ -s "$log" ]; then - echo "logs/${CDATE}/${CDUMP}eomg*.log " >>enkf${CDUMP}.txt - fi - break - done + if [[ -s "${COM_ATMOS_ANALYSIS_ENSSTAT}/${head}atmi00${FHR}.ensmean.nc" ]]; then + echo "${COM_ATMOS_ANALYSIS_ENSSTAT/${ROTDIR}\//}/${head}atmi00${FHR}.ensmean.nc" + fi + fi + done # loop over FHR + for fstep in eobs ecen esfc eupd efcs epos ; do + echo "logs/${PDY}${cyc}/${RUN}${fstep}*.log" + done + # eomg* are optional jobs + for log in "${ROTDIR}/logs/${PDY}${cyc}/${RUN}eomg"*".log"; do + if [[ -s "${log}" ]]; then + echo "logs/${PDY}${cyc}/${RUN}eomg*.log" + fi + break + done -# Ensemble spread file only available with netcdf output - fh=3 - while [ $fh -le 9 ]; do - fhr=$(printf %03i $fh) - echo "${dirname}${head}atmf${fhr}.ensmean${SUFFIX} " >>enkf${CDUMP}.txt - echo "${dirname}${head}sfcf${fhr}.ensmean${SUFFIX} " 
>>enkf${CDUMP}.txt - if [ $OUTPUT_FILE = "netcdf" ]; then - if [ -s $ROTDIR/${dirpath}${head}atmf${fhr}.ensspread${SUFFIX} ]; then - echo "${dirname}${head}atmf${fhr}.ensspread${SUFFIX} " >>enkf${CDUMP}.txt - fi - fi - fh=$((fh+3)) - done + # Ensemble spread file only available with netcdf output + fh=3 + while [ $fh -le 9 ]; do + fhr=$(printf %03i $fh) + echo "${COM_ATMOS_HISTORY_ENSSTAT/${ROTDIR}\//}/${head}atmf${fhr}.ensmean.nc" + echo "${COM_ATMOS_HISTORY_ENSSTAT/${ROTDIR}\//}/${head}sfcf${fhr}.ensmean.nc" + if [[ -s "${COM_ATMOS_HISTORY_ENSSTAT}/${head}atmf${fhr}.ensspread.nc" ]]; then + echo "${COM_ATMOS_HISTORY_ENSSTAT/${ROTDIR}\//}/${head}atmf${fhr}.ensspread.nc" + fi + fh=$((fh+3)) + done + } >> "${RUN}.txt" #........................... n=1 - while [ $n -le $NTARS ]; do - #........................... + while (( n <= NTARS )); do + #........................... - rm -f enkf${CDUMP}_grp${n}.txt - rm -f enkf${CDUMP}_restarta_grp${n}.txt - rm -f enkf${CDUMP}_restartb_grp${n}.txt - touch enkf${CDUMP}_grp${n}.txt - touch enkf${CDUMP}_restarta_grp${n}.txt - touch enkf${CDUMP}_restartb_grp${n}.txt - - m=1 - while [ $m -le $NMEM_EARCGRP ]; do - nm=$(((n-1)*NMEM_EARCGRP+m)) - mem=$(printf %03i $nm) - dirpath="enkf${CDUMP}.${PDY}/${cyc}/atmos/mem${mem}/" - dirname="./${dirpath}" - head="${CDUMP}.t${cyc}z." - - #--- - for FHR in $nfhrs; do # loop over analysis times in window - if [ $FHR -eq 6 ]; then - if [ $n -le $NTARS2 ]; then - if [ -s $ROTDIR/${dirpath}${head}atmanl${SUFFIX} ] ; then - echo "${dirname}${head}atmanl${SUFFIX} " >>enkf${CDUMP}_grp${n}.txt + rm -f "${RUN}_grp${n}.txt" + rm -f "${RUN}_restarta_grp${n}.txt" + rm -f "${RUN}_restartb_grp${n}.txt" + touch "${RUN}_grp${n}.txt" + touch "${RUN}_restarta_grp${n}.txt" + touch "${RUN}_restartb_grp${n}.txt" + + m=1 + while (( m <= NMEM_EARCGRP )); do + nm=$(((n-1)*NMEM_EARCGRP+m)) + mem=$(printf %03i ${nm}) + head="${RUN}.t${cyc}z." 
+ + MEMDIR="mem${mem}" YMD=${PDY} HH=${cyc} generate_com \ + COM_ATMOS_ANALYSIS_MEM:COM_ATMOS_ANALYSIS_TMPL \ + COM_ATMOS_RESTART_MEM:COM_ATMOS_RESTART_TMPL \ + COM_ATMOS_HISTORY_MEM:COM_ATMOS_HISTORY_TMPL + + #--- + for FHR in "${nfhrs[@]}"; do # loop over analysis times in window + if [ "${FHR}" -eq 6 ]; then + { + if (( n <= NTARS2 )); then + if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}atmanl.nc" ]] ; then + echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}atmanl.nc" + fi + if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}ratminc.nc" ]] ; then + echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratminc.nc" + fi fi - if [ -s $ROTDIR/${dirpath}${head}ratminc${SUFFIX} ] ; then - echo "${dirname}${head}ratminc${SUFFIX} " >>enkf${CDUMP}_grp${n}.txt - fi - fi - if [ -s $ROTDIR/${dirpath}${head}ratminc${SUFFIX} ] ; then - echo "${dirname}${head}ratminc${SUFFIX} " >>enkf${CDUMP}_restarta_grp${n}.txt - fi + } >> "${RUN}_grp${n}.txt" - else - if [ $n -le $NTARS2 ]; then - if [ -s $ROTDIR/${dirpath}${head}atma00${FHR}${SUFFIX} ] ; then - echo "${dirname}${head}atma00${FHR}${SUFFIX} " >>enkf${CDUMP}_grp${n}.txt - fi - if [ -s $ROTDIR/${dirpath}${head}ratmi00${FHR}${SUFFIX} ] ; then - echo "${dirname}${head}ratmi00${FHR}${SUFFIX} " >>enkf${CDUMP}_grp${n}.txt + if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}ratminc.nc" ]] ; then + echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratminc.nc" \ + >> "${RUN}_restarta_grp${n}.txt" + fi + + else + { + if (( n <= NTARS2 )); then + if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}atma00${FHR}.nc" ]] ; then + echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}atma00${FHR}.nc" + fi + if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}ratmi00${FHR}.nc" ]] ; then + echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratmi00${FHR}.nc" + fi fi - fi - if [ -s $ROTDIR/${dirpath}${head}ratmi00${FHR}${SUFFIX} ] ; then - echo "${dirname}${head}ratmi00${FHR}${SUFFIX} " >>enkf${CDUMP}_restarta_grp${n}.txt - fi - - fi - echo "${dirname}${head}atmf00${FHR}${SUFFIX} " >>enkf${CDUMP}_grp${n}.txt - if [ $FHR -eq 6 ]; then - echo "${dirname}${head}sfcf00${FHR}${SUFFIX} " >>enkf${CDUMP}_grp${n}.txt + } >> "${RUN}_grp${n}.txt" + if [[ -s "${COM_ATMOS_ANALYSIS_MEM}/${head}ratmi00${FHR}.nc" ]] ; then + echo "${COM_ATMOS_ANALYSIS_MEM/${ROTDIR}\//}/${head}ratmi00${FHR}.nc" \ + >> "${RUN}_restarta_grp${n}.txt" + fi + fi + { + echo "${COM_ATMOS_HISTORY_MEM/${ROTDIR}\//}/${head}atmf00${FHR}.nc" + if (( FHR == 6 )); then + echo "${COM_ATMOS_HISTORY_MEM/${ROTDIR}\//}/${head}sfcf00${FHR}.nc" + fi + } >> "${RUN}_grp${n}.txt" + done # loop over FHR + + if [[ ${lobsdiag_forenkf} == ".false." ]] ; then + { + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}gsistat" + if [[ -s "${COM_ATMOS_RESTART_MEM}/${head}cnvstat" ]] ; then + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}cnvstat" + fi + } >> "${RUN}_grp${n}.txt" + + { + if [[ -s "${COM_ATMOS_RESTART_MEM}/${head}radstat" ]]; then + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}radstat" + fi + if [[ -s "${COM_ATMOS_RESTART_MEM}/${head}cnvstat" ]]; then + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}cnvstat" + fi + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}abias" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}abias_air" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}abias_int" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/${head}abias_pc" + } >> "${RUN}_restarta_grp${n}.txt" fi - done # loop over FHR - - if [[ lobsdiag_forenkf = ".false." 
]] ; then - echo "${dirname}${head}gsistat " >>enkf${CDUMP}_grp${n}.txt - if [ -s $ROTDIR/${dirpath}${head}cnvstat ] ; then - echo "${dirname}${head}cnvstat " >>enkf${CDUMP}_grp${n}.txt - fi - if [ -s $ROTDIR/${dirpath}${head}radstat ]; then - echo "${dirname}${head}radstat " >>enkf${CDUMP}_restarta_grp${n}.txt - fi - if [ -s $ROTDIR/${dirpath}${head}cnvstat ]; then - echo "${dirname}${head}cnvstat " >>enkf${CDUMP}_restarta_grp${n}.txt - fi - echo "${dirname}${head}abias " >>enkf${CDUMP}_restarta_grp${n}.txt - echo "${dirname}${head}abias_air " >>enkf${CDUMP}_restarta_grp${n}.txt - echo "${dirname}${head}abias_int " >>enkf${CDUMP}_restarta_grp${n}.txt - echo "${dirname}${head}abias_pc " >>enkf${CDUMP}_restarta_grp${n}.txt - fi - #--- - echo "${dirname}RESTART/*0000.sfcanl_data.tile1.nc " >>enkf${CDUMP}_restarta_grp${n}.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile2.nc " >>enkf${CDUMP}_restarta_grp${n}.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile3.nc " >>enkf${CDUMP}_restarta_grp${n}.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile4.nc " >>enkf${CDUMP}_restarta_grp${n}.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile5.nc " >>enkf${CDUMP}_restarta_grp${n}.txt - echo "${dirname}RESTART/*0000.sfcanl_data.tile6.nc " >>enkf${CDUMP}_restarta_grp${n}.txt - - #--- - echo "${dirname}RESTART " >>enkf${CDUMP}_restartb_grp${n}.txt - - m=$((m+1)) - done + #--- + { + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile1.nc" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile2.nc" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile3.nc" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile4.nc" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile5.nc" + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}/*0000.sfcanl_data.tile6.nc" + } >> "${RUN}_restarta_grp${n}.txt" + #--- + echo "${COM_ATMOS_RESTART_MEM/${ROTDIR}\//}" >> "${RUN}_restartb_grp${n}.txt" + + m=$((m+1)) + done #........................... diff --git a/ush/inter_flux.sh b/ush/inter_flux.sh index debb51f64c..b1f4475e05 100755 --- a/ush/inter_flux.sh +++ b/ush/inter_flux.sh @@ -1,5 +1,6 @@ -#!/bin/ksh -set -x +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" "$FH" #----------------------------------------------------------------------- #-Wen Meng, 03/2019: First version. @@ -7,13 +8,10 @@ set -x # into lat-lon grids. #----------------------------------------------------------------------- - -echo "!!!!!CREATING $RUN FLUX PRODUCTS FOR FH = $FH !!!!!!" 
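For context, the flux product step that inter_flux.sh performs is a wgrib2 regrid of the native flux file onto a regular lat-lon grid, followed by generation of the inventory index. A standalone sketch of that kind of call is below; the file names and the exact grid specification are illustrative and are not taken from the script:

```bash
#! /usr/bin/env bash
# Interpolate a flux GRIB2 file onto a global 1-degree lat-lon grid.
# Input/output names here are placeholders.
wgrib2 sfluxgrbf006.grib2 \
    -set_grib_type same \
    -new_grid_winds earth \
    -new_grid latlon 0:360:1.0 -90:181:1.0 flux_1p00.grib2

# Build the inventory/index alongside the regridded file.
wgrib2 -s flux_1p00.grib2 > flux_1p00.grib2.idx
```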
- -export CNVGRIB=${CNVGRIB:-${NWPROD:-/nwprod}/util/exec/cnvgrib21} -export COPYGB2=${COPYGB2:-${NWPROD:-/nwprod}/util/exec/copygb2} -export WGRIB2=${WGRIB2:-${NWPROD:-/nwprod}/util/exec/wgrib2} -export GRBINDEX=${GRBINDEX:-${NWPROD:-nwprod}/util/exec/grbindex} +export CNVGRIB=${CNVGRIB:-${grib_util_ROOT}/bin/cnvgrib} +export COPYGB2=${COPYGB2:-${grib_util_ROOT}/bin/copygb} +export WGRIB2=${WGRIB2:-${wgrib2_ROOT}/bin/wgrib2} +export GRBINDEX=${GRBINDEX:-${wgrib2_ROOT}/bin/grbindex} export RUN=${RUN:-"gfs"} export cycn=$(echo $CDATE |cut -c 9-10) export TCYC=${TCYC:-".t${cycn}z."} @@ -45,17 +43,13 @@ else fi #--------------------------------------------------------------- + ${WGRIB2} "${COM_ATMOS_MASTER}/${FLUXFL}" ${option1} ${option21} ${option22} ${option23} \ + ${option24} ${option25} ${option26} ${option27} ${option28} \ + -new_grid ${grid1p0} fluxfile_${fhr3}_1p00 + export err=$?; err_chk - $WGRIB2 $COMOUT/${FLUXFL} $option1 $option21 $option22 $option23 $option24 \ - $option25 $option26 $option27 $option28 \ - -new_grid $grid1p0 fluxfile_${fhr3}_1p00 - - - $WGRIB2 -s fluxfile_${fhr3}_1p00 > $COMOUT/${PREFIX}flux.1p00.f${fhr3}.idx - cp fluxfile_${fhr3}_1p00 $COMOUT/${PREFIX}flux.1p00.f${fhr3} - -#--------------------------------------------------------------- -echo "!!!!!CREATION OF SELECT $RUN FLUX PRODUCTS COMPLETED FOR FHR = $FH !!!!!" + ${WGRIB2} -s "fluxfile_${fhr3}_1p00" > "${COM_ATMOS_GRIB_1p00}/${PREFIX}flux.1p00.f${fhr3}.idx" + cp "fluxfile_${fhr3}_1p00" "${COM_ATMOS_GRIB_1p00}/${PREFIX}flux.1p00.f${fhr3}" #--------------------------------------------------------------- diff --git a/ush/jjob_header.sh b/ush/jjob_header.sh new file mode 100644 index 0000000000..45fa6402ae --- /dev/null +++ b/ush/jjob_header.sh @@ -0,0 +1,115 @@ +#! /usr/bin/env bash +# +# Universal header for global j-jobs +# +# Sets up and completes actions common to all j-jobs: +# - Creates and moves to $DATA after removing any +# existing one unless $WIPE_DATA is set to "NO" +# - Runs `setpdy.sh` +# - Sources configs provided as arguments +# - Sources machine environment script +# - Defines a few other variables +# +# The job name for the environment files should be passed +# in using the `-e` option (required). Any config files +# to be sourced should be passed in as an argument to +# the `-c` option. For example: +# ``` +# jjob_header.sh -e "fcst" -c "base fcst" +# ``` +# Will source `config.base` and `config.fcst`, then pass +# `fcst` to the ${machine}.env script. 
+# +# Script requires the following variables to already be +# defined in the environment: +# - $HOMEgfs +# - $DATAROOT (unless $DATA is overriden) +# - $jobid +# - $PDY +# - $cyc +# - $machine +# +# Additionally, there are a couple of optional settings that +# can be set before calling the script: +# - $EXPDIR : Override the default $EXPDIR +# [default: ${HOMEgfs}/parm/config] +# - $DATA : Override the default $DATA location +# [default: ${DATAROOT}/${jobid}] +# - $WIPE_DATA : Set whether to delete any existing $DATA +# [default: "YES"] +# - $pid : Override the default process id +# [default: $$] +# + +OPTIND=1 +while getopts "c:e:" option; do + case "${option}" in + c) read -ra configs <<< "${OPTARG}" ;; + e) env_job=${OPTARG} ;; + :) + echo "FATAL [${BASH_SOURCE[0]}]: ${option} requires an argument" + exit 1 + ;; + *) + echo "FATAL [${BASH_SOURCE[0]}]: Unrecognized option: ${option}" + exit 1 + ;; + esac +done +shift $((OPTIND-1)) + +if [[ -z ${env_job} ]]; then + echo "FATAL [${BASH_SOURCE[0]}]: Must specify a job name with -e" + exit 1 +fi + +############################################## +# make temp directory +############################################## +export DATA=${DATA:-"${DATAROOT}/${jobid}"} +if [[ ${WIPE_DATA:-YES} == "YES" ]]; then + rm -rf "${DATA}" +fi +mkdir -p "${DATA}" +cd "${DATA}" || ( echo "FATAL [${BASH_SOURCE[0]}]: ${DATA} does not exist"; exit 1 ) + + +############################################## +# Run setpdy and initialize PDY variables +############################################## +export cycle="t${cyc}z" +setpdy.sh +source ./PDY + + +############################################## +# Determine Job Output Name on System +############################################## +export pid="${pid:-$$}" +export pgmout="OUTPUT.${pid}" +export pgmerr=errfile + + +############################# +# Source relevant config files +############################# +export EXPDIR="${EXPDIR:-${HOMEgfs}/parm/config}" +for config in "${configs[@]:-''}"; do + source "${EXPDIR}/config.${config}" + status=$? + if (( status != 0 )); then + echo "FATAL [${BASH_SOURCE[0]}]: Unable to load config config.${config}" + exit "${status}" + fi +done + + +########################################## +# Source machine runtime environment +########################################## +source "${HOMEgfs}/env/${machine}.env" "${env_job}" +status=$? +if (( status != 0 )); then + echo "FATAL [${BASH_SOURCE[0]}]: Error while sourcing machine environment ${machine}.env for job ${env_job}" + exit "${status}" +fi diff --git a/ush/link_crtm_fix.sh b/ush/link_crtm_fix.sh index 3307b5f6dd..0d4d8dc55b 100755 --- a/ush/link_crtm_fix.sh +++ b/ush/link_crtm_fix.sh @@ -1,4 +1,6 @@ -#! /bin/sh +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" # Get CRTM fix directory from (in this order): # 1. First argument to script, or diff --git a/ush/load_fv3gfs_modules.sh b/ush/load_fv3gfs_modules.sh index a13a6ee3ab..2899e69514 100755 --- a/ush/load_fv3gfs_modules.sh +++ b/ush/load_fv3gfs_modules.sh @@ -1,49 +1,49 @@ -#!/bin/sh -#set -x +#! /usr/bin/env bash ############################################################### +if [[ "${DEBUG_WORKFLOW:-NO}" == "NO" ]]; then + echo "Loading modules quietly..." 
+ set +x +fi + # Setup runtime environment by loading modules ulimit_s=$( ulimit -S -s ) -#ulimit -S -s 10000 - -set +x # Find module command and purge: -source "$HOMEgfs/modulefiles/module-setup.sh.inc" +source "${HOMEgfs}/modulefiles/module-setup.sh.inc" # Load our modules: -module use "$HOMEgfs/modulefiles" +module use "${HOMEgfs}/modulefiles" -if [[ -d /lfs4 ]] ; then - # We are on NOAA Jet - module load module_base.jet +if [[ -d /lfs/f1 ]]; then + # We are on WCOSS2 (Cactus or Dogwood) + module load module_base.wcoss2 +elif [[ -d /mnt/lfs1 ]] ; then + # We are on NOAA Jet + module load module_base.jet elif [[ -d /scratch1 ]] ; then - # We are on NOAA Hera - module load module_base.hera + # We are on NOAA Hera + module load module_base.hera elif [[ -d /work ]] ; then - # We are on MSU Orion - module load module_base.orion -elif [[ -d /gpfs/hps && -e /etc/SuSE-release ]] ; then - # We are on NOAA Luna or Surge - module load module_base.wcoss_c -elif [[ -L /usrx && "$( readlink /usrx 2> /dev/null )" =~ dell ]] ; then - # We are on NOAA Mars or Venus - module load module_base.wcoss_dell_p3 -elif [[ -d /dcom && -d /hwrf ]] ; then - # We are on NOAA Tide or Gyre - module load module_base.wcoss + # We are on MSU Orion + module load module_base.orion elif [[ -d /glade ]] ; then - # We are on NCAR Yellowstone - module load module_base.cheyenne + # We are on NCAR Yellowstone + module load module_base.cheyenne elif [[ -d /lustre && -d /ncrc ]] ; then - # We are on GAEA. - module load module_base.gaea + # We are on GAEA. + module load module_base.gaea +elif [[ -d /data/prod ]] ; then + # We are on SSEC S4 + module load module_base.s4 else - echo WARNING: UNKNOWN PLATFORM + echo WARNING: UNKNOWN PLATFORM fi -set -x +module list # Restore stack soft limit: -ulimit -S -s "$ulimit_s" +ulimit -S -s "${ulimit_s}" unset ulimit_s + +set_trace diff --git a/ush/load_ufsda_modules.sh b/ush/load_ufsda_modules.sh new file mode 100755 index 0000000000..da8e2d8096 --- /dev/null +++ b/ush/load_ufsda_modules.sh @@ -0,0 +1,85 @@ +#! /usr/bin/env bash + +############################################################### +if [[ "${DEBUG_WORKFLOW:-NO}" == "NO" ]]; then + echo "Loading modules quietly..." + set +x +fi + +# Read optional module argument, default is to use GDAS +MODS="GDAS" +if [[ $# -gt 0 ]]; then + case "$1" in + --eva) + MODS="EVA" + ;; + --gdas) + MODS="GDAS" + ;; + *) + echo "Invalid option: $1" >&2 + exit 1 + ;; + esac +fi + +# Setup runtime environment by loading modules +ulimit_s=$( ulimit -S -s ) + +# Find module command and purge: +source "${HOMEgfs}/modulefiles/module-setup.sh.inc" + +# Load our modules: +module use "${HOMEgfs}/sorc/gdas.cd/modulefiles" + +if [[ -d /lfs/f1 ]]; then + # We are on WCOSS2 (Cactus or Dogwood) + echo WARNING: UFSDA NOT SUPPORTED ON THIS PLATFORM +elif [[ -d /lfs3 ]] ; then + # We are on NOAA Jet + echo WARNING: UFSDA NOT SUPPORTED ON THIS PLATFORM +elif [[ -d /scratch1 ]] ; then + # We are on NOAA Hera + module load "${MODS}/hera" + # set NETCDF variable based on ncdump location + NETCDF=$( which ncdump ) + export NETCDF + # prod_util stuff, find a better solution later... + module use /scratch2/NCEPDEV/nwprod/hpc-stack/libs/hpc-stack/modulefiles/compiler/intel/2022.1.2/ + module load prod_util +elif [[ -d /work ]] ; then + # We are on MSU Orion + # prod_util stuff, find a better solution later... 
+ #module use /apps/contrib/NCEP/hpc-stack/libs/hpc-stack/modulefiles/compiler/intel/2022.1.2/ + #module load prod_util + export UTILROOT=/work2/noaa/da/python/opt/intel-2022.1.2/prod_util/1.2.2 + export MDATE=/work2/noaa/da/python/opt/intel-2022.1.2/prod_util/1.2.2/bin/mdate + export NDATE=/work2/noaa/da/python/opt/intel-2022.1.2/prod_util/1.2.2/bin/ndate + export NHOUR=/work2/noaa/da/python/opt/intel-2022.1.2/prod_util/1.2.2/bin/nhour + export FSYNC=/work2/noaa/da/python/opt/intel-2022.1.2/prod_util/1.2.2/bin/fsync_file + module load "${MODS}/orion" + # set NETCDF variable based on ncdump location + ncdump=$( which ncdump ) + NETCDF=$( echo "${ncdump}" | cut -d " " -f 3 ) + export NETCDF +elif [[ -d /glade ]] ; then + # We are on NCAR Yellowstone + echo WARNING: UFSDA NOT SUPPORTED ON THIS PLATFORM +elif [[ -d /lustre && -d /ncrc ]] ; then + # We are on GAEA. + echo WARNING: UFSDA NOT SUPPORTED ON THIS PLATFORM +elif [[ -d /data/prod ]] ; then + # We are on SSEC S4 + echo WARNING: UFSDA NOT SUPPORTED ON THIS PLATFORM +else + echo WARNING: UNKNOWN PLATFORM +fi + +module list +pip list + +# Restore stack soft limit: +ulimit -S -s "${ulimit_s}" +unset ulimit_s + +set_trace diff --git a/ush/merge_fv3_aerosol_tile.py b/ush/merge_fv3_aerosol_tile.py index 7538bc7a76..decf6e9cba 100755 --- a/ush/merge_fv3_aerosol_tile.py +++ b/ush/merge_fv3_aerosol_tile.py @@ -155,8 +155,8 @@ def main() -> None: parser.add_argument('core_file', type=str, help="File containing the dycore sigma level coefficients") parser.add_argument('ctrl_file', type=str, help="File containing the sigma level coefficients for atmospheric IC data") parser.add_argument('rest_file', type=str, help="File containing the pressure level thickness for the restart state") - parser.add_argument('variable_file', type=str, help="File containing list of tracer variable_names in the chem_file to add to the atm_file, one tracer per line") - parser.add_argument('out_file', type=str, nargs="?", help="Name of file to create. If none is specified, the atm_file will be edited in place. New file will be a copy of atm_file with the specificed tracers listed in variable_file appended from chem_file and ntracers updated.") + parser.add_argument('variable_file', type=str, help="File with list of tracer variable_names in the chem_file to add to the atm_file, one tracer per line") + parser.add_argument('out_file', type=str, nargs="?", help="Name of file to create") args = parser.parse_args() diff --git a/ush/minmon_xtrct_costs.pl b/ush/minmon_xtrct_costs.pl new file mode 100755 index 0000000000..1b5d490102 --- /dev/null +++ b/ush/minmon_xtrct_costs.pl @@ -0,0 +1,231 @@ +#!/usr/bin/env perl + +#--------------------------------------------------------------------------- +# minmon_xtrct_costs.pl +# +# Extract cost data from gsistat file and load into cost +# and cost term files. 
+#--------------------------------------------------------------------------- + +use strict; +use warnings; + +#---------------------------------------------- +# subroutine to trim white space from strings +#---------------------------------------------- +sub trim { my $s = shift; $s =~ s/^\s+|\s+$//g; return $s }; + + +#--------------------------- +# +# Main routine begins here +# +#--------------------------- + +if ($#ARGV != 4 ) { + print "usage: minmon_xtrct_costs.pl SUFFIX PDY cyc infile jlogfile\n"; + exit; +} +my $suffix = $ARGV[0]; + +my $pdy = $ARGV[1]; +my $cyc = $ARGV[2]; +my $infile = $ARGV[3]; +my $jlogfile = $ARGV[4]; + +my $use_costterms = 0; +my $no_data = 0.00; + +my $scr = "minmon_xtrct_costs.pl"; +print "$scr has started\n"; + + +my $rc = 0; +my $cdate = sprintf '%s%s', $pdy, $cyc; + +if( (-e $infile) ) { + + my $found_cost = 0; + my $found_costterms = 0; + my @cost_array; + my @jb_array; + my @jo_array; + my @jc_array; + my @jl_array; + my @term_array; + my @all_cost_terms; + + my $cost_target; + my $cost_number; + my $costterms_target; + my $jb_number = 5; + my $jo_number = 6; + my $jc_number = 7; + my $jl_number = 8; + + my $costfile = $ENV{"mm_costfile"}; + + if( (-e $costfile) ) { + open( COSTFILE, "<${costfile}" ) or die "Can't open ${costfile}: $!\n"; + my $line; + + while( $line = ) { + if( $line =~ /cost_target/ ) { + my @termsline = split( /:/, $line ); + $cost_target = $termsline[1]; + } elsif( $line =~ /cost_number/ ) { + my @termsline = split( /:/, $line ); + $cost_number = $termsline[1]; + } elsif( $line =~ /costterms_target/ ){ + my @termsline = split( /:/, $line ); + $costterms_target = $termsline[1]; + } + } + close( COSTFILE ); + } else { + $rc = 2; + } + + #------------------------------------------------------------------------ + # Open the infile and search for the $costterms_target and $cost_target + # strings. If found, parse out the cost information and push into + # holding arrays. + #------------------------------------------------------------------------ + if( $rc == 0 ) { + open( INFILE, "<${infile}" ) or die "Can't open ${infile}: $!\n"; + + my $line; + my $term_ctr=0; + + while( $line = ) { + + if( $line =~ /$costterms_target/ ) { + my @termsline = split( / +/, $line ); + push( @jb_array, $termsline[$jb_number] ); + push( @jo_array, $termsline[$jo_number] ); + push( @jc_array, $termsline[$jc_number] ); + push( @jl_array, $termsline[$jl_number] ); + $use_costterms = 1; + } + + if( $line =~ /$cost_target/ ) { + my @costline = split( / +/, $line ); + push( @cost_array, $costline[$cost_number] ); + } + + if( $term_ctr > 0 ) { + my @termline = split( / +/, $line ); + + if ( $term_ctr < 10 ) { + push( @term_array, trim($termline[1]) ); + push( @term_array, trim($termline[2]) ); + push( @term_array, trim($termline[3]) ); + $term_ctr++; + } else { + push( @term_array, trim($termline[1]) ); + push( @term_array, trim($termline[2]) ); + $term_ctr = 0; + } + + }elsif ( $line =~ "J=" && $line !~ "EJ=" ) { + my @termline = split( / +/, $line ); + push( @term_array, trim($termline[2]) ); + push( @term_array, trim($termline[3]) ); + push( @term_array, trim($termline[4]) ); + $term_ctr = 1; + } + } + + close( INFILE ); + + + #---------------------------------------------- + # move cost_array into all_costs by iteration + #---------------------------------------------- + my @all_costs; + for my $i (0 .. 
$#cost_array) { + my $iterline; + if( $use_costterms == 1 ){ + $iterline = sprintf ' %d,%e,%e,%e,%e,%e%s', + $i, $cost_array[$i], $jb_array[$i], $jo_array[$i], + $jc_array[$i], $jl_array[$i], "\n"; + } + else { + $iterline = sprintf ' %d,%e,%e,%e,%e,%e%s', + $i, $cost_array[$i], $no_data, $no_data, + $no_data, $no_data, "\n"; + } + + push( @all_costs, $iterline ); + } + + #--------------------------------------------------- + # move term_array into all_cost_terms by iteration + #--------------------------------------------------- + if( @term_array > 0 ) { + my $nterms = 32; + my $max_iter = ($#term_array+1)/$nterms; + my $niter = $max_iter -1; + + for my $iter (0 .. $niter ) { + my $step = $iter * $nterms; + my $iterline = sprintf '%d, %e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e,%e%s', + $iter, $term_array[$step], $term_array[$step+1], $term_array[$step+2], + $term_array[$step+3], $term_array[$step+4], $term_array[$step+5], + $term_array[$step+6], $term_array[$step+7], $term_array[$step+8], + $term_array[$step+9], $term_array[$step+10], $term_array[$step+11], + $term_array[$step+12], $term_array[$step+13], $term_array[$step+14], + $term_array[$step+15], $term_array[$step+16], $term_array[$step+17], + $term_array[$step+18], $term_array[$step+19], $term_array[$step+20], + $term_array[$step+21], $term_array[$step+22], $term_array[$step+23], + $term_array[$step+24], $term_array[$step+25], $term_array[$step+26], + $term_array[$step+27], $term_array[$step+28], $term_array[$step+29], + $term_array[$step+30], $term_array[$step+31], "\n"; + push( @all_cost_terms, $iterline ); + } + } + + #------------------------------------------ + # write all_costs array to costs.txt file + #------------------------------------------ + my $filename2 = "${cdate}.costs.txt"; + if( @all_costs > 0 ) { + open( OUTFILE, ">$filename2" ) or die "Can't open ${filename2}: $!\n"; + print OUTFILE @all_costs; + close( OUTFILE ); + } + + #----------------------------------------------------- + # write all_cost_terms array to costs_terms.txt file + #----------------------------------------------------- + my $filename3 = "${cdate}.cost_terms.txt"; + if( @all_cost_terms > 0 ) { + open( OUTFILE, ">$filename3" ) or die "Can't open ${filename3}: $!\n"; + print OUTFILE @all_cost_terms; + close( OUTFILE ); + } + + #-------------------------- + # move files to $M_TANKverf + #-------------------------- + my $tankdir = $ENV{"M_TANKverfM0"}; + if(! -d $tankdir) { + system( "mkdir -p $tankdir" ); + } + + if( -e $filename2 ) { + my $newfile2 = "${tankdir}/${filename2}"; + system("cp -f $filename2 $newfile2"); + } + if( -e $filename3 ) { + my $newfile3 = "${tankdir}/${filename3}"; + system("cp -f $filename3 $newfile3"); + } + + } # $rc still == 0 after reading gmon_cost.txt +} +else { # $infile does not exist + $rc = 1; +} + +print "$scr has ended, return code = $rc \n" diff --git a/ush/minmon_xtrct_gnorms.pl b/ush/minmon_xtrct_gnorms.pl new file mode 100755 index 0000000000..ecd44232da --- /dev/null +++ b/ush/minmon_xtrct_gnorms.pl @@ -0,0 +1,442 @@ +#!/usr/bin/env perl + +use strict; +use warnings; +use List::MoreUtils 'true'; +use List::MoreUtils 'first_index'; +use List::MoreUtils 'last_index'; + +#--------------------------------------------------------------------------- +# minmon_xtrct_gnorms.pl +# +# Update the gnorm_data.txt file with data from a new cycle. Add +# this new data to the last line of the gnorm_data.txt file. 
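(Editor's aside on the cost extractor just above: a quick way to sanity-check its output. The file names follow the ${cdate}.costs.txt and ${cdate}.cost_terms.txt pattern the script writes; the cycle value below is made up.)

  cdate="2024010100"                       # hypothetical cycle
  # costs.txt columns, per the sprintf above: iteration, total cost, Jb, Jo, Jc, Jl
  head -n 3 "${cdate}.costs.txt"
  # cost_terms.txt carries 32 penalty terms per record
  awk -F',' '{print "record", $1, "has", NF-1, "terms"}' "${cdate}.cost_terms.txt"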
+# +# Note: If the gnorm_data.txt file does not exist, it will be created. +# +# The gnorm_data.txt file is used plotted directly by the javascript on +# the GSI stats page. +#--------------------------------------------------------------------------- +sub updateGnormData { + my $cycle = $_[0]; + my $igrad = $_[1]; + my $fgnorm = $_[2]; + my $avg_gnorm = $_[3]; + my $min_gnorm = $_[4]; + my $max_gnorm = $_[5]; + my $suffix = $_[6]; + + my $rc = 0; + my @filearray; + + my $gdfile = "gnorm_data.txt"; + + my $outfile = "new_gnorm_data.txt"; + my $yr = substr( $cycle, 0, 4); + my $mon = substr( $cycle, 4, 2); + my $day = substr( $cycle, 6, 2); + my $hr = substr( $cycle, 8, 2); + + my $newln = sprintf ' %04d,%02d,%02d,%02d,%e,%e,%e,%e,%e%s', + $yr, $mon, $day, $hr, $igrad, $fgnorm, + $avg_gnorm, $min_gnorm, $max_gnorm, "\n"; + + #------------------------------------------------------------- + # attempt to locate the latest $gdfile and copy it locally + # + if( -e $gdfile ) { + open( INFILE, "<${gdfile}" ) or die "Can't open ${gdfile}: $!\n"; + + @filearray = ; + +# This is the mechanism that limits the data to 30 days worth. Should I +# keep it or let the transfer script(s) truncate? 6/12/16 -- I'm going to keep +# it. I can add this as a later change once I add a user mechanism to vary the +# amount of data plotted (on the fly). + + my $cyc_interval = $ENV{'CYCLE_INTERVAL'}; + if( $cyc_interval eq "" ) { + $cyc_interval = 6; + } + + my $max_cyc = 119; # default 30 days worth of data = 120 cycles + # If CYCLE_INTERVAL is other than "" or 6 + # then set the $max_cyc using that interval + if( $cyc_interval != 6 && $cyc_interval != 0 ) { + my $cyc_per_day = 24 / $cyc_interval; + $max_cyc = (30 * $cyc_per_day) - 1; + } + + while( $#filearray > $max_cyc ) { + shift( @filearray ); + } + close( INFILE ); + } + + # Here is the problem Russ encountered after re-running the MinMon: + # If the cycle time in $newln is the same as an existing record in + # *.gnorm_data.txt then we end up with 2+ rows for the same cycle time. + # In that case $newln should replace the first existing line + # in @filearray and all other lines that might match should be deleted. + # Else when the cycle time doesn't already exist (the expected condition) + # it should be pushed into @filearray. + + # algorithm: + # ========= + # Establish $count of matches on "$yr,$mon,$day,$hr" + # if $count > 0 + # while $count > 1 + # get last_index and remove with splice + # replace first_index with $newln + # else + # push $newln + # + my $srch_strng = "$yr,$mon,$day,$hr"; + my $count = true { /$srch_strng/ } @filearray; + + if( $count > 0 ) { + while( $count > 1 ) { + my $l_index = last_index { /$srch_strng/ } @filearray; + splice @filearray, $l_index, 1; + $count = true { /$srch_strng/ } @filearray; + } + my $f_index = first_index { /$srch_strng/ } @filearray; + splice @filearray, $f_index, 1, $newln; + } + else { + push( @filearray, $newln ); + } + + open( OUTFILE, ">$outfile" ) or die "Can't open ${$outfile}: $!\n"; + print OUTFILE @filearray; + close( OUTFILE ); + + system("cp -f $outfile $gdfile"); + +} + +#--------------------------------------------------------------------------- +# makeErrMsg +# +# Apply a gross check on the final value of the gnorm for a specific +# cycle. If the final_gnorm value is greater than the gross_check value +# then put that in the error message file. Also check for resets or a +# premature halt, and journal those events to the error message file too. 
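(Editor's aside on updateGnormData above: the retention logic keeps roughly 30 days of cycles. The same arithmetic written out in shell, for reference; CYCLE_INTERVAL is in hours and defaults to 6, as in the Perl.)

  cyc_interval="${CYCLE_INTERVAL:-6}"
  cyc_per_day=$(( 24 / cyc_interval ))
  max_cyc=$(( 30 * cyc_per_day - 1 ))      # 119 when CYCLE_INTERVAL=6
  echo "gnorm_data.txt keeps the last $(( max_cyc + 1 )) cycles"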
+# +# Note to self: reset_iter array is passed by reference +#--------------------------------------------------------------------------- +sub makeErrMsg { + my $suffix = $_[0]; + my $cycle = $_[1]; + my $final_gnorm = $_[2]; + my $stop_flag = $_[3]; + my $stop_iter = $_[4]; + my $reset_flag = $_[5]; + my $reset_iter = $_[6]; #reset iteration array + my $infile = $_[7]; + my $gross_check = $_[8]; + + my $mail_msg =""; + my $out_file = "${cycle}.errmsg.txt"; + + + if( $stop_flag > 0 ) { + my $stop_msg = " Gnorm check detected premature iteration stop: suffix = $suffix, cycle = $cycle, iteration = $stop_iter"; + $mail_msg .= $stop_msg; + } + + if( $reset_flag > 0 ) { + my $ctr=0; + my $reset_msg = "\n Gnorm check detected $reset_flag reset(s): suffix = $suffix, cycle = $cycle"; + $mail_msg .= $reset_msg; + $mail_msg .= "\n"; + $mail_msg .= " Reset(s) detected in iteration(s): @{$reset_iter}[$ctr] \n"; + + my $arr_size = @{$reset_iter}; + for( $ctr=1; $ctr < $arr_size; $ctr++ ) { + $mail_msg .= " @{$reset_iter}[$ctr]\n"; + } + } + + if( $final_gnorm >= $gross_check ){ + my $gnorm_msg = " Final gnorm gross check failure: suffix = $suffix, cycle = $cycle, final gnorm = $final_gnorm "; + + $mail_msg .= $gnorm_msg; + } + + if( length $mail_msg > 0 ){ + my $file_msg = " File source for report is: $infile"; + $mail_msg .= $file_msg; + } + + if( length $mail_msg > 0 ){ + my $mail_link = "http://www.emc.ncep.noaa.gov/gmb/gdas/gsi_stat/index.html?src=$suffix&typ=gnorm&cyc=$cycle"; + open( OUTFILE, ">$out_file" ) or die "Can't open ${$out_file}: $!\n"; + print OUTFILE $mail_msg; + print OUTFILE "\n\n $mail_link"; + close( OUTFILE ); + } +} + + +#--------------------------------------------------------------------------- +# +# Main routine begins here +# +#--------------------------------------------------------------------------- + +if ($#ARGV != 4 ) { + print "usage: minmon_xtrct_gnorms.pl SUFFIX pdy cyc infile jlogfile\n"; + exit; +} + + +my $suffix = $ARGV[0]; +my $pdy = $ARGV[1]; +my $cyc = $ARGV[2]; +my $infile = $ARGV[3]; +my $jlogfile = $ARGV[4]; + + +my $scr = "minmon_xtrct_gnorms.pl"; +print "$scr Has Started\n"; + +# +# This needs to be redesigned to get the gnorm value from the gsistat file +# using the line that starts "cost,grad,step,b,step?:". The line formerly +# used for the gnorm and reduction values may not be available if the the +# verbose output flag is set to FALSE. 
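(Editor's aside on makeErrMsg above: the ${cycle}.errmsg.txt file it writes is meant to be picked up by the caller. A minimal sketch of that hand-off; the mail command and recipient are purely illustrative.)

  cdate="2024010100"                          # hypothetical cycle
  errmsg="${cdate}.errmsg.txt"
  if [[ -s "${errmsg}" ]]; then
    # Only notify when a gross-check failure, a reset, or a premature stop was recorded
    mail -s "MinMon gnorm alert ${cdate}" "${MAIL_TO:-ops@example.gov}" < "${errmsg}"
  fi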
+# +# So, using the grad value on that line: +# gnorm[i] = (grad[i]**)/(grad[0]**) +# reduct[i] = sqrt(gnorm) + +my $igrad_target; +my $igrad_number; +my $expected_gnorms; +my $gross_check_val; + +my $rc = 0; +my $cdate = sprintf '%s%s', $pdy, $cyc; + +my $gnormfile = $ENV{"mm_gnormfile"}; + + +if( (-e $gnormfile) ) { + open( GNORMFILE, "<${gnormfile}" ) or die "Can't open ${gnormfile}: $!\n"; + my $line; + + while( $line = ) { + if( $line =~ /igrad_target/ ) { + my @termsline = split( /:/, $line ); + $igrad_target = $termsline[1]; + } elsif( $line =~ /igrad_number/ ) { + my @termsline = split( /:/, $line ); + $igrad_number = $termsline[1]; + } elsif( $line =~ /expected_gnorms/ ){ + my @termsline = split( /:/, $line ); + $expected_gnorms = $termsline[1]; + } elsif( $line =~ /gross_check_val/ ){ + my @termsline = split( /:/, $line ); + $gross_check_val = $termsline[1]; + } + } + close( GNORMFILE ); +} else { + $rc = 4; +} + +if( $rc == 0 ) { + if( (-e $infile) ) { + open( INFILE, "<${infile}" ) or die "Can't open ${infile}: $!\n"; + + my $found_igrad = 0; + my $final_gnorm = 0.0; + my $igrad = 0.0; + my $header = 4; + my $header2 = 0; + my @gnorm_array; + my @last_10_gnorm; + + my $reset_flag = 0; + my $stop_flag = 0; + my $warn_str = "WARNING"; + my $stop_str = "Stopping"; + my $stop_iter = ""; + my $reset_str = "Reset"; + my @reset_iter; # reset iteration array + + my $stop_iter_flag = 0; + my $reset_iter_flag = 0; + my $line; + while( $line = ) { + + ############################################## + # if the reset_iter_flag is 1 then record the + # current outer & inner iteration number + ############################################## + if( $reset_iter_flag == 1 ) { + if( $line =~ /${igrad_target}/ ) { + my @iterline = split( / +/, $line ); + my $iter_str = $iterline[2] . "," . $iterline[3]; + push( @reset_iter, $iter_str); + $reset_iter_flag = 0; + } + } + + + if( $line =~ /${igrad_target}/ ) { + my @gradline = split( / +/, $line ); + + my $grad = $gradline[$igrad_number]; + + if( $found_igrad == 0 ){ + $igrad = $grad; + $found_igrad = 1; + } + + my $igrad_sqr = $igrad**2; + my $grad_sqr = $grad**2; + my $gnorm = $grad_sqr/$igrad_sqr; + + push( @gnorm_array, $gnorm ); + } + + + if( $line =~ /${warn_str}/ ) { + if( $line =~ /${stop_str}/ ) { + $stop_flag++; + $stop_iter_flag=1; + } + elsif( $line =~ /${reset_str}/ ){ + $reset_flag++; + $reset_iter_flag = 1; + } + } + + } + close( INFILE ); + + ######################################################################## + # If the stop_flag is >0 then record the last outer & inner + # iteration number. The trick is that it's the last iteration in the + # log file and we just passed it when we hit the stop warning message, + # so we have to reopen the file and get the last iteration number. + ######################################################################## + if( $stop_flag > 0 ) { + open( INFILE, "<${infile}" ) or die "Can't open ${infile}: $!\n"; + + my @lines = reverse ; + foreach $line (@lines) { + if( $line =~ /${igrad_target}/ ){ + my @iterline = split( / +/, $line ); + $stop_iter = $iterline[2] . "," . $iterline[3]; + last; + } + } + close( INFILE ); + } + + + my @all_gnorm = @gnorm_array; + + ############################################################################## + ## + ## If the iterations were halted due to error then the @all_gnorm array won't + ## be the expected size. In that case we need to pad the array out with + ## RMISS values so GrADS won't choke when it tries to read the data file. 
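(Editor's aside: in shell terms, the padding described here amounts to extending the value list to a fixed length with a missing-data sentinel; a sketch with made-up numbers, using the same -999.0 fill value as the Perl.)

  expected_gnorms=150            # hypothetical; the real value comes from the gnorm parm file
  gnorms=(0.9 0.5 0.2)           # made-up values
  while (( ${#gnorms[@]} < expected_gnorms )); do
    gnorms+=("-999.0")           # same fill value the Perl pushes
  done
  echo "record length: ${#gnorms[@]}"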
+ ## + ## Note that we're padding @all_gnorm. The @gnorm_array is examined below + ## and we don't want to pad that and mess up the min/max calculation. + ## + ############################################################################### + my $arr_size = @all_gnorm; + + if( $arr_size < $expected_gnorms ) { + for( my $ctr = $arr_size; $ctr < $expected_gnorms; $ctr++ ) { + push( @all_gnorm, -999.0 ); + } + } + + my $sum_10_gnorm = 0.0; + my $min_gnorm = 9999999.0; + my $max_gnorm = -9999999.0; + my $avg_gnorm = 0.0; + + for( my $ctr = 9; $ctr >= 0; $ctr-- ) { + my $new_gnorm = pop( @gnorm_array ); + $sum_10_gnorm = $sum_10_gnorm + $new_gnorm; + if( $new_gnorm > $max_gnorm ) { + $max_gnorm = $new_gnorm; + } + if( $new_gnorm < $min_gnorm ) { + $min_gnorm = $new_gnorm; + } + if( $ctr == 9 ) { + $final_gnorm = $new_gnorm; + } + } + + $avg_gnorm = $sum_10_gnorm / 10; + + + ##################################################################### + # Update the gnorm_data.txt file with information on the + # initial gradient, final gnorm, and avg/min/max for the last 10 + # iterations. + ##################################################################### + updateGnormData( $cdate,$igrad,$final_gnorm,$avg_gnorm,$min_gnorm,$max_gnorm,$suffix ); + + + ##################################################################### + # Call makeErrMsg to build the error message file to record any + # abnormalities in the minimization. This file can be mailed by + # a calling script. + ##################################################################### + makeErrMsg( $suffix, $cdate, $final_gnorm, $stop_flag, $stop_iter, $reset_flag, \@reset_iter, $infile, $gross_check_val ); + + + ######################################################### + # write to GrADS ready output data file + # + # Note: this uses pack to achieve the same results as + # an unformatted binary Fortran file. + ######################################################### + my $filename2 = "${cdate}.gnorms.ieee_d"; + + open( OUTFILE, ">$filename2" ) or die "Can't open ${filename2}: $!\n"; + binmode OUTFILE; + + print OUTFILE pack( 'f*', @all_gnorm); + + close( OUTFILE ); + + #-------------------------- + # move files to $M_TANKverf + #-------------------------- + my $tankdir = $ENV{"M_TANKverfM0"}; + if(! -d $tankdir) { + system( "mkdir -p $tankdir" ); + } + + if( -e $filename2 ) { + system("cp -f $filename2 ${tankdir}/."); + } + + my $gdfile = "gnorm_data.txt"; + if( -e $gdfile ) { + system("cp -f $gdfile ${tankdir}/."); + } + + my $errmsg = "${cdate}.errmsg.txt"; + if( -e $errmsg ) { + system("cp -f $errmsg ${tankdir}/."); + } + + } # $rc still == 0 after reading gmon_gnorm.txt + +}else { # $infile does not exist + $rc = 3; +} + +print "$scr has ended, return code = $rc \n" diff --git a/ush/minmon_xtrct_reduct.pl b/ush/minmon_xtrct_reduct.pl new file mode 100755 index 0000000000..f6037d3f32 --- /dev/null +++ b/ush/minmon_xtrct_reduct.pl @@ -0,0 +1,89 @@ +#!/usr/bin/env perl + +use strict; + +#--------------------------------------------------------------------------- +# minmon_xtrct_reduct.pl +# +# Extract the reduction stats for a GSI minimization run and store in +# reduction.ieee_d files ready for GrADS use. 
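(Editor's aside: the two extractors are consistent by construction; the gnorm written above is the squared gradient ratio, and the reduction written by minmon_xtrct_reduct.pl below is its square root. A one-liner with made-up gradient values, initial gradient first, illustrates the relationship.)

  echo "4.0e+03 1.0e+03" | awk '{
    gnorm  = ($2 * $2) / ($1 * $1)   # grad[i]^2 / grad[0]^2, as in minmon_xtrct_gnorms.pl
    reduct = $2 / $1                 # as in minmon_xtrct_reduct.pl; equals sqrt(gnorm)
    printf "gnorm=%e  reduct=%e\n", gnorm, reduct
  }'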
+#--------------------------------------------------------------------------- + +if ($#ARGV != 4 ) { + print "usage: minmon_xtrct_reduct.pl SUFFIX pdy cyc infile jlogfile\n"; + print " suffix is data source identifier\n"; + print " pdy is YYYYMMDD of the cycle to be processed\n"; + print " cyc is HH of the cycle to be processed\n"; + print " infile is the data file containing the reduction stats\n"; + print " jlogfile is the job log file\n"; + exit; +} +my $suffix = $ARGV[0]; +my $pdy = $ARGV[1]; +my $cyc = $ARGV[2]; +my $infile = $ARGV[3]; +my $jlogfile = $ARGV[4]; + +my $scr = "minmon_xtrct_reduct.pl"; +print "$scr has started\n"; + +my $rc = 0; +my $cdate = sprintf '%s%s', $pdy, $cyc; +my $initial_gradient = -999.0; +my $iter_gradient; + +if( (-e $infile) ) { + + my $reduct_target = "cost,grad,step,b,step?"; + my $gradient_num = 5; + my $reduct; + + open( INFILE, "<${infile}" ) or die "Can't open ${infile}: $!\n"; + + my @reduct_array; + + while( my $line = ) { + if( $line =~ /$reduct_target/ ) { + my @reduct_ln = split( / +/, $line ); + $iter_gradient = $reduct_ln[$gradient_num]; + if( $initial_gradient == -999.0 ){ + $initial_gradient = $iter_gradient; + } + + $reduct = $iter_gradient / $initial_gradient; + + push( @reduct_array, $reduct ); + } + } + + close( INFILE ); + + + ################################# + # write reduct_array to outfile + ################################# + my $outfile = "${cdate}.reduction.ieee_d"; + open( OUTFILE, ">$outfile" ) or die "Can't open ${outfile}: $!\n"; + binmode OUTFILE; + + print OUTFILE pack( 'f*', @reduct_array); + close( OUTFILE ); + + #---------------------------- + # copy outfile to $M_TANKverf + #---------------------------- + my $tankdir = $ENV{"M_TANKverfM0"}; + if(! -d $tankdir) { + system( "mkdir -p $tankdir" ); + } + + if( -e $outfile ) { + my $newfile = "${tankdir}/${outfile}"; + system("cp -f $outfile $newfile"); + } + +} else { # $infile does not exist + $rc = 5; +} + +print "$scr has ended, return code = $rc \n" diff --git a/ush/mod_icec.sh b/ush/mod_icec.sh index bb8c22182f..96ccab9075 100755 --- a/ush/mod_icec.sh +++ b/ush/mod_icec.sh @@ -1,13 +1,15 @@ -#!/bin/sh -set -x +#! /usr/bin/env bash + #This script is used for modifing icee via land-sea mask #Wen Meng 11/2019: First Version +source "$HOMEgfs/ush/preamble.sh" + f=$1 -export WGRIB2=${WGRIB2:-${NWPROD:-/nwprod}/util/exec/wgrib2} +export WGRIB2=${WGRIB2:-${wgrib2_ROOT}/bin/wgrib2} -$WGRIB2 $optncpu $f \ +$WGRIB2 ${optncpu:-} $f \ -if 'LAND' -rpn 'sto_1' -fi \ -if 'ICEC' -rpn 'rcl_1:0:==:*' -fi \ -set_grib_type same \ @@ -17,7 +19,3 @@ export err=$?; err_chk mv $f.new $f exit 0 - -#-if 'ICEC' -rpn 'rcl_1:-1:*:1:+:*' -fi \ - - diff --git a/ush/module-setup.sh b/ush/module-setup.sh new file mode 100755 index 0000000000..9c27ab4f7c --- /dev/null +++ b/ush/module-setup.sh @@ -0,0 +1,107 @@ +#!/bin/bash +set -u + +if [[ ${MACHINE_ID} = jet* ]] ; then + # We are on NOAA Jet + if ( ! eval module help > /dev/null 2>&1 ) ; then + source /apps/lmod/lmod/init/bash + fi + export LMOD_SYSTEM_DEFAULT_MODULES=contrib + module reset + +elif [[ ${MACHINE_ID} = hera* ]] ; then + # We are on NOAA Hera + if ( ! eval module help > /dev/null 2>&1 ) ; then + source /apps/lmod/lmod/init/bash + fi + export LMOD_SYSTEM_DEFAULT_MODULES=contrib + module reset + +elif [[ ${MACHINE_ID} = orion* ]] ; then + # We are on Orion + if ( ! 
eval module help > /dev/null 2>&1 ) ; then + source /apps/lmod/init/bash + fi + export LMOD_SYSTEM_DEFAULT_MODULES=contrib + module reset + +elif [[ ${MACHINE_ID} = s4* ]] ; then + # We are on SSEC Wisconsin S4 + if ( ! eval module help > /dev/null 2>&1 ) ; then + source /usr/share/lmod/lmod/init/bash + fi + export LMOD_SYSTEM_DEFAULT_MODULES=license_intel + module reset + +elif [[ ${MACHINE_ID} = wcoss2 ]]; then + # We are on WCOSS2 + module reset + +elif [[ ${MACHINE_ID} = cheyenne* ]] ; then + # We are on NCAR Cheyenne + if ( ! eval module help > /dev/null 2>&1 ) ; then + source /glade/u/apps/ch/modulefiles/default/localinit/localinit.sh + fi + module purge + +elif [[ ${MACHINE_ID} = stampede* ]] ; then + # We are on TACC Stampede + if ( ! eval module help > /dev/null 2>&1 ) ; then + source /opt/apps/lmod/lmod/init/bash + fi + module purge + +elif [[ ${MACHINE_ID} = gaea* ]] ; then + # We are on GAEA. + if ( ! eval module help > /dev/null 2>&1 ) ; then + # We cannot simply load the module command. The GAEA + # /etc/profile modifies a number of module-related variables + # before loading the module command. Without those variables, + # the module command fails. Hence we actually have to source + # /etc/profile here. + source /etc/profile + __ms_source_etc_profile=yes + else + __ms_source_etc_profile=no + fi + module purge + # clean up after purge + unset _LMFILES_ + unset _LMFILES_000 + unset _LMFILES_001 + unset LOADEDMODULES + module load modules + if [[ -d /opt/cray/ari/modulefiles ]] ; then + module use -a /opt/cray/ari/modulefiles + fi + if [[ -d /opt/cray/pe/ari/modulefiles ]] ; then + module use -a /opt/cray/pe/ari/modulefiles + fi + if [[ -d /opt/cray/pe/craype/default/modulefiles ]] ; then + module use -a /opt/cray/pe/craype/default/modulefiles + fi + if [[ -s /etc/opt/cray/pe/admin-pe/site-config ]] ; then + source /etc/opt/cray/pe/admin-pe/site-config + fi + if [[ "${__ms_source_etc_profile}" == yes ]] ; then + source /etc/profile + unset __ms_source_etc_profile + fi + +elif [[ ${MACHINE_ID} = expanse* ]]; then + # We are on SDSC Expanse + if ( ! eval module help > /dev/null 2>&1 ) ; then + source /etc/profile.d/modules.sh + fi + module purge + module load slurm/expanse/20.02.3 + +elif [[ ${MACHINE_ID} = discover* ]]; then + # We are on NCCS discover + export SPACK_ROOT=/discover/nobackup/mapotts1/spack + export PATH=${PATH}:${SPACK_ROOT}/bin + . 
"${SPACK_ROOT}"/share/spack/setup-env.sh + +else + echo WARNING: UNKNOWN PLATFORM 1>&2 +fi diff --git a/ush/nems.configure.atm.IN b/ush/nems.configure.atm.IN index 0d95533fa3..c74fe38128 100644 --- a/ush/nems.configure.atm.IN +++ b/ush/nems.configure.atm.IN @@ -1,8 +1,12 @@ # ESMF # -logKindFlag: ESMF_LOGKIND_MULTI +logKindFlag: @[esmf_logkind] +globalResourceControl: true EARTH_component_list: ATM -ATM_model: fv3 +ATM_model: @[atm_model] +ATM_petlist_bounds: @[atm_petlist_bounds] +ATM_omp_num_threads: @[atm_omp_num_threads] + runSeq:: ATM :: diff --git a/ush/nems.configure.atm_aero.IN b/ush/nems.configure.atm_aero.IN index b253255106..dcce57b048 100644 --- a/ush/nems.configure.atm_aero.IN +++ b/ush/nems.configure.atm_aero.IN @@ -3,7 +3,8 @@ ############################################# # ESMF # - logKindFlag: ESMF_LOGKIND_MULTI +logKindFlag: @[esmf_logkind] +globalResourceControl: true # EARTH # EARTH_component_list: ATM CHM @@ -14,6 +15,7 @@ EARTH_attributes:: # ATM # ATM_model: @[atm_model] ATM_petlist_bounds: @[atm_petlist_bounds] +ATM_omp_num_threads: @[atm_omp_num_threads] ATM_attributes:: Verbosity = max :: @@ -21,6 +23,7 @@ ATM_attributes:: # CHM # CHM_model: @[chm_model] CHM_petlist_bounds: @[chm_petlist_bounds] +CHM_omp_num_threads: @[chm_omp_num_threads] CHM_attributes:: Verbosity = max :: diff --git a/ush/nems.configure.blocked_atm_wav.IN b/ush/nems.configure.blocked_atm_wav.IN index 4435cf793e..9aeaefa875 100644 --- a/ush/nems.configure.blocked_atm_wav.IN +++ b/ush/nems.configure.blocked_atm_wav.IN @@ -3,7 +3,8 @@ ############################################# # ESMF # - logKindFlag: ESMF_LOGKIND_MULTI +logKindFlag: @[esmf_logkind] +globalResourceControl: true # EARTH # EARTH_component_list: ATM WAV @@ -14,6 +15,7 @@ EARTH_attributes:: # ATM # ATM_model: @[atm_model] ATM_petlist_bounds: @[atm_petlist_bounds] +ATM_omp_num_threads: @[atm_omp_num_threads] ATM_attributes:: Verbosity = max DumpFields = true @@ -22,6 +24,7 @@ ATM_attributes:: # WAV # WAV_model: @[wav_model] WAV_petlist_bounds: @[wav_petlist_bounds] +WAV_omp_num_threads: @[wav_omp_num_threads] WAV_attributes:: Verbosity = max :: @@ -31,7 +34,7 @@ WAV_attributes:: # Run Sequence # runSeq:: @@[coupling_interval_sec] - ATM -> WAV + ATM -> WAV ATM WAV @ diff --git a/ush/nems.configure.cpld.IN b/ush/nems.configure.cpld.IN index e38f5774a0..abc9091c4e 100644 --- a/ush/nems.configure.cpld.IN +++ b/ush/nems.configure.cpld.IN @@ -3,7 +3,8 @@ ############################################# # ESMF # -logKindFlag: ESMF_LOGKIND_MULTI +logKindFlag: @[esmf_logkind] +globalResourceControl: true # EARTH # EARTH_component_list: MED ATM OCN ICE @@ -14,11 +15,13 @@ EARTH_attributes:: # MED # MED_model: @[med_model] MED_petlist_bounds: @[med_petlist_bounds] +MED_omp_num_threads: @[med_omp_num_threads] :: # ATM # ATM_model: @[atm_model] ATM_petlist_bounds: @[atm_petlist_bounds] +ATM_omp_num_threads: @[atm_omp_num_threads] ATM_attributes:: Verbosity = 0 DumpFields = @[DumpFields] @@ -29,6 +32,7 @@ ATM_attributes:: # OCN # OCN_model: @[ocn_model] OCN_petlist_bounds: @[ocn_petlist_bounds] +OCN_omp_num_threads: @[ocn_omp_num_threads] OCN_attributes:: Verbosity = 0 DumpFields = @[DumpFields] @@ -40,6 +44,7 @@ OCN_attributes:: # ICE # ICE_model: @[ice_model] ICE_petlist_bounds: @[ice_petlist_bounds] +ICE_omp_num_threads: @[ice_omp_num_threads] ICE_attributes:: Verbosity = 0 DumpFields = @[DumpFields] diff --git a/ush/nems.configure.cpld_aero_outerwave.IN b/ush/nems.configure.cpld_aero_outerwave.IN new file mode 100644 index 0000000000..3b25faa268 
--- /dev/null +++ b/ush/nems.configure.cpld_aero_outerwave.IN @@ -0,0 +1,148 @@ +############################################# +#### NEMS Run-Time Configuration File ##### +############################################# + +# ESMF # +logKindFlag: @[esmf_logkind] +globalResourceControl: true + +# EARTH # +EARTH_component_list: MED ATM CHM OCN ICE WAV +EARTH_attributes:: + Verbosity = 0 +:: + +# MED # +MED_model: @[med_model] +MED_petlist_bounds: @[med_petlist_bounds] +MED_omp_num_threads: @[med_omp_num_threads] +:: + +# ATM # +ATM_model: @[atm_model] +ATM_petlist_bounds: @[atm_petlist_bounds] +ATM_omp_num_threads: @[atm_omp_num_threads] +ATM_attributes:: + Verbosity = 0 + DumpFields = @[DumpFields] + ProfileMemory = false + OverwriteSlice = true +:: + +# CHM # +CHM_model: @[chm_model] +CHM_petlist_bounds: @[chm_petlist_bounds] +CHM_omp_num_threads: @[chm_omp_num_threads] +CHM_attributes:: + Verbosity = 0 +:: + +# OCN # +OCN_model: @[ocn_model] +OCN_petlist_bounds: @[ocn_petlist_bounds] +OCN_omp_num_threads: @[ocn_omp_num_threads] +OCN_attributes:: + Verbosity = 0 + DumpFields = @[DumpFields] + ProfileMemory = false + OverwriteSlice = true + mesh_ocn = @[MESH_OCN_ICE] +:: + +# ICE # +ICE_model: @[ice_model] +ICE_petlist_bounds: @[ice_petlist_bounds] +ICE_omp_num_threads: @[ice_omp_num_threads] +ICE_attributes:: + Verbosity = 0 + DumpFields = @[DumpFields] + ProfileMemory = false + OverwriteSlice = true + mesh_ice = @[MESH_OCN_ICE] + stop_n = @[RESTART_N] + stop_option = nhours + stop_ymd = -999 +:: + +# WAV # +WAV_model: @[wav_model] +WAV_petlist_bounds: @[wav_petlist_bounds] +WAV_omp_num_threads: @[wav_omp_num_threads] +WAV_attributes:: + Verbosity = 0 + OverwriteSlice = false + diro = "." + logfile = wav.log + mesh_wav = @[MESH_WAV] + multigrid = @[MULTIGRID] +:: + +# CMEPS warm run sequence +runSeq:: +@@[coupling_interval_slow_sec] + MED med_phases_prep_wav_avg + MED med_phases_prep_ocn_avg + MED -> WAV :remapMethod=redist + MED -> OCN :remapMethod=redist + WAV + OCN + @@[coupling_interval_fast_sec] + MED med_phases_prep_atm + MED med_phases_prep_ice + MED -> ATM :remapMethod=redist + MED -> ICE :remapMethod=redist + ATM phase1 + ATM -> CHM + CHM + CHM -> ATM + ATM phase2 + ICE + ATM -> MED :remapMethod=redist + MED med_phases_post_atm + ICE -> MED :remapMethod=redist + MED med_phases_post_ice + MED med_phases_prep_ocn_accum + MED med_phases_prep_wav_accum + @ + OCN -> MED :remapMethod=redist + WAV -> MED :remapMethod=redist + MED med_phases_post_ocn + MED med_phases_post_wav + MED med_phases_restart_write +@ +:: + +# CMEPS variables + +DRIVER_attributes:: +:: +MED_attributes:: + ATM_model = @[atm_model] + ICE_model = @[ice_model] + OCN_model = @[ocn_model] + WAV_model = @[wav_model] + history_n = 0 + history_option = nhours + history_ymd = -999 + coupling_mode = @[CPLMODE] + history_tile_atm = @[ATMTILESIZE] +:: +ALLCOMP_attributes:: + ScalarFieldCount = 2 + ScalarFieldIdxGridNX = 1 + ScalarFieldIdxGridNY = 2 + ScalarFieldName = cpl_scalars + start_type = @[RUNTYPE] + restart_dir = RESTART/ + case_name = ufs.cpld + restart_n = @[RESTART_N] + restart_option = nhours + restart_ymd = -999 + dbug_flag = @[cap_dbug_flag] + use_coldstart = @[use_coldstart] + use_mommesh = @[use_mommesh] + eps_imesh = @[eps_imesh] + stop_n = @[FHMAX] + stop_option = nhours + stop_ymd = -999 +:: diff --git a/ush/nems.configure.cpld_aero_wave.IN b/ush/nems.configure.cpld_aero_wave.IN index 166a51ae09..6b886b0626 100644 --- a/ush/nems.configure.cpld_aero_wave.IN +++ b/ush/nems.configure.cpld_aero_wave.IN @@ -3,7 
+3,8 @@ ############################################# # ESMF # - logKindFlag: ESMF_LOGKIND_MULTI +logKindFlag: @[esmf_logkind] +globalResourceControl: true # EARTH # EARTH_component_list: MED ATM CHM OCN ICE WAV @@ -14,11 +15,13 @@ EARTH_attributes:: # MED # MED_model: @[med_model] MED_petlist_bounds: @[med_petlist_bounds] +MED_omp_num_threads: @[med_omp_num_threads] :: # ATM # ATM_model: @[atm_model] ATM_petlist_bounds: @[atm_petlist_bounds] +ATM_omp_num_threads: @[atm_omp_num_threads] ATM_attributes:: Verbosity = 0 DumpFields = @[DumpFields] @@ -29,6 +32,7 @@ ATM_attributes:: # CHM # CHM_model: @[chm_model] CHM_petlist_bounds: @[chm_petlist_bounds] +CHM_omp_num_threads: @[chm_omp_num_threads] CHM_attributes:: Verbosity = 0 :: @@ -36,6 +40,7 @@ CHM_attributes:: # OCN # OCN_model: @[ocn_model] OCN_petlist_bounds: @[ocn_petlist_bounds] +OCN_omp_num_threads: @[ocn_omp_num_threads] OCN_attributes:: Verbosity = 0 DumpFields = @[DumpFields] @@ -47,6 +52,7 @@ OCN_attributes:: # ICE # ICE_model: @[ice_model] ICE_petlist_bounds: @[ice_petlist_bounds] +ICE_omp_num_threads: @[ice_omp_num_threads] ICE_attributes:: Verbosity = 0 DumpFields = @[DumpFields] @@ -61,9 +67,14 @@ ICE_attributes:: # WAV # WAV_model: @[wav_model] WAV_petlist_bounds: @[wav_petlist_bounds] +WAV_omp_num_threads: @[wav_omp_num_threads] WAV_attributes:: Verbosity = 0 OverwriteSlice = false + diro = "." + logfile = wav.log + mesh_wav = @[MESH_WAV] + multigrid = @[MULTIGRID] :: # CMEPS warm run sequence @@ -71,17 +82,15 @@ runSeq:: @@[coupling_interval_slow_sec] MED med_phases_prep_ocn_avg MED -> OCN :remapMethod=redist - OCN -> WAV - WAV -> OCN :srcMaskValues=1 OCN @@[coupling_interval_fast_sec] MED med_phases_prep_atm MED med_phases_prep_ice + MED med_phases_prep_wav_accum + MED med_phases_prep_wav_avg MED -> ATM :remapMethod=redist MED -> ICE :remapMethod=redist - WAV -> ATM :srcMaskValues=1 - ATM -> WAV - ICE -> WAV + MED -> WAV :remapMethod=redist ATM phase1 ATM -> CHM CHM @@ -93,6 +102,8 @@ runSeq:: MED med_phases_post_atm ICE -> MED :remapMethod=redist MED med_phases_post_ice + WAV -> MED :remapMethod=redist + MED med_phases_post_wav MED med_phases_prep_ocn_accum @ OCN -> MED :remapMethod=redist @@ -109,6 +120,7 @@ MED_attributes:: ATM_model = @[atm_model] ICE_model = @[ice_model] OCN_model = @[ocn_model] + WAV_model = @[wav_model] history_n = 0 history_option = nhours history_ymd = -999 diff --git a/ush/nems.configure.cpld_outerwave.IN b/ush/nems.configure.cpld_outerwave.IN new file mode 100644 index 0000000000..ec30d132a7 --- /dev/null +++ b/ush/nems.configure.cpld_outerwave.IN @@ -0,0 +1,136 @@ +############################################# +#### NEMS Run-Time Configuration File ##### +############################################# + +# ESMF # +logKindFlag: @[esmf_logkind] +globalResourceControl: true + +# EARTH # +EARTH_component_list: MED ATM OCN ICE WAV +EARTH_attributes:: + Verbosity = 0 +:: + +# MED # +MED_model: @[med_model] +MED_petlist_bounds: @[med_petlist_bounds] +MED_omp_num_threads: @[med_omp_num_threads] +:: + +# ATM # +ATM_model: @[atm_model] +ATM_petlist_bounds: @[atm_petlist_bounds] +ATM_omp_num_threads: @[atm_omp_num_threads] +ATM_attributes:: + Verbosity = 0 + DumpFields = @[DumpFields] + ProfileMemory = false + OverwriteSlice = true +:: + +# OCN # +OCN_model: @[ocn_model] +OCN_petlist_bounds: @[ocn_petlist_bounds] +OCN_omp_num_threads: @[ocn_omp_num_threads] +OCN_attributes:: + Verbosity = 0 + DumpFields = @[DumpFields] + ProfileMemory = false + OverwriteSlice = true + mesh_ocn = @[MESH_OCN_ICE] +:: + +# 
ICE # +ICE_model: @[ice_model] +ICE_petlist_bounds: @[ice_petlist_bounds] +ICE_omp_num_threads: @[ice_omp_num_threads] +ICE_attributes:: + Verbosity = 0 + DumpFields = @[DumpFields] + ProfileMemory = false + OverwriteSlice = true + mesh_ice = @[MESH_OCN_ICE] + stop_n = @[RESTART_N] + stop_option = nhours + stop_ymd = -999 +:: + +# WAV # +WAV_model: @[wav_model] +WAV_petlist_bounds: @[wav_petlist_bounds] +WAV_omp_num_threads: @[wav_omp_num_threads] +WAV_attributes:: + Verbosity = 0 + OverwriteSlice = false + diro = "." + logfile = wav.log + mesh_wav = @[MESH_WAV] + multigrid = @[MULTIGRID] +:: + +# CMEPS warm run sequence +runSeq:: +@@[coupling_interval_slow_sec] + MED med_phases_prep_wav_avg + MED med_phases_prep_ocn_avg + MED -> WAV :remapMethod=redist + MED -> OCN :remapMethod=redist + WAV + OCN + @@[coupling_interval_fast_sec] + MED med_phases_prep_atm + MED med_phases_prep_ice + MED -> ATM :remapMethod=redist + MED -> ICE :remapMethod=redist + ATM + ICE + ATM -> MED :remapMethod=redist + MED med_phases_post_atm + ICE -> MED :remapMethod=redist + MED med_phases_post_ice + MED med_phases_prep_ocn_accum + MED med_phases_prep_wav_accum + @ + OCN -> MED :remapMethod=redist + WAV -> MED :remapMethod=redist + MED med_phases_post_ocn + MED med_phases_post_wav + MED med_phases_restart_write +@ +:: + +# CMEPS variables + +DRIVER_attributes:: +:: +MED_attributes:: + ATM_model = @[atm_model] + ICE_model = @[ice_model] + OCN_model = @[ocn_model] + WAV_model = @[wav_model] + history_n = 0 + history_option = nhours + history_ymd = -999 + coupling_mode = @[CPLMODE] + history_tile_atm = @[ATMTILESIZE] +:: +ALLCOMP_attributes:: + ScalarFieldCount = 2 + ScalarFieldIdxGridNX = 1 + ScalarFieldIdxGridNY = 2 + ScalarFieldName = cpl_scalars + start_type = @[RUNTYPE] + restart_dir = RESTART/ + case_name = ufs.cpld + restart_n = @[RESTART_N] + restart_option = nhours + restart_ymd = -999 + dbug_flag = @[cap_dbug_flag] + use_coldstart = @[use_coldstart] + use_mommesh = @[use_mommesh] + eps_imesh = @[eps_imesh] + stop_n = @[FHMAX] + stop_option = nhours + stop_ymd = -999 +:: diff --git a/ush/nems.configure.cpld_wave.IN b/ush/nems.configure.cpld_wave.IN index d99d1c3bf4..f2843a5b2c 100644 --- a/ush/nems.configure.cpld_wave.IN +++ b/ush/nems.configure.cpld_wave.IN @@ -3,7 +3,8 @@ ############################################# # ESMF # -logKindFlag: ESMF_LOGKIND_MULTI +logKindFlag: @[esmf_logkind] +globalResourceControl: true # EARTH # EARTH_component_list: MED ATM OCN ICE WAV @@ -14,11 +15,13 @@ EARTH_attributes:: # MED # MED_model: @[med_model] MED_petlist_bounds: @[med_petlist_bounds] +MED_omp_num_threads: @[med_omp_num_threads] :: # ATM # ATM_model: @[atm_model] ATM_petlist_bounds: @[atm_petlist_bounds] +ATM_omp_num_threads: @[atm_omp_num_threads] ATM_attributes:: Verbosity = 0 DumpFields = @[DumpFields] @@ -29,6 +32,7 @@ ATM_attributes:: # OCN # OCN_model: @[ocn_model] OCN_petlist_bounds: @[ocn_petlist_bounds] +OCN_omp_num_threads: @[ocn_omp_num_threads] OCN_attributes:: Verbosity = 0 DumpFields = @[DumpFields] @@ -40,6 +44,7 @@ OCN_attributes:: # ICE # ICE_model: @[ice_model] ICE_petlist_bounds: @[ice_petlist_bounds] +ICE_omp_num_threads: @[ice_omp_num_threads] ICE_attributes:: Verbosity = 0 DumpFields = @[DumpFields] @@ -54,9 +59,14 @@ ICE_attributes:: # WAV # WAV_model: @[wav_model] WAV_petlist_bounds: @[wav_petlist_bounds] +WAV_omp_num_threads: @[wav_omp_num_threads] WAV_attributes:: Verbosity = 0 OverwriteSlice = false + diro = "." 
+ logfile = wav.log + mesh_wav = @[MESH_WAV] + multigrid = @[MULTIGRID] :: # CMEPS warm run sequence @@ -64,17 +74,15 @@ runSeq:: @@[coupling_interval_slow_sec] MED med_phases_prep_ocn_avg MED -> OCN :remapMethod=redist - OCN -> WAV - WAV -> OCN :srcMaskValues=1 OCN @@[coupling_interval_fast_sec] MED med_phases_prep_atm MED med_phases_prep_ice + MED med_phases_prep_wav_accum + MED med_phases_prep_wav_avg MED -> ATM :remapMethod=redist MED -> ICE :remapMethod=redist - WAV -> ATM :srcMaskValues=1 - ATM -> WAV - ICE -> WAV + MED -> WAV :remapMethod=redist ATM ICE WAV @@ -82,6 +90,8 @@ runSeq:: MED med_phases_post_atm ICE -> MED :remapMethod=redist MED med_phases_post_ice + WAV -> MED :remapMethod=redist + MED med_phases_post_wav MED med_phases_prep_ocn_accum @ OCN -> MED :remapMethod=redist @@ -98,6 +108,7 @@ MED_attributes:: ATM_model = @[atm_model] ICE_model = @[ice_model] OCN_model = @[ocn_model] + WAV_model = @[wav_model] history_n = 0 history_option = nhours history_ymd = -999 diff --git a/ush/nems.configure.leapfrog_atm_wav.IN b/ush/nems.configure.leapfrog_atm_wav.IN index 5c01c08a4f..b302a27e8a 100644 --- a/ush/nems.configure.leapfrog_atm_wav.IN +++ b/ush/nems.configure.leapfrog_atm_wav.IN @@ -3,7 +3,8 @@ ############################################# # ESMF # - logKindFlag: ESMF_LOGKIND_MULTI +logKindFlag: @[esmf_logkind] +globalResourceControl: true # EARTH # EARTH_component_list: ATM WAV @@ -14,6 +15,7 @@ EARTH_attributes:: # ATM # ATM_model: @[atm_model] ATM_petlist_bounds: @[atm_petlist_bounds] +ATM_omp_num_threads: @[atm_omp_num_threads] ATM_attributes:: Verbosity = max DumpFields = true @@ -22,6 +24,7 @@ ATM_attributes:: # WAV # WAV_model: @[wav_model] WAV_petlist_bounds: @[wav_petlist_bounds] +WAV_omp_num_threads: @[wav_omp_num_threads] WAV_attributes:: Verbosity = max :: @@ -32,7 +35,7 @@ WAV_attributes:: runSeq:: @@[coupling_interval_slow_sec] ATM - ATM -> WAV + ATM -> WAV WAV @ :: diff --git a/ush/nems_configure.sh b/ush/nems_configure.sh index d8de1d849d..7645c9e76b 100755 --- a/ush/nems_configure.sh +++ b/ush/nems_configure.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#! /usr/bin/env bash ##### ## This script writes nems.configure file @@ -7,114 +7,138 @@ ## $cpl** switches. ## ## This is a child script of modular -## forecast script. This script is definition only +## forecast script. This script is definition only (Is it? There is nothing defined here being used outside this script.) ##### writing_nems_configure() { echo "SUB ${FUNCNAME[0]}: parsing_nems_configure begins" -if [ -e $SCRIPTDIR/nems.configure ]; then - rm -f $SCRIPTDIR/nems.configure +if [[ -e "${SCRIPTDIR}/nems.configure" ]]; then + rm -f "${SCRIPTDIR}/nems.configure" fi # Setup nems.configure -DumpFields=${NEMSDumpFields:-false} -cap_dbug_flag=${cap_dbug_flag:-0} -if [ $warm_start = ".true." 
]; then - cmeps_run_type='continue' +local DumpFields=${NEMSDumpFields:-false} +local cap_dbug_flag=${cap_dbug_flag:-0} +# Determine "cmeps_run_type" based on the availability of the mediator restart file +# If it is a warm_start, we already copied the mediator restart to DATA, if it was present +# If the mediator restart was not present, despite being a "warm_start", we put out a WARNING +# in forecast_postdet.sh +if [[ -f "${DATA}/ufs.cpld.cpl.r.nc" ]]; then + local cmeps_run_type='continue' else - cmeps_run_type='startup' -fi -restart_interval=${restart_interval:-3024000} # Interval in seconds to write restarts - -ATM_model=${ATM_model:-'fv3'} -OCN_model=${OCN_model:-'mom6'} -ICE_model=${ICE_model:-'cice'} -WAV_model=${WAV_model:-'ww3'} -CHM_model=${CHM_model:-'gocart'} - -ATMPETS=${ATMPETS:-8} -MEDPETS=${MEDPETS:-8} -OCNPETS=${OCNPETS:-0} -ICEPETS=${ICEPETS:-0} -WAVPETS=${WAVPETS:-0} -CHMPETS=${CHMPETS:-${ATMPETS}} - -USE_MOMMESH=${USE_MOMMESH:-"true"} -MESH_OCN_ICE=${MESH_OCN_ICE:-"mesh.mx${ICERES}.nc"} - -if [[ $OCNRES = "100" ]]; then - EPS_IMESH='2.5e-1' -else - EPS_IMESH='1.0e-1' + local cmeps_run_type='startup' fi +local res_int=${restart_interval:-3024000} # Interval in seconds to write restarts -rm -f $DATA/nems.configure +rm -f "${DATA}/nems.configure" -med_petlist_bounds=${med_petlist_bounds:-"0 $(( $MEDPETS-1 ))"} -atm_petlist_bounds=${atm_petlist_bounds:-"0 $(( $ATMPETS-1 ))"} -ocn_petlist_bounds=${ocn_petlist_bounds:-"$ATMPETS $(( $ATMPETS+$OCNPETS-1 ))"} -ice_petlist_bounds=${ice_petlist_bounds:-"$(( $ATMPETS+$OCNPETS )) $(( $ATMPETS+$OCNPETS+$ICEPETS-1 ))"} -wav_petlist_bounds=${wav_petlist_bounds:-"$(( $ATMPETS+$OCNPETS+$ICEPETS )) $(( $ATMPETS+$OCNPETS+$ICEPETS+$WAVPETS-1 ))"} -chm_petlist_bounds=${chm_petlist_bounds:-"0 $(( $CHMPETS-1 ))"} +local esmf_logkind=${esmf_logkind:-"ESMF_LOGKIND_MULTI"} #options: ESMF_LOGKIND_MULTI_ON_ERROR, ESMF_LOGKIND_MULTI, ESMF_LOGKIND_NONE # Copy the selected template into run directory -infile="$SCRIPTDIR/nems.configure.$confignamevarfornems.IN" -if [ -s $infile ]; then - cp $infile tmp1 +infile="${SCRIPTDIR}/nems.configure.${confignamevarfornems}.IN" +if [[ -s ${infile} ]]; then + cp "${infile}" tmp1 else - echo "FATAL ERROR: nem.configure template '$infile' does not exist!" + echo "FATAL ERROR: nem.configure template '${infile}' does not exist!" exit 1 fi + +local atm_petlist_bounds="0 $(( ${ATMPETS}-1 ))" +local med_petlist_bounds="0 $(( ${MEDPETS}-1 ))" + +sed -i -e "s;@\[atm_model\];fv3;g" tmp1 +sed -i -e "s;@\[atm_petlist_bounds\];${atm_petlist_bounds};g" tmp1 +sed -i -e "s;@\[atm_omp_num_threads\];${ATMTHREADS};g" tmp1 sed -i -e "s;@\[med_model\];cmeps;g" tmp1 -sed -i -e "s;@\[atm_model\];$ATM_model;g" tmp1 -sed -i -e "s;@\[med_petlist_bounds\];$med_petlist_bounds;g" tmp1 -sed -i -e "s;@\[atm_petlist_bounds\];$atm_petlist_bounds;g" tmp1 +sed -i -e "s;@\[med_petlist_bounds\];${med_petlist_bounds};g" tmp1 +sed -i -e "s;@\[med_omp_num_threads\];${MEDTHREADS};g" tmp1 +sed -i -e "s;@\[esmf_logkind\];${esmf_logkind};g" tmp1 -if [ $cpl = ".true." ]; then - sed -i -e "s;@\[coupling_interval_slow_sec\];$CPL_SLOW;g" tmp1 +if [[ "${cpl}" = ".true." ]]; then + sed -i -e "s;@\[coupling_interval_slow_sec\];${CPL_SLOW};g" tmp1 fi -if [ $cplflx = .true. ]; then - if [ $restart_interval -gt 0 ]; then - restart_interval_nems=$restart_interval +if [[ "${cplflx}" = ".true." 
]]; then + if [[ ${res_int} -gt 0 ]]; then + local restart_interval_nems=${res_int} else - restart_interval_nems=$FHMAX + local restart_interval_nems=${FHMAX} fi - sed -i -e "s;@\[ocn_model\];$OCN_model;g" tmp1 - sed -i -e "s;@\[ocn_petlist_bounds\];$ocn_petlist_bounds;g" tmp1 - sed -i -e "s;@\[DumpFields\];$DumpFields;g" tmp1 - sed -i -e "s;@\[cap_dbug_flag\];$cap_dbug_flag;g" tmp1 - sed -i -e "s;@\[use_coldstart\];$use_coldstart;g" tmp1 - sed -i -e "s;@\[RUNTYPE\];$cmeps_run_type;g" tmp1 - sed -i -e "s;@\[CPLMODE\];$cplmode;g" tmp1 - sed -i -e "s;@\[restart_interval\];$restart_interval;g" tmp1 - sed -i -e "s;@\[coupling_interval_fast_sec\];$CPL_FAST;g" tmp1 - sed -i -e "s;@\[RESTART_N\];$restart_interval_nems;g" tmp1 - sed -i -e "s;@\[use_mommesh\];$USE_MOMMESH;g" tmp1 - sed -i -e "s;@\[eps_imesh\];$EPS_IMESH;g" tmp1 - sed -i -e "s;@\[ATMTILESIZE\];$RESTILE;g" tmp1 + + # TODO: Should this be raised up to config.ufs or config.ocn? + case "${OCNRES}" in + "500") local eps_imesh="4.0e-1";; + "100") local eps_imesh="2.5e-1";; + *) local eps_imesh="1.0e-1";; + esac + + local use_coldstart=${use_coldstart:-".false."} + local use_mommesh=${USE_MOMMESH:-"true"} + local restile=$(echo "${CASE}" |cut -c2-) + + local start="${ATMPETS}" + local end="$(( ${start}+${OCNPETS}-1 ))" + local ocn_petlist_bounds="${start} ${end}" + + sed -i -e "s;@\[ocn_model\];mom6;g" tmp1 + sed -i -e "s;@\[ocn_petlist_bounds\];${ocn_petlist_bounds};g" tmp1 + sed -i -e "s;@\[ocn_omp_num_threads\];${OCNTHREADS};g" tmp1 + sed -i -e "s;@\[DumpFields\];${DumpFields};g" tmp1 + sed -i -e "s;@\[cap_dbug_flag\];${cap_dbug_flag};g" tmp1 + sed -i -e "s;@\[use_coldstart\];${use_coldstart};g" tmp1 + sed -i -e "s;@\[RUNTYPE\];${cmeps_run_type};g" tmp1 + sed -i -e "s;@\[CPLMODE\];${cplmode};g" tmp1 + sed -i -e "s;@\[restart_interval\];${res_int};g" tmp1 + sed -i -e "s;@\[coupling_interval_fast_sec\];${CPL_FAST};g" tmp1 + sed -i -e "s;@\[RESTART_N\];${restart_interval_nems};g" tmp1 + sed -i -e "s;@\[use_mommesh\];${use_mommesh};g" tmp1 + sed -i -e "s;@\[eps_imesh\];${eps_imesh};g" tmp1 + sed -i -e "s;@\[ATMTILESIZE\];${restile};g" tmp1 fi -if [ $cplwav = .true. ]; then - sed -i -e "s;@\[wav_model\];ww3;g" tmp1 - sed -i -e "s;@\[wav_petlist_bounds\];$wav_petlist_bounds;g" tmp1 + +if [[ "${cplice}" = ".true." ]]; then + + local mesh_ocn_ice=${MESH_OCN_ICE:-"mesh.mx${ICERES}.nc"} + + local start="$(( ${ATMPETS}+${OCNPETS} ))" + local end="$(( ${start}+${ICEPETS}-1 ))" + local ice_petlist_bounds="${start} ${end}" + + sed -i -e "s;@\[ice_model\];cice6;g" tmp1 + sed -i -e "s;@\[ice_petlist_bounds\];${ice_petlist_bounds};g" tmp1 + sed -i -e "s;@\[ice_omp_num_threads\];${ICETHREADS};g" tmp1 + sed -i -e "s;@\[MESH_OCN_ICE\];${mesh_ocn_ice};g" tmp1 + sed -i -e "s;@\[FHMAX\];${FHMAX_GFS};g" tmp1 fi -if [ $cplice = .true. ]; then - sed -i -e "s;@\[ice_model\];$ICE_model;g" tmp1 - sed -i -e "s;@\[ice_petlist_bounds\];$ice_petlist_bounds;g" tmp1 - sed -i -e "s;@\[MESH_OCN_ICE\];$MESH_OCN_ICE;g" tmp1 - sed -i -e "s;@\[FHMAX\];$FHMAX_GFS;g" tmp1 + +if [[ "${cplwav}" = ".true." ]]; then + + local start="$(( ${ATMPETS}+${OCNPETS:-0}+${ICEPETS:-0} ))" + local end="$(( ${start}+${WAVPETS}-1 ))" + local wav_petlist_bounds="${start} ${end}" + + sed -i -e "s;@\[wav_model\];ww3;g" tmp1 + sed -i -e "s;@\[wav_petlist_bounds\];${wav_petlist_bounds};g" tmp1 + sed -i -e "s;@\[wav_omp_num_threads\];${WAVTHREADS};g" tmp1 + sed -i -e "s;@\[MESH_WAV\];${MESH_WAV};g" tmp1 + sed -i -e "s;@\[MULTIGRID\];${waveMULTIGRID};g" tmp1 fi -if [ $cplchm = .true. 
]; then - sed -i -e "s;@\[chm_model\];$CHM_model;g" tmp1 - sed -i -e "s;@\[chm_petlist_bounds\];$chm_petlist_bounds;g" tmp1 - sed -i -e "s;@\[coupling_interval_fast_sec\];$CPL_FAST;g" tmp1 + +if [[ "${cplchm}" = ".true." ]]; then + + local chm_petlist_bounds="0 $(( ${CHMPETS}-1 ))" + + sed -i -e "s;@\[chm_model\];gocart;g" tmp1 + sed -i -e "s;@\[chm_petlist_bounds\];${chm_petlist_bounds};g" tmp1 + sed -i -e "s;@\[chm_omp_num_threads\];${CHMTHREADS};g" tmp1 + sed -i -e "s;@\[coupling_interval_fast_sec\];${CPL_FAST};g" tmp1 fi mv tmp1 nems.configure echo "$(cat nems.configure)" -if [ $cplflx = .true. ]; then +if [[ "${cplflx}" = ".true." ]]; then #Create other CMEPS mediator related files cat > pio_in << EOF @@ -171,8 +195,8 @@ echo "$(cat med_modelio.nml)" fi -cp $HOMEgfs/sorc/ufs_model.fd/tests/parm/fd_nems.yaml fd_nems.yaml +${NCP} "${HOMEgfs}/sorc/ufs_model.fd/tests/parm/fd_nems.yaml" fd_nems.yaml -echo "SUB ${FUNCNAME[0]}: Nems configured for $confignamevarfornems" +echo "SUB ${FUNCNAME[0]}: Nems configured for ${confignamevarfornems}" } diff --git a/ush/ocnpost.ncl b/ush/ocnpost.ncl index 81f24673fc..27e60b0edf 100755 --- a/ush/ocnpost.ncl +++ b/ush/ocnpost.ncl @@ -93,7 +93,8 @@ begin ; pull from environment COMDIR = getenv("COMOUTocean") IDATE = getenv("IDATE") - FHR2 = getenv("FHR") + VDATE = getenv("VDATE") + FHR2 = getenv("FHR") FHR=FHR2 ENSMEM = getenv("ENSMEM") DATA_TMP = getenv("DATA") @@ -101,7 +102,7 @@ begin ; nemsrc = "/scratch2/NCEPDEV/climate/Bin.Li/S2S/fix/ocean_ice_post/FIXDIR/" ; calculate and break apart verification date - VDATE = tochar(systemfunc("$NDATE "+FHR+" "+IDATE)) + ; VDATE = tochar(systemfunc("$NDATE "+FHR+" "+IDATE)) ; YYYY = tostring(VDATE(0:3)) ; MM = tostring(VDATE(4:5)) ; DD = tostring(VDATE(6:7)) diff --git a/ush/ozn_xtrct.sh b/ush/ozn_xtrct.sh new file mode 100755 index 0000000000..3f6b3fed19 --- /dev/null +++ b/ush/ozn_xtrct.sh @@ -0,0 +1,261 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +#------------------------------------------------------------------ +# ozn_xtrct.sh +# +# This script performs the data extraction from the oznstat +# diagnostic files. The resulting data (*.ieee_d) files, GrADS +# control files and stdout files will be moved to the +# $TANKverf_ozn. +# +# Calling scripts must define: +# $TANKverf_ozn +# $HOMEoznmon +# $PDATE +# +# Return values are +# 0 = normal +# 2 = unable to generate satype list; may indicate no diag +# files found in oznstat file +#------------------------------------------------------------------ + +#-------------------------------------------------- +# check_diag_files +# +# Compare $satype (which contains the contents of +# gdas_oznmon_satype.txt to $avail_satype which is +# determined by the contents of the oznstat file. +# Report any missing diag files in a file named +# bad_diag.$PDATE +# +check_diag_files() { + pdate=$1 + found_satype=$2 + avail_satype=$3 + + out_file="bad_diag.${pdate}" + + echo ""; echo ""; echo "--> check_diag_files" + + for type in ${found_satype}; do + len_check=$(echo ${avail_satype} | grep ${type} | wc -c) + + if [[ ${len_check} -le 1 ]]; then + echo "missing diag file -- diag_${type}_ges.${pdate}.gz not found " >> ./${out_file} + fi + done + + echo "<-- check_diag_files"; echo ""; echo "" +} + + +iret=0 +export NCP=${NCP:-/bin/cp} +VALIDATE_DATA=${VALIDATE_DATA:-0} +nregion=${nregion:-6} +DO_DATA_RPT=${DO_DATA_RPT:-0} + +netcdf_boolean=".false." +if [[ $OZNMON_NETCDF -eq 1 ]]; then + netcdf_boolean=".true." 
+fi + +OZNMON_NEW_HDR=${OZNMON_NEW_HDR:-0} +new_hdr="F" +if [[ $OZNMON_NEW_HDR -eq 1 ]]; then + new_hdr="T" +fi + +#------------------------------------------------------------------ +# if VALIDATE_DATA then locate and untar base file +# +validate=".FALSE." +if [[ $VALIDATE_DATA -eq 1 ]]; then + if [[ ! -e $ozn_val_file && ! -h $ozn_val_file ]]; then + echo "WARNING: VALIDATE_DATA set to 1, but unable to locate $ozn_val_file" + echo " Setting VALIDATE_DATA to 0/OFF" + VALIDATE_DATA=0 + else + validate=".TRUE." + val_file=$(basename ${ozn_val_file}) + ${NCP} $ozn_val_file $val_file + tar -xvf $val_file + fi +fi +echo "VALIDATE_DATA, validate = $VALIDATE_DATA, $validate " + + + +#------------------------------------------------------------------ +# ozn_ptype here is the processing type which is intended to be "ges" +# or "anl". Default is "ges". +# +ozn_ptype=${ozn_ptype:-"ges anl"} + + +#--------------------------------------------------------------------------- +# Build satype list from the available diag files. +# +# An empty satype list means there are no diag files to process. That's +# a problem, reported by an iret value of 2 +# + +avail_satype=$(ls -1 d*ges* | sed -e 's/_/ /g;s/\./ /' | gawk '{ print $2 "_" $3 }') + +if [[ ${DO_DATA_RPT} -eq 1 ]]; then + if [[ -e ${SATYPE_FILE} ]]; then + satype=$(cat ${SATYPE_FILE}) + check_diag_files ${PDATE} "${satype}" "${avail_satype}" + else + echo "WARNING: missing ${SATYPE_FILE}" + fi +fi + +len_satype=$(echo -n "${satype}" | wc -c) + +if [[ ${len_satype} -le 1 ]]; then + satype=${avail_satype} +fi + +echo ${satype} + + +len_satype=$(echo -n "${satype}" | wc -c) + +if [[ ${DO_DATA_RPT} -eq 1 && ${len_satype} -lt 1 ]]; then + iret=2 + +else + + #-------------------------------------------------------------------- + # Copy extraction programs to working directory + # + ${NCP} ${HOMEoznmon}/exec/oznmon_time.x ./oznmon_time.x + if [[ ! -e oznmon_time.x ]]; then + iret=2 + exit ${iret} + fi + ${NCP} ${HOMEoznmon}/exec/oznmon_horiz.x ./oznmon_horiz.x + if [[ ! 
-e oznmon_horiz.x ]]; then + iret=3 + exit ${iret} + fi + + + #--------------------------------------------------------------------------- + # Outer loop over $ozn_ptype (default values 'ges', 'anl') + # + for ptype in ${ozn_ptype}; do + + iyy=$(echo ${PDATE} | cut -c1-4) + imm=$(echo ${PDATE} | cut -c5-6) + idd=$(echo ${PDATE} | cut -c7-8) + ihh=$(echo ${PDATE} | cut -c9-10) + + for type in ${avail_satype}; do + if [[ -f "diag_${type}_${ptype}.${PDATE}.gz" ]]; then + mv diag_${type}_${ptype}.${PDATE}.gz ${type}.${ptype}.gz + gunzip ./${type}.${ptype}.gz + + echo "processing ptype, type: ${ptype}, ${type}" + rm -f input + +cat << EOF > input + &INPUT + satname='${type}', + iyy=${iyy}, + imm=${imm}, + idd=${idd}, + ihh=${ihh}, + idhh=-720, + incr=6, + nregion=${nregion}, + region(1)='global', rlonmin(1)=-180.0,rlonmax(1)=180.0,rlatmin(1)=-90.0,rlatmax(1)= 90.0, + region(2)='70N-90N', rlonmin(2)=-180.0,rlonmax(2)=180.0,rlatmin(2)= 70.0,rlatmax(2)= 90.0, + region(3)='20N-70N', rlonmin(3)=-180.0,rlonmax(3)=180.0,rlatmin(3)= 20.0,rlatmax(3)= 70.0, + region(4)='20S-20N', rlonmin(4)=-180.0,rlonmax(4)=180.0,rlatmin(4)=-20.0,rlatmax(4)= 20.0, + region(5)='20S-70S', rlonmin(5)=-180.0,rlonmax(5)=180.0,rlatmin(5)=-70.0,rlatmax(5)=-20.0, + region(6)='70S-90S', rlonmin(6)=-180.0,rlonmax(6)=180.0,rlatmin(6)=-90.0,rlatmax(6)=-70.0, + validate=${validate}, + new_hdr=${new_hdr}, + ptype=${ptype}, + netcdf=${netcdf_boolean} + / +EOF + + + echo "oznmon_time.x HAS STARTED ${type}" + + ./oznmon_time.x < input > stdout.time.${type}.${ptype} + + echo "oznmon_time.x HAS ENDED ${type}" + + if [[ ! -d ${TANKverf_ozn}/time ]]; then + mkdir -p ${TANKverf_ozn}/time + fi + $NCP ${type}.${ptype}.ctl ${TANKverf_ozn}/time/ + $NCP ${type}.${ptype}.${PDATE}.ieee_d ${TANKverf_ozn}/time/ + + $NCP bad* ${TANKverf_ozn}/time/ + + rm -f input + +cat << EOF > input + &INPUT + satname='${type}', + iyy=${iyy}, + imm=${imm}, + idd=${idd}, + ihh=${ihh}, + idhh=-18, + incr=6, + new_hdr=${new_hdr}, + ptype=${ptype}, + netcdf=${netcdf_boolean} + / +EOF + + echo "oznmon_horiz.x HAS STARTED ${type}" + + ./oznmon_horiz.x < input > stdout.horiz.${type}.${ptype} + + echo "oznmon_horiz.x HAS ENDED ${type}" + + if [[ ! -d ${TANKverf_ozn}/horiz ]]; then + mkdir -p ${TANKverf_ozn}/horiz + fi + $NCP ${type}.${ptype}.ctl ${TANKverf_ozn}/horiz/ + + $COMPRESS ${type}.${ptype}.${PDATE}.ieee_d + $NCP ${type}.${ptype}.${PDATE}.ieee_d.${Z} ${TANKverf_ozn}/horiz/ + + + echo "finished processing ptype, type: ${ptype}, ${type}" + + else + echo "diag file for ${type}.${ptype} not found" + fi + + done # type in satype + + done # ptype in $ozn_ptype + + tar -cvf stdout.horiz.tar stdout.horiz* + ${COMPRESS} stdout.horiz.tar + ${NCP} stdout.horiz.tar.${Z} ${TANKverf_ozn}/horiz/ + + tar -cvf stdout.time.tar stdout.time* + ${COMPRESS} stdout.time.tar + ${NCP} stdout.time.tar.${Z} ${TANKverf_ozn}/time/ +fi + +#------------------------------------------------------- +# Conditionally remove data files older than 40 days +# +if [[ ${CLEAN_TANKDIR:-0} -eq 1 ]]; then + ${HOMEoznmon}/ush/clean_tankdir.sh glb 40 +fi + +exit ${iret} diff --git a/ush/parsing_model_configure_DATM.sh b/ush/parsing_model_configure_DATM.sh index a2e7c8c918..ecd3fa6dd6 100755 --- a/ush/parsing_model_configure_DATM.sh +++ b/ush/parsing_model_configure_DATM.sh @@ -1,4 +1,4 @@ -#! /bin/sh +#! 
/usr/bin/env bash ##### ## "parsing_model_configure_DATM.sh" diff --git a/ush/parsing_model_configure_FV3.sh b/ush/parsing_model_configure_FV3.sh index 4c35179e90..91b82a0d76 100755 --- a/ush/parsing_model_configure_FV3.sh +++ b/ush/parsing_model_configure_FV3.sh @@ -1,4 +1,4 @@ -#! /bin/sh +#! /usr/bin/env bash ##### ## "parsing_model_configure_FV3.sh" @@ -12,6 +12,13 @@ FV3_model_configure(){ +local restile=$(echo "${CASE}" |cut -c2-) +local ichunk2d=$((4*restile)) +local jchunk2d=$((2*restile)) +local ichunk3d=$((4*restile)) +local jchunk3d=$((2*restile)) +local kchunk3d=1 + rm -f model_configure cat >> model_configure < ice_in < ice_in < ice_in < ice_in < ice_in < ice_in <> diag_table fi -if [ ! -z "${AERO_DIAG_TABLE}" ]; then +if [ ! -z "${AERO_DIAG_TABLE:-}" ]; then cat ${AERO_DIAG_TABLE} >> diag_table fi @@ -44,7 +45,7 @@ cat $DIAG_TABLE_APPEND >> diag_table $NCP $DATA_TABLE data_table # build field_table -if [ ! -z "${AERO_FIELD_TABLE}" ]; then +if [ ! -z "${AERO_FIELD_TABLE:-}" ]; then nrec=$( cat ${FIELD_TABLE} | wc -l ) prec=${nrec} if (( dnats > 0 )); then @@ -69,20 +70,20 @@ cat > input.nml < input.nml <> input.nml << EOF dry_mass=${dry_mass:-98320.0} consv_te = $consv_te do_sat_adj = ${do_sat_adj:-".false."} + fast_tau_w_sec = ${fast_tau_w_sec:-"0.2"} consv_am = .false. fill = .true. dwind_2d = .false. @@ -178,7 +180,7 @@ cat >> input.nml << EOF agrid_vel_rst = ${agrid_vel_rst:-".true."} read_increment = $read_increment res_latlon_dynamics = $res_latlon_dynamics - $fv_core_nml + ${fv_core_nml-} / &external_ic_nml @@ -187,7 +189,7 @@ cat >> input.nml << EOF gfs_dwinds = $gfs_dwinds checker_tr = .false. nt_checker = 0 - $external_ic_nml + ${external_ic_nml-} / &gfs_physics_nml @@ -223,16 +225,16 @@ EOF do_mynnsfclay = ${do_mynnsfclay:-".false."} icloud_bl = ${icloud_bl:-"1"} tke_budget = ${tke_budget:-"0"} - bl_mynn_tkeadvect = ${bl_mynn_tkeadvect:-".true."} - bl_mynn_cloudpdf = ${bl_mynn_cloudpdf:-"2"} - bl_mynn_mixlength = ${bl_mynn_mixlength:-"1"} - bl_mynn_edmf = ${bl_mynn_edmf:-"1"} - bl_mynn_edmf_mom = ${bl_mynn_edmf_mom:-"1"} - bl_mynn_edmf_tke = ${bl_mynn_edmf_tke:-"0"} - bl_mynn_cloudmix = ${bl_mynn_cloudmix:-"1"} - bl_mynn_mixqt = ${bl_mynn_mixqt:-"0"} - bl_mynn_output = ${bl_mynn_output:-"0"} - bl_mynn_closure = ${bl_mynn_closure:-"2.6"} + bl_mynn_tkeadvect = ${bl_mynn_tkeadvect:=".true."} + bl_mynn_cloudpdf = ${bl_mynn_cloudpdf:="2"} + bl_mynn_mixlength = ${bl_mynn_mixlength:="1"} + bl_mynn_edmf = ${bl_mynn_edmf:="1"} + bl_mynn_edmf_mom = ${bl_mynn_edmf_mom:="1"} + bl_mynn_edmf_tke = ${bl_mynn_edmf_tke:="0"} + bl_mynn_cloudmix = ${bl_mynn_cloudmix:="1"} + bl_mynn_mixqt = ${bl_mynn_mixqt:="0"} + bl_mynn_output = ${bl_mynn_output:="0"} + bl_mynn_closure = ${bl_mynn_closure:="2.6"} do_ugwp = ${do_ugwp:-".false."} do_tofd = ${do_tofd:-".true."} gwd_opt = ${gwd_opt:-"2"} @@ -303,15 +305,16 @@ EOF icloud_bl = ${icloud_bl:-"1"} tke_budget = ${tke_budget:-"0"} bl_mynn_tkeadvect = ${bl_mynn_tkeadvect:-".true."} - bl_mynn_cloudpdf = ${bl_mynn_cloudpdf:-"2"} - bl_mynn_mixlength = ${bl_mynn_mixlength:-"1"} + bl_mynn_cloudpdf = ${bl_mynn_cloudpdf:="2"} + bl_mynn_mixlength = ${bl_mynn_mixlength:="1"} bl_mynn_edmf = ${bl_mynn_edmf:-"1"} bl_mynn_edmf_mom = ${bl_mynn_edmf_mom:-"1"} - bl_mynn_edmf_tke = ${bl_mynn_edmf_tke:-"0"} - bl_mynn_cloudmix = ${bl_mynn_cloudmix:-"1"} - bl_mynn_mixqt = ${bl_mynn_mixqt:-"0"} - bl_mynn_output = ${bl_mynn_output:-"0"} - bl_mynn_closure = ${bl_mynn_closure:-"2.6"} + bl_mynn_edmf_tke = ${bl_mynn_edmf_tke:="0"} + bl_mynn_cloudmix = 
${bl_mynn_cloudmix:="1"} + bl_mynn_mixqt = ${bl_mynn_mixqt:="0"} + bl_mynn_output = ${bl_mynn_output:="0"} + bl_mynn_closure = ${bl_mynn_closure:="2.6"} + lcnorm = ${lcnorm:-".true."} do_ugwp = ${do_ugwp:-".false."} do_tofd = ${do_tofd:-".false."} gwd_opt = ${gwd_opt:-"2"} @@ -398,6 +401,7 @@ cat >> input.nml <> input.nml <> input.nml << EOF fscav_aero = ${fscav_aero:-'*:0.0'} EOF @@ -454,9 +458,9 @@ cat >> input.nml <> input.nml << EOF - $gfs_physics_nml + / EOF @@ -597,12 +602,12 @@ cat >> input.nml <> input.nml <> input.nml <> input.nml << EOF - $nam_stochy_nml + ${nam_stochy_nml:-} / EOF @@ -711,13 +716,13 @@ EOF ISEED_LNDP = ${ISEED_LNDP:-$ISEED} lndp_var_list = ${lndp_var_list} lndp_prt_list = ${lndp_prt_list} - $nam_sfcperts_nml + ${nam_sfcperts_nml:-} / EOF else cat >> input.nml << EOF &nam_sfcperts - $nam_sfcperts_nml + ${nam_sfcperts_nml:-} / EOF fi diff --git a/ush/parsing_namelists_MOM6.sh b/ush/parsing_namelists_MOM6.sh index 617c774483..add7090fe7 100755 --- a/ush/parsing_namelists_MOM6.sh +++ b/ush/parsing_namelists_MOM6.sh @@ -1,73 +1,20 @@ +#! /usr/bin/env bash MOM6_namelists(){ # MOM6 namelists generation -OCNRES=${OCNRES:-"025"} -MOM_INPUT=MOM_input_template_$OCNRES - -#Set to False for restart reproducibility -MOM6_USE_LI2016=${MOM6_USE_LI2016:-'True'} -MOM6_THERMO_SPAN=${MOM6_THERMO_SPAN:-'False'} -MOM6_ALLOW_LANDMASK_CHANGES=${MOM6_ALLOW_LANDMASK_CHANGES:-'False'} - -DO_OCN_SPPT=${DO_OCN_SPPT:-'False'} -PERT_EPBL=${PERT_EPBL:-'False'} - -MOM_IAU_HRS=${MOM_IAU_HRS:-'3.0'} - -if [ $cplwav = ".true." ] ; then - MOM6_USE_WAVES='True' -else - MOM6_USE_WAVES='False' -fi - -if [ $OCNRES = '025' ]; then - NX_GLB=1440 - NY_GLB=1080 - DT_DYNAM_MOM6='900' - DT_THERM_MOM6='1800' - CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" - FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" - MOM6_RIVER_RUNOFF='True' - MOM6_RESTART_SETTING="r" -elif [ $OCNRES = '050' ]; then - NX_GLB=720 - NY_GLB=576 - DT_DYNAM_MOM6='1800' - DT_THERM_MOM6='3600' - CHLCLIM="seawifs-clim-1997-2010.${NX_GLB}x${NY_GLB}.v20180328.nc" - FRUNOFF="runoff.daitren.clim.${NX_GLB}x${NY_GLB}.v20180328.nc" - MOM6_RESTART_SETTING='n' - MOM6_RIVER_RUNOFF='True' -elif [ $OCNRES = '100' ]; then - NX_GLB=360 - NY_GLB=320 - DT_DYNAM_MOM6='1800' - DT_THERM_MOM6='3600' - FRUNOFF="" - CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" - MOM6_RESTART_SETTING='n' - MOM6_RIVER_RUNOFF='False' -elif [ $OCNRES = '400' ]; then - NX_GLB=90 - NY_GLB=80 - DT_DYNAM_MOM6='1800' - DT_THERM_MOM6='3600' - FRUNOFF="" - CHLCLIM="seawifs_1998-2006_smoothed_2X.nc" - MOM6_RESTART_SETTING='n' - MOM6_RIVER_RUNOFF='False' +if [[ "${cplwav}" == ".true." 
]] ; then + local MOM6_USE_WAVES='True' else - echo "FATAL ERROR: do not have MOM6 settings defined for desired OCNRES=$OCNRES" - exit 1 + local MOM6_USE_WAVES='False' fi cat >> input.nml <> input.nml <> input.nml <> input.nml < $DATA/INPUT/MOM_input -rm $DATA/INPUT/MOM_input_template_$OCNRES +${NCP} -pf "${HOMEgfs}/parm/mom6/MOM_input_template_${OCNRES}" "${DATA}/INPUT/" +sed -e "s/@\[DT_THERM_MOM6\]/${DT_THERM_MOM6}/g" \ + -e "s/@\[DT_DYNAM_MOM6\]/${DT_DYNAM_MOM6}/g" \ + -e "s/@\[MOM6_RIVER_RUNOFF\]/${MOM6_RIVER_RUNOFF}/g" \ + -e "s/@\[MOM6_THERMO_SPAN\]/${MOM6_THERMO_SPAN}/g" \ + -e "s/@\[MOM6_USE_LI2016\]/${MOM6_USE_LI2016}/g" \ + -e "s/@\[MOM6_USE_WAVES\]/${MOM6_USE_WAVES}/g" \ + -e "s/@\[MOM6_ALLOW_LANDMASK_CHANGES\]/${MOM6_ALLOW_LANDMASK_CHANGES}/g" \ + -e "s/@\[NX_GLB\]/${NX_GLB}/g" \ + -e "s/@\[NY_GLB\]/${NY_GLB}/g" \ + -e "s/@\[CHLCLIM\]/${CHLCLIM}/g" \ + -e "s/@\[DO_OCN_SPPT\]/${OCN_SPPT}/g" \ + -e "s/@\[PERT_EPBL\]/${PERT_EPBL}/g" \ + -e "s/@\[ODA_INCUPD_NHOURS\]/${ODA_INCUPD_NHOURS}/g" \ + -e "s/@\[ODA_INCUPD\]/${ODA_INCUPD}/g" "${DATA}/INPUT/MOM_input_template_${OCNRES}" > "${DATA}/INPUT/MOM_input" +rm "${DATA}/INPUT/MOM_input_template_${OCNRES}" #data table for runoff: -DATA_TABLE=${DATA_TABLE:-$PARM_FV3DIAG/data_table} -$NCP $DATA_TABLE $DATA/data_table_template -sed -e "s/@\[FRUNOFF\]/$FRUNOFF/g" $DATA/data_table_template > $DATA/data_table -rm $DATA/data_table_template +DATA_TABLE=${DATA_TABLE:-${PARM_FV3DIAG}/data_table} +${NCP} "${DATA_TABLE}" "${DATA}/data_table_template" +sed -e "s/@\[FRUNOFF\]/${FRUNOFF}/g" "${DATA}/data_table_template" > "${DATA}/data_table" +rm "${DATA}/data_table_template" } diff --git a/ush/parsing_namelists_WW3.sh b/ush/parsing_namelists_WW3.sh new file mode 100755 index 0000000000..c53af9f18f --- /dev/null +++ b/ush/parsing_namelists_WW3.sh @@ -0,0 +1,326 @@ +#! 
/usr/bin/env bash + +WW3_namelists(){ + +# WW3 namelists/input generation + + FHMAX_WAV=${FHMAX_WAV:-384} + + # Date and time stuff + + # Beginning time for outpupt may differ from SDATE if DOIAU=YES + export date=$PDY + export YMDH=${PDY}${cyc} + # Roll back $IAU_FHROT hours of DOIAU=YES + if [ "$DOIAU" = "YES" ] + then + WAVHINDH=$(( WAVHINDH + IAU_FHROT )) + fi + # Set time stamps for model start and output + # For special case when IAU is on but this is an initial half cycle + if [ $IAU_OFFSET = 0 ]; then + ymdh_beg=$YMDH + else + ymdh_beg=$($NDATE -$WAVHINDH $YMDH) + fi + time_beg="$(echo $ymdh_beg | cut -c1-8) $(echo $ymdh_beg | cut -c9-10)0000" + ymdh_end=$($NDATE $FHMAX_WAV $YMDH) + time_end="$(echo $ymdh_end | cut -c1-8) $(echo $ymdh_end | cut -c9-10)0000" + ymdh_beg_out=$YMDH + time_beg_out="$(echo $ymdh_beg_out | cut -c1-8) $(echo $ymdh_beg_out | cut -c9-10)0000" + + # Restart file times (already has IAU_FHROT in WAVHINDH) + RSTOFFSET=$(( ${WAVHCYC} - ${WAVHINDH} )) + # Update restart time is added offset relative to model start + RSTOFFSET=$(( ${RSTOFFSET} + ${RSTIOFF_WAV} )) + ymdh_rst_ini=$($NDATE ${RSTOFFSET} $YMDH) + RST2OFFSET=$(( DT_2_RST_WAV / 3600 )) + ymdh_rst2_ini=$($NDATE ${RST2OFFSET} $YMDH) # DT2 relative to first-first-cycle restart file + # First restart file for cycling + time_rst_ini="$(echo $ymdh_rst_ini | cut -c1-8) $(echo $ymdh_rst_ini | cut -c9-10)0000" + if [ ${DT_1_RST_WAV} = 1 ]; then + time_rst1_end=${time_rst_ini} + else + RST1OFFSET=$(( DT_1_RST_WAV / 3600 )) + ymdh_rst1_end=$($NDATE $RST1OFFSET $ymdh_rst_ini) + time_rst1_end="$(echo $ymdh_rst1_end | cut -c1-8) $(echo $ymdh_rst1_end | cut -c9-10)0000" + fi + # Second restart file for checkpointing + if [ "${RSTTYPE_WAV}" = "T" ]; then + time_rst2_ini="$(echo $ymdh_rst2_ini | cut -c1-8) $(echo $ymdh_rst2_ini | cut -c9-10)0000" + time_rst2_end=$time_end + # Condition for gdas run or any other run when checkpoint stamp is > ymdh_end + if [ $ymdh_rst2_ini -ge $ymdh_end ]; then + ymdh_rst2_ini=$($NDATE 3 $ymdh_end) + time_rst2_ini="$(echo $ymdh_rst2_ini | cut -c1-8) $(echo $ymdh_rst2_ini | cut -c9-10)0000" + time_rst2_end=$time_rst2_ini + fi + else + time_rst2_ini="$" + time_rst2_end= + DT_2_RST_WAV= + fi + + + set +x + echo ' ' + echo 'Times in wave model format :' + echo '----------------------------' + echo " date / cycle : $date $cycle" + echo " starting time : $time_beg" + echo " ending time : $time_end" + echo ' ' + set_trace + + + +# --------------------------------------------------------------------------- # +# Create ww3_multi/shel.inp + + if [ $waveMULTIGRID = ".true." ]; then + # ww3_multi template + if [ -f $PARMwave/ww3_multi.${NET}.inp.tmpl ]; then + cp $PARMwave/ww3_multi.${NET}.inp.tmpl ww3_multi.inp.tmpl + fi + if [ ! -f ww3_multi.inp.tmpl ]; then + echo "ABNORMAL EXIT: NO TEMPLATE FOR WW3 MULTI INPUT FILE" + exit 11 + fi + else + # ww3_multi template + if [ -f $PARMwave/ww3_shel.${NET}.inp.tmpl ]; then + cp $PARMwave/ww3_shel.${NET}.inp.tmpl ww3_shel.inp.tmpl + fi + if [ ! -f ww3_shel.inp.tmpl ]; then + echo "ABNORMAL EXIT: NO TEMPLATE FOR WW3 SHEL INPUT FILE" + exit 12 + fi + fi + +# Buoy location file + + if [ -f $PARMwave/wave_${NET}.buoys ] + then + cp $PARMwave/wave_${NET}.buoys buoy.loc + fi + + if [ -f buoy.loc ] + then + set +x + echo " buoy.loc copied ($PARMwave/wave_${NET}.buoys)." + set_trace + else + echo " FATAL ERROR : buoy.loc ($PARMwave/wave_${NET}.buoys) NOT FOUND" + exit 12 + fi + + + +if [ $waveMULTIGRID = ".true." 
]; then +#multi + +# Initialize inp file parameters + NFGRIDS=0 + NMGRIDS=0 + CPLILINE='$' + ICELINE='$' + ICEFLAG='no' + CURRLINE='$' + CURRFLAG='no' + WINDLINE='$' + WINDFLAG='no' + UNIPOINTS='$' + +# Check for required inputs and coupling options + if [ $waveuoutpGRD ] + then + UNIPOINTS="'$waveuoutpGRD'" + fi + +# Check if waveesmfGRD is set + if [ ${waveesmfGRD} ] + then + NFGRIDS=$(expr $NFGRIDS + 1) + fi + + case ${WW3ATMINP} in + 'YES' ) + NFGRIDS=$(expr $NFGRIDS + 1) + WINDLINE=" '$WAVEWND_FID' F F T F F F F F F" + WINDFLAG="$WAVEWND_FID" + ;; + 'CPL' ) + WNDIFLAG='T' + if [ ${waveesmfGRD} ] + then + WINDFLAG="CPL:${waveesmfGRD}" + CPLILINE=" '${waveesmfGRD}' F F T F F F F F F" + else + WINDFLAG="CPL:native" + fi + ;; + esac + + case ${WW3ICEINP} in + 'YES' ) + NFGRIDS=$(expr $NFGRIDS + 1) + ICEIFLAG='T' + ICELINE=" '$WAVEICE_FID' F F F T F F F F F" + ICEFLAG="$WAVEICE_FID" + ;; + 'CPL' ) + ICEIFLAG='T' + if [ ${waveesmfGRD} ] + then + ICEFLAG="CPL:${waveesmfGRD}" + CPLILINE=" '${waveesmfGRD}' F F ${WNDIFLAG} T F F F F F" + else + ICEFLAG="CPL:native" + fi + ;; + esac + + case ${WW3CURINP} in + 'YES' ) + if [ "$WAVECUR_FID" != "$WAVEICE_FID" ]; then + NFGRIDS=$(expr $NFGRIDS + 1) + CURRLINE=" '$WAVECUR_FID' F T F F F F F F F" + CURRFLAG="$WAVECUR_FID" + else # cur fields share the same grid as ice grid + ICELINE=" '$WAVEICE_FID' F T F ${ICEIFLAG} F F F F F" + CURRFLAG="$WAVEICE_FID" + fi + ;; + 'CPL' ) + CURIFLAG='T' + if [ ${waveesmfGRD} ] + then + CURRFLAG="CPL:${waveesmfGRD}" + CPLILINE=" '${waveesmfGRD}' F T ${WNDIFLAG} ${ICEFLAG} F F F F F" + else + CURRFLAG="CPL:native" + fi + ;; + esac + + unset agrid + agrid= + gline= + GRDN=0 +# grdGRP=1 # Single group for now + for grid in ${waveGRD} + do + GRDN=$(expr ${GRDN} + 1) + agrid=( ${agrid[*]} ${grid} ) + NMGRIDS=$(expr $NMGRIDS + 1) + gridN=$(echo $waveGRDN | awk -v i=$GRDN '{print $i}') + gridG=$(echo $waveGRDG | awk -v i=$GRDN '{print $i}') + gline="${gline}'${grid}' 'no' 'CURRFLAG' 'WINDFLAG' 'ICEFLAG' 'no' 'no' 'no' 'no' 'no' ${gridN} ${gridG} 0.00 1.00 F\n" + done + gline="${gline}\$" + echo $gline + + sed -e "s/NFGRIDS/$NFGRIDS/g" \ + -e "s/NMGRIDS/${NMGRIDS}/g" \ + -e "s/FUNIPNT/${FUNIPNT}/g" \ + -e "s/IOSRV/${IOSRV}/g" \ + -e "s/FPNTPROC/${FPNTPROC}/g" \ + -e "s/FGRDPROC/${FGRDPROC}/g" \ + -e "s/OUTPARS/${OUTPARS_WAV}/g" \ + -e "s/CPLILINE/${CPLILINE}/g" \ + -e "s/UNIPOINTS/${UNIPOINTS}/g" \ + -e "s/GRIDLINE/${gline}/g" \ + -e "s/ICELINE/$ICELINE/g" \ + -e "s/CURRLINE/$CURRLINE/g" \ + -e "s/WINDLINE/$WINDLINE/g" \ + -e "s/ICEFLAG/$ICEFLAG/g" \ + -e "s/CURRFLAG/$CURRFLAG/g" \ + -e "s/WINDFLAG/$WINDFLAG/g" \ + -e "s/RUN_BEG/$time_beg/g" \ + -e "s/RUN_END/$time_end/g" \ + -e "s/OUT_BEG/$time_beg_out/g" \ + -e "s/OUT_END/$time_end/g" \ + -e "s/DTFLD/ $DTFLD_WAV/g" \ + -e "s/FLAGMASKCOMP/ $FLAGMASKCOMP/g" \ + -e "s/FLAGMASKOUT/ $FLAGMASKOUT/g" \ + -e "s/GOFILETYPE/ $GOFILETYPE/g" \ + -e "s/POFILETYPE/ $POFILETYPE/g" \ + -e "s/DTPNT/ $DTPNT_WAV/g" \ + -e "/BUOY_FILE/r buoy.loc" \ + -e "s/BUOY_FILE/DUMMY/g" \ + -e "s/RST_BEG/$time_rst_ini/g" \ + -e "s/RSTTYPE/$RSTTYPE_WAV/g" \ + -e "s/RST_2_BEG/$time_rst2_ini/g" \ + -e "s/DTRST/$DT_1_RST_WAV/g" \ + -e "s/DT_2_RST/$DT_2_RST_WAV/g" \ + -e "s/RST_END/$time_rst1_end/g" \ + -e "s/RST_2_END/$time_rst2_end/g" \ + ww3_multi.inp.tmpl | \ + sed -n "/DUMMY/!p" > ww3_multi.inp + + rm -f ww3_multi.inp.tmpl buoy.loc + + cat ww3_multi.inp + +else + #ww3_shel + +# Initialize inp file parameters + ICELINE='F F' + CURRLINE='F F' + WINDLINE='F F' + + case ${WW3ATMINP} in + 'YES' ) + WINDLINE="T 
F";; + 'CPL' ) + WINDLINE="C F";; + esac + + case ${WW3ICEINP} in + 'YES' ) + ICELINE="T F";; + 'CPL' ) + ICELINE="C F";; + esac + + case ${WW3CURINP} in + 'YES' ) + CURRLINE="T F";; + 'CPL' ) + CURRLINE="C F";; + esac + + sed -e "s/IOSRV/${IOSRV}/g" \ + -e "s/OUTPARS/${OUTPARS_WAV}/g" \ + -e "s/ICELINE/$ICELINE/g" \ + -e "s/CURRLINE/$CURRLINE/g" \ + -e "s/WINDLINE/$WINDLINE/g" \ + -e "s/RUN_BEG/$time_beg/g" \ + -e "s/RUN_END/$time_end/g" \ + -e "s/OUT_BEG/$time_beg_out/g" \ + -e "s/OUT_END/$time_end/g" \ + -e "s/DTFLD/ $DTFLD_WAV/g" \ + -e "s/GOFILETYPE/ $GOFILETYPE/g" \ + -e "s/POFILETYPE/ $POFILETYPE/g" \ + -e "s/DTPNT/ $DTPNT_WAV/g" \ + -e "s/DTPNT/ $DTPNT_WAV/g" \ + -e "/BUOY_FILE/r buoy.loc" \ + -e "s/BUOY_FILE/DUMMY/g" \ + -e "s/RST_BEG/$time_rst_ini/g" \ + -e "s/RSTTYPE/$RSTTYPE_WAV/g" \ + -e "s/RST_2_BEG/$time_rst2_ini/g" \ + -e "s/DTRST/$DT_1_RST_WAV/g" \ + -e "s/DT_2_RST/$DT_2_RST_WAV/g" \ + -e "s/RST_END/$time_rst1_end/g" \ + -e "s/RST_2_END/$time_rst2_end/g" \ + ww3_shel.inp.tmpl | \ + sed -n "/DUMMY/!p" > ww3_shel.inp + + rm -f ww3_shel.inp.tmpl buoy.loc + + cat ww3_shel.inp + +fi + +} diff --git a/ush/preamble.sh b/ush/preamble.sh new file mode 100644 index 0000000000..be64684aa8 --- /dev/null +++ b/ush/preamble.sh @@ -0,0 +1,157 @@ +#! /usr/bin/env bash + +####### +# Preamble script to be SOURCED at the beginning of every script. Sets +# useful PS4 and optionally turns on set -x and set -eu. Also sets up +# crude script timing and provides a postamble that runs on exit. +# +# Syntax: +# preamble.sh [id] +# +# Aruguments: +# id: Optional identifier string. Use when running the same script +# multiple times in the same job (e.g. MPMD) +# +# Input environment variables: +# TRACE (YES/NO): Whether to echo every command (set -x) [default: "YES"] +# STRICT (YES/NO): Whether to exit immediately on error or undefined variable +# (set -eu) [default: "YES"] +# +####### +set +x +if (( $# > 0 )); then + id="(${1})" +else + id="" +fi + +# Record the start time so we can calculate the elapsed time later +start_time=$(date +%s) + +# Get the base name of the calling script +_calling_script=$(basename "${BASH_SOURCE[1]}") + +# Announce the script has begun +start_time_human=$(date -d"@${start_time}" -u) +echo "Begin ${_calling_script} at ${start_time_human}" + +declare -rx PS4='+ $(basename ${BASH_SOURCE[0]:-${FUNCNAME[0]:-"Unknown"}})[${LINENO}]'"${id}: " + +set_strict() { + if [[ ${STRICT:-"YES"} == "YES" ]]; then + # Exit on error and undefined variable + set -eu + fi +} + +set_trace() { + # Print the script name and line number of each command as it is + # executed when using trace. + if [[ ${TRACE:-"YES"} == "YES" ]]; then + set -x + fi +} + +postamble() { + # + # Commands to execute when a script ends. 
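+    #
+    # It is attached below via an EXIT trap, so sourcing scripts never call it
+    # directly; for example (hypothetical script name and timings) a clean run
+    # ends with a line like:
+    #   End exglobal_forecast.sh at 18:05:21 with error code 0 (time elapsed: 00:05:21)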
+ # + # Syntax: + # postamble script start_time rc + # + # Arguments: + # script: name of the script ending + # start_time: start time of script (in seconds) + # rc: the exit code of the script + # + + set +x + script="${1}" + start_time="${2}" + rc="${3}" + + # Calculate the elapsed time + end_time=$(date +%s) + end_time_human=$(date -d@"${end_time}" -u +%H:%M:%S) + elapsed_sec=$((end_time - start_time)) + elapsed=$(date -d@"${elapsed_sec}" -u +%H:%M:%S) + + # Announce the script has ended, then pass the error code up + echo "End ${script} at ${end_time_human} with error code ${rc:-0} (time elapsed: ${elapsed})" + exit "${rc}" +} + +# Place the postamble in a trap so it is always called no matter how the script exits +# Shellcheck: Turn off warning about substitions at runtime instead of signal time +# shellcheck disable=SC2064 +trap "postamble ${_calling_script} ${start_time} \$?" EXIT +# shellcheck disable= + +function generate_com() { + # + # Generate a list COM variables from a template by substituting in env variables. + # + # Each argument must have a corresponding template with the name ${ARG}_TMPL. Any + # variables in the template are replaced with their values. Undefined variables + # are just removed without raising an error. + # + # Accepts as options `-r` and `-x`, which do the same thing as the same options in + # `declare`. Variables are automatically marked as `-g` so the variable is visible + # in the calling script. + # + # Syntax: + # generate_com [-rx] $var1[:$tmpl1] [$var2[:$tmpl2]] [...]] + # + # options: + # -r: Make variable read-only (same as `decalre -r`) + # -x: Mark variable for export (same as `declare -x`) + # var1, var2, etc: Variable names whose values will be generated from a template + # and declared + # tmpl1, tmpl2, etc: Specify the template to use (default is "${var}_TMPL") + # + # Examples: + # # Current cycle and RUN, implicitly using template COM_ATMOS_ANALYSIS_TMPL + # YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_ANALYSIS + # + # # Previous cycle and gdas using an explicit template + # RUN=${GDUMP} YMD=${gPDY} HH=${gcyc} generate_com -rx \ + # COM_ATMOS_HISTORY_PREV:COM_ATMOS_HISTORY_TMPL + # + # # Current cycle and COM for first member + # MEMDIR='mem001' YMD=${PDY} HH=${cyc} generate_com -rx COM_ATMOS_HISTORY + # + if [[ ${DEBUG_WORKFLOW:-"NO"} == "NO" ]]; then set +x; fi + local opts="-g" + local OPTIND=1 + while getopts "rx" option; do + opts="${opts}${option}" + done + shift $((OPTIND-1)) + + for input in "$@"; do + IFS=':' read -ra args <<< "${input}" + local com_var="${args[0]}" + local template + local value + if (( ${#args[@]} > 1 )); then + template="${args[1]}" + else + template="${com_var}_TMPL" + fi + if [[ ! -v "${template}" ]]; then + echo "FATAL ERROR in generate_com: Requested template ${template} not defined!" 
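+            # (for example, requesting a hypothetical COM_FOO without exporting
+            # COM_FOO_TMPL first ends up here)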
+ exit 2 + fi + value=$(echo "${!template}" | envsubst) + # shellcheck disable=SC2086 + declare ${opts} "${com_var}"="${value}" + echo "generate_com :: ${com_var}=${value}" + done + set_trace +} +# shellcheck disable= +declare -xf generate_com + +# Turn on our settings +set_strict +set_trace diff --git a/ush/python/pygfs/__init__.py b/ush/python/pygfs/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/ush/python/pygfs/task/__init__.py b/ush/python/pygfs/task/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/ush/python/pygfs/task/aero_analysis.py b/ush/python/pygfs/task/aero_analysis.py new file mode 100644 index 0000000000..e3c9ad50a2 --- /dev/null +++ b/ush/python/pygfs/task/aero_analysis.py @@ -0,0 +1,304 @@ +#!/usr/bin/env python3 + +import os +import glob +import gzip +import tarfile +from logging import getLogger +from typing import Dict, List, Any + +from pygw.attrdict import AttrDict +from pygw.file_utils import FileHandler +from pygw.timetools import add_to_datetime, to_fv3time, to_timedelta +from pygw.fsutils import rm_p, chdir +from pygw.timetools import to_fv3time +from pygw.yaml_file import YAMLFile, parse_yamltmpl, parse_j2yaml, save_as_yaml +from pygw.logger import logit +from pygw.executable import Executable +from pygw.exceptions import WorkflowException +from pygfs.task.analysis import Analysis + +logger = getLogger(__name__.split('.')[-1]) + + +class AerosolAnalysis(Analysis): + """ + Class for global aerosol analysis tasks + """ + @logit(logger, name="AerosolAnalysis") + def __init__(self, config): + super().__init__(config) + + _res = int(self.config['CASE'][1:]) + _res_enkf = int(self.config['CASE_ENS'][1:]) + _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config['assim_freq']}H") / 2) + _fv3jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config['cyc']:02d}z.aerovar.yaml") + + # Create a local dictionary that is repeatedly used across this class + local_dict = AttrDict( + { + 'npx_ges': _res + 1, + 'npy_ges': _res + 1, + 'npz_ges': self.config.LEVS - 1, + 'npz': self.config.LEVS - 1, + 'npx_anl': _res_enkf + 1, + 'npy_anl': _res_enkf + 1, + 'npz_anl': self.config['LEVS'] - 1, + 'AERO_WINDOW_BEGIN': _window_begin, + 'AERO_WINDOW_LENGTH': f"PT{self.config['assim_freq']}H", + 'OPREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN + 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN + 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", + 'fv3jedi_yaml': _fv3jedi_yaml, + } + ) + + # task_config is everything that this task should need + self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict) + + @logit(logger) + def initialize(self: Analysis) -> None: + """Initialize a global aerosol analysis + + This method will initialize a global aerosol analysis using JEDI. 
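+        All staging is expressed as FileHandler mkdir/copy dictionaries, and the
+        variational YAML for the executable is rendered from the AEROVARYAML
+        template with parse_j2yaml.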
+ This includes: + - staging CRTM fix files + - staging FV3-JEDI fix files + - staging B error files + - staging model backgrounds + - generating a YAML file for the JEDI executable + - creating output directories + """ + super().initialize() + + # stage CRTM fix files + crtm_fix_list_path = os.path.join(self.task_config['HOMEgfs'], 'parm', 'parm_gdas', 'aero_crtm_coeff.yaml') + logger.debug(f"Staging CRTM fix files from {crtm_fix_list_path}") + crtm_fix_list = parse_yamltmpl(crtm_fix_list_path, self.task_config) + FileHandler(crtm_fix_list).sync() + + # stage fix files + jedi_fix_list_path = os.path.join(self.task_config['HOMEgfs'], 'parm', 'parm_gdas', 'aero_jedi_fix.yaml') + logger.debug(f"Staging JEDI fix files from {jedi_fix_list_path}") + jedi_fix_list = parse_yamltmpl(jedi_fix_list_path, self.task_config) + FileHandler(jedi_fix_list).sync() + + # stage berror files + # copy BUMP files, otherwise it will assume ID matrix + if self.task_config.get('STATICB_TYPE', 'identity') in ['bump']: + FileHandler(self.get_berror_dict(self.task_config)).sync() + + # stage backgrounds + FileHandler(self.get_bkg_dict(AttrDict(self.task_config, **self.task_config))).sync() + + # generate variational YAML file + logger.debug(f"Generate variational YAML file: {self.task_config.fv3jedi_yaml}") + varda_yaml = parse_j2yaml(self.task_config['AEROVARYAML'], self.task_config) + save_as_yaml(varda_yaml, self.task_config.fv3jedi_yaml) + logger.info(f"Wrote variational YAML to: {self.task_config.fv3jedi_yaml}") + + # need output dir for diags and anl + logger.debug("Create empty output [anl, diags] directories to receive output from executable") + newdirs = [ + os.path.join(self.task_config['DATA'], 'anl'), + os.path.join(self.task_config['DATA'], 'diags'), + ] + FileHandler({'mkdir': newdirs}).sync() + + @logit(logger) + def execute(self: Analysis) -> None: + + chdir(self.task_config.DATA) + + exec_cmd = Executable(self.task_config.APRUN_AEROANL) + exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_var.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg(self.task_config.fv3jedi_yaml) + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd() + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exec_cmd}") + + pass + + @logit(logger) + def finalize(self: Analysis) -> None: + """Finalize a global aerosol analysis + + This method will finalize a global aerosol analysis using JEDI. + This includes: + - tarring up output diag files and place in ROTDIR + - copying the generated YAML file from initialize to the ROTDIR + - copying the guess files to the ROTDIR + - applying the increments to the original RESTART files + - moving the increment files to the ROTDIR + + Please note that some of these steps are temporary and will be modified + once the model is able to read aerosol tracer increments. 
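+
+        finalize is the last step of the usual task sequence; a minimal driver
+        sketch (hypothetical, each step is normally invoked from its own job
+        script) is:
+
+            aeroanl = AerosolAnalysis(config)
+            aeroanl.initialize()
+            aeroanl.execute()
+            aeroanl.finalize()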
+ """ + # ---- tar up diags + # path of output tar statfile + aerostat = os.path.join(self.task_config.COM_CHEM_ANALYSIS, f"{self.task_config['APREFIX']}aerostat") + + # get list of diag files to put in tarball + diags = glob.glob(os.path.join(self.task_config['DATA'], 'diags', 'diag*nc4')) + + # gzip the files first + for diagfile in diags: + with open(diagfile, 'rb') as f_in, gzip.open(f"{diagfile}.gz", 'wb') as f_out: + f_out.writelines(f_in) + + # open tar file for writing + with tarfile.open(aerostat, "w") as archive: + for diagfile in diags: + diaggzip = f"{diagfile}.gz" + archive.add(diaggzip, arcname=os.path.basename(diaggzip)) + + # copy full YAML from executable to ROTDIR + src = os.path.join(self.task_config['DATA'], f"{self.task_config['CDUMP']}.t{self.runtime_config['cyc']:02d}z.aerovar.yaml") + dest = os.path.join(self.task_config.COM_CHEM_ANALYSIS, f"{self.task_config['CDUMP']}.t{self.runtime_config['cyc']:02d}z.aerovar.yaml") + yaml_copy = { + 'mkdir': [self.task_config.COM_CHEM_ANALYSIS], + 'copy': [[src, dest]] + } + FileHandler(yaml_copy).sync() + + # ---- NOTE below is 'temporary', eventually we will not be using FMS RESTART formatted files + # ---- all of the rest of this method will need to be changed but requires model and JEDI changes + # ---- copy RESTART fv_tracer files for future reference + template = '{}.fv_tracer.res.tile{}.nc'.format(to_fv3time(self.task_config.current_cycle), '{tilenum}') + bkglist = [] + for itile in range(1, self.task_config.ntiles + 1): + tracer = template.format(tilenum=itile) + src = os.path.join(self.task_config.COM_ATMOS_RESTART_PREV, tracer) + dest = os.path.join(self.task_config.COM_CHEM_ANALYSIS, f'aeroges.{tracer}') + bkglist.append([src, dest]) + FileHandler({'copy': bkglist}).sync() + + # ---- add increments to RESTART files + logger.info('Adding increments to RESTART files') + self._add_fms_cube_sphere_increments() + + # ---- move increments to ROTDIR + logger.info('Moving increments to ROTDIR') + template = f'aeroinc.{to_fv3time(self.task_config.current_cycle)}.fv_tracer.res.tile{{tilenum}}.nc' + inclist = [] + for itile in range(1, self.task_config.ntiles + 1): + tracer = template.format(tilenum=itile) + src = os.path.join(self.task_config.DATA, 'anl', tracer) + dest = os.path.join(self.task_config.COM_CHEM_ANALYSIS, tracer) + inclist.append([src, dest]) + FileHandler({'copy': inclist}).sync() + + def clean(self): + super().clean() + + @logit(logger) + def _add_fms_cube_sphere_increments(self: Analysis) -> None: + """This method adds increments to RESTART files to get an analysis + NOTE this is only needed for now because the model cannot read aerosol increments. + This method will be assumed to be deprecated before this is implemented operationally + """ + # only need the fv_tracer files + template = f'{to_fv3time(self.task_config.current_cycle)}.fv_tracer.res.tile{{tilenum}}.nc' + inc_template = os.path.join(self.task_config.DATA, 'anl', 'aeroinc.' 
+ template) + bkg_template = os.path.join(self.task_config.COM_ATMOS_RESTART_PREV, template) + # get list of increment vars + incvars_list_path = os.path.join(self.task_config['HOMEgfs'], 'parm', 'parm_gdas', 'aeroanl_inc_vars.yaml') + incvars = YAMLFile(path=incvars_list_path)['incvars'] + super().add_fv3_increments(inc_template, bkg_template, incvars) + + @logit(logger) + def get_bkg_dict(self, task_config: Dict[str, Any]) -> Dict[str, List[str]]: + """Compile a dictionary of model background files to copy + + This method constructs a dictionary of FV3 RESTART files (coupler, core, tracer) + that are needed for global aerosol DA and returns said dictionary for use by the FileHandler class. + + Parameters + ---------- + task_config: Dict + a dictionary containing all of the configuration needed for the task + + Returns + ---------- + bkg_dict: Dict + a dictionary containing the list of model background files to copy for FileHandler + """ + # NOTE for now this is FV3 RESTART files and just assumed to be fh006 + + # get FV3 RESTART files, this will be a lot simpler when using history files + rst_dir = task_config.COM_ATMOS_RESTART_PREV + run_dir = os.path.join(task_config['DATA'], 'bkg') + + # Start accumulating list of background files to copy + bkglist = [] + + # aerosol DA needs coupler + basename = f'{to_fv3time(task_config.current_cycle)}.coupler.res' + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + # aerosol DA only needs core/tracer + for ftype in ['core', 'tracer']: + template = f'{to_fv3time(self.task_config.current_cycle)}.fv_{ftype}.res.tile{{tilenum}}.nc' + for itile in range(1, task_config.ntiles + 1): + basename = template.format(tilenum=itile) + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + bkg_dict = { + 'mkdir': [run_dir], + 'copy': bkglist, + } + return bkg_dict + + @logit(logger) + def get_berror_dict(self, config: Dict[str, Any]) -> Dict[str, List[str]]: + """Compile a dictionary of background error files to copy + + This method will construct a dictionary of BUMP background error files + for global aerosol DA and return said dictionary for use by the FileHandler class. + This dictionary contains coupler and fv_tracer files + for correlation and standard deviation as well as NICAS localization. + + Parameters + ---------- + config: Dict + a dictionary containing all of the configuration needed + + Returns + ---------- + berror_dict: Dict + a dictionary containing the list of background error files to copy for FileHandler + """ + # aerosol static-B needs nicas, cor_rh, cor_rv and stddev files. 
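+        # Everything is copied from BERROR_DATA_DIR into DATA/berror; the names
+        # built below follow these patterns (date stamp from BERROR_DATE via to_fv3time):
+        #   <date>.{cor_rh,cor_rv,stddev}.coupler.res
+        #   <date>.{cor_rh,cor_rv,stddev}.fv_tracer.res.tile<n>.nc   (one per tile)
+        #   nicas_aero_nicas_local_<nproc>-<nn>.nc   (one per NICAS file, nproc = ntiles * layout_x * layout_y)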
+ b_dir = config.BERROR_DATA_DIR + b_datestr = to_fv3time(config.BERROR_DATE) + berror_list = [] + + for ftype in ['cor_rh', 'cor_rv', 'stddev']: + coupler = f'{b_datestr}.{ftype}.coupler.res' + berror_list.append([ + os.path.join(b_dir, coupler), os.path.join(config.DATA, 'berror', coupler) + ]) + template = '{b_datestr}.{ftype}.fv_tracer.res.tile{{tilenum}}.nc' + for itile in range(1, config.ntiles + 1): + tracer = template.format(tilenum=itile) + berror_list.append([ + os.path.join(b_dir, tracer), os.path.join(config.DATA, 'berror', tracer) + ]) + + nproc = config.ntiles * config.layout_x * config.layout_y + for nn in range(1, nproc + 1): + berror_list.append([ + os.path.join(b_dir, f'nicas_aero_nicas_local_{nproc:06}-{nn:06}.nc'), + os.path.join(config.DATA, 'berror', f'nicas_aero_nicas_local_{nproc:06}-{nn:06}.nc') + ]) + berror_dict = { + 'mkdir': [os.path.join(config.DATA, 'berror')], + 'copy': berror_list, + } + return berror_dict diff --git a/ush/python/pygfs/task/analysis.py b/ush/python/pygfs/task/analysis.py new file mode 100644 index 0000000000..7c24c9cbdb --- /dev/null +++ b/ush/python/pygfs/task/analysis.py @@ -0,0 +1,201 @@ +#!/usr/bin/env python3 + +import os +from logging import getLogger +from netCDF4 import Dataset +from typing import List, Dict, Any + +from pygw.yaml_file import YAMLFile, parse_j2yaml, parse_yamltmpl +from pygw.file_utils import FileHandler +from pygw.template import Template, TemplateConstants +from pygw.logger import logit +from pygw.task import Task + +logger = getLogger(__name__.split('.')[-1]) + + +class Analysis(Task): + """Parent class for GDAS tasks + + The Analysis class is the parent class for all + Global Data Assimilation System (GDAS) tasks + directly related to peforming an analysis + """ + + def __init__(self, config: Dict[str, Any]) -> None: + super().__init__(config) + self.config.ntiles = 6 + + def initialize(self) -> None: + super().initialize() + # all analyses need to stage observations + obs_dict = self.get_obs_dict() + FileHandler(obs_dict).sync() + + # some analyses need to stage bias corrections + bias_dict = self.get_bias_dict() + FileHandler(bias_dict).sync() + + # link jedi executable to run directory + self.link_jediexe() + + @logit(logger) + def get_obs_dict(self: Task) -> Dict[str, Any]: + """Compile a dictionary of observation files to copy + + This method uses the OBS_LIST configuration variable to generate a dictionary + from a list of YAML files that specify what observation files are to be + copied to the run directory from the observation input directory + + Parameters + ---------- + + Returns + ---------- + obs_dict: Dict + a dictionary containing the list of observation files to copy for FileHandler + """ + logger.debug(f"OBS_LIST: {self.task_config['OBS_LIST']}") + obs_list_config = parse_j2yaml(self.task_config["OBS_LIST"], self.task_config) + logger.debug(f"obs_list_config: {obs_list_config}") + # get observers from master dictionary + observers = obs_list_config['observers'] + copylist = [] + for ob in observers: + obfile = ob['obs space']['obsdatain']['engine']['obsfile'] + basename = os.path.basename(obfile) + copylist.append([os.path.join(self.task_config['COM_OBS'], basename), obfile]) + obs_dict = { + 'mkdir': [os.path.join(self.runtime_config['DATA'], 'obs')], + 'copy': copylist + } + return obs_dict + + @logit(logger) + def get_bias_dict(self: Task) -> Dict[str, Any]: + """Compile a dictionary of observation files to copy + + This method uses the OBS_LIST configuration variable to generate a dictionary 
+ from a list of YAML files that specify what observation bias correction files + are to be copied to the run directory from the observation input directory + + Parameters + ---------- + + Returns + ---------- + bias_dict: Dict + a dictionary containing the list of observation bias files to copy for FileHandler + """ + logger.debug(f"OBS_LIST: {self.task_config['OBS_LIST']}") + obs_list_config = parse_j2yaml(self.task_config["OBS_LIST"], self.task_config) + logger.debug(f"obs_list_config: {obs_list_config}") + # get observers from master dictionary + observers = obs_list_config['observers'] + copylist = [] + for ob in observers: + if 'obs bias' in ob.keys(): + obfile = ob['obs bias']['input file'] + obdir = os.path.dirname(obfile) + basename = os.path.basename(obfile) + prefix = '.'.join(basename.split('.')[:-2]) + for file in ['satbias.nc4', 'satbias_cov.nc4', 'tlapse.txt']: + bfile = f"{prefix}.{file}" + copylist.append([os.path.join(self.task_config.COM_ATMOS_ANALYSIS_PREV, bfile), os.path.join(obdir, bfile)]) + + bias_dict = { + 'mkdir': [os.path.join(self.runtime_config.DATA, 'bc')], + 'copy': copylist + } + return bias_dict + + @logit(logger) + def add_fv3_increments(self, inc_file_tmpl: str, bkg_file_tmpl: str, incvars: List) -> None: + """Add cubed-sphere increments to cubed-sphere backgrounds + + Parameters + ---------- + inc_file_tmpl : str + template of the FV3 increment file of the form: 'filetype.tile{tilenum}.nc' + bkg_file_tmpl : str + template of the FV3 background file of the form: 'filetype.tile{tilenum}.nc' + incvars : List + List of increment variables to add to the background + """ + + for itile in range(1, self.config.ntiles + 1): + inc_path = inc_file_tmpl.format(tilenum=itile) + bkg_path = bkg_file_tmpl.format(tilenum=itile) + with Dataset(inc_path, mode='r') as incfile, Dataset(bkg_path, mode='a') as rstfile: + for vname in incvars: + increment = incfile.variables[vname][:] + bkg = rstfile.variables[vname][:] + anl = bkg + increment + rstfile.variables[vname][:] = anl[:] + try: + rstfile.variables[vname].delncattr('checksum') # remove the checksum so fv3 does not complain + except (AttributeError, RuntimeError): + pass # checksum is missing, move on + + @logit(logger) + def get_bkg_dict(self, task_config: Dict[str, Any]) -> Dict[str, List[str]]: + """Compile a dictionary of model background files to copy + + This method is a placeholder for now... will be possibly made generic at a later date + + Parameters + ---------- + task_config: Dict + a dictionary containing all of the configuration needed for the task + + Returns + ---------- + bkg_dict: Dict + a dictionary containing the list of model background files to copy for FileHandler + """ + bkg_dict = {'foo': 'bar'} + return bkg_dict + + @logit(logger) + def get_berror_dict(self, config: Dict[str, Any]) -> Dict[str, List[str]]: + """Compile a dictionary of background error files to copy + + This method is a placeholder for now... 
will be possibly made generic at a later date + + Parameters + ---------- + config: Dict + a dictionary containing all of the configuration needed + + Returns + ---------- + berror_dict: Dict + a dictionary containing the list of background error files to copy for FileHandler + """ + berror_dict = {'foo': 'bar'} + return berror_dict + + @logit(logger) + def link_jediexe(self: Task) -> None: + """Compile a dictionary of background error files to copy + + This method links a JEDI executable to the run directory + + Parameters + ---------- + Task: GDAS task + + Returns + ---------- + None + """ + exe_src = self.task_config.JEDIEXE + + # TODO: linking is not permitted per EE2. Needs work in JEDI to be able to copy the exec. + logger.debug(f"Link executable {exe_src} to DATA/") + exe_dest = os.path.join(self.task_config.DATA, os.path.basename(exe_src)) + if os.path.exists(exe_dest): + rm_p(exe_dest) + os.symlink(exe_src, exe_dest) + + return diff --git a/ush/python/pygfs/task/atm_analysis.py b/ush/python/pygfs/task/atm_analysis.py new file mode 100644 index 0000000000..3ab0ae3240 --- /dev/null +++ b/ush/python/pygfs/task/atm_analysis.py @@ -0,0 +1,435 @@ +#!/usr/bin/env python3 + +import os +import glob +import gzip +import tarfile +from logging import getLogger +from typing import Dict, List, Any + +from pygw.attrdict import AttrDict +from pygw.file_utils import FileHandler +from pygw.timetools import add_to_datetime, to_fv3time, to_timedelta, to_YMDH +from pygw.fsutils import rm_p, chdir +from pygw.yaml_file import parse_yamltmpl, parse_j2yaml, save_as_yaml +from pygw.logger import logit +from pygw.executable import Executable +from pygw.exceptions import WorkflowException +from pygfs.task.analysis import Analysis + +logger = getLogger(__name__.split('.')[-1]) + + +class AtmAnalysis(Analysis): + """ + Class for global atm analysis tasks + """ + @logit(logger, name="AtmAnalysis") + def __init__(self, config): + super().__init__(config) + + _res = int(self.config.CASE[1:]) + _res_anl = int(self.config.CASE_ANL[1:]) + _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config.assim_freq}H") / 2) + _fv3jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmvar.yaml") + + # Create a local dictionary that is repeatedly used across this class + local_dict = AttrDict( + { + 'npx_ges': _res + 1, + 'npy_ges': _res + 1, + 'npz_ges': self.config.LEVS - 1, + 'npz': self.config.LEVS - 1, + 'npx_anl': _res_anl + 1, + 'npy_anl': _res_anl + 1, + 'npz_anl': self.config.LEVS - 1, + 'ATM_WINDOW_BEGIN': _window_begin, + 'ATM_WINDOW_LENGTH': f"PT{self.config.assim_freq}H", + 'OPREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN + 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN + 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", + 'fv3jedi_yaml': _fv3jedi_yaml, + } + ) + + # task_config is everything that this task should need + self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict) + + @logit(logger) + def initialize(self: Analysis) -> None: + """Initialize a global atm analysis + + This method will initialize a global atm analysis using JEDI. 
+ This includes: + - staging CRTM fix files + - staging FV3-JEDI fix files + - staging B error files + - staging model backgrounds + - generating a YAML file for the JEDI executable + - creating output directories + """ + super().initialize() + + # stage CRTM fix files + crtm_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'parm_gdas', 'atm_crtm_coeff.yaml') + logger.debug(f"Staging CRTM fix files from {crtm_fix_list_path}") + crtm_fix_list = parse_yamltmpl(crtm_fix_list_path, self.task_config) + FileHandler(crtm_fix_list).sync() + + # stage fix files + jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'parm_gdas', 'atm_jedi_fix.yaml') + logger.debug(f"Staging JEDI fix files from {jedi_fix_list_path}") + jedi_fix_list = parse_yamltmpl(jedi_fix_list_path, self.task_config) + FileHandler(jedi_fix_list).sync() + + # stage berror files + # copy static background error files, otherwise it will assume ID matrix + logger.debug(f"Stage files for STATICB_TYPE {self.task_config.STATICB_TYPE}") + FileHandler(self.get_berror_dict(self.task_config)).sync() + + # stage backgrounds + FileHandler(self.get_bkg_dict(AttrDict(self.task_config))).sync() + + # generate variational YAML file + logger.debug(f"Generate variational YAML file: {self.task_config.fv3jedi_yaml}") + varda_yaml = parse_j2yaml(self.task_config.ATMVARYAML, self.task_config) + save_as_yaml(varda_yaml, self.task_config.fv3jedi_yaml) + logger.info(f"Wrote variational YAML to: {self.task_config.fv3jedi_yaml}") + + # need output dir for diags and anl + logger.debug("Create empty output [anl, diags] directories to receive output from executable") + newdirs = [ + os.path.join(self.task_config.DATA, 'anl'), + os.path.join(self.task_config.DATA, 'diags'), + ] + FileHandler({'mkdir': newdirs}).sync() + + @logit(logger) + def execute(self: Analysis) -> None: + + chdir(self.task_config.DATA) + + exec_cmd = Executable(self.task_config.APRUN_ATMANL) + exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_var.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg(self.task_config.fv3jedi_yaml) + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd() + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exec_cmd}") + + pass + + @logit(logger) + def finalize(self: Analysis) -> None: + """Finalize a global atm analysis + + This method will finalize a global atm analysis using JEDI. 
+ This includes: + - tar output diag files and place in ROTDIR + - copy the generated YAML file from initialize to the ROTDIR + - copy the updated bias correction files to ROTDIR + - write UFS model readable atm incrment file + + """ + # ---- tar up diags + # path of output tar statfile + atmstat = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f"{self.task_config.APREFIX}atmstat") + + # get list of diag files to put in tarball + diags = glob.glob(os.path.join(self.task_config.DATA, 'diags', 'diag*nc4')) + + logger.info(f"Compressing {len(diags)} diag files to {atmstat}.gz") + + # gzip the files first + logger.debug(f"Gzipping {len(diags)} diag files") + for diagfile in diags: + with open(diagfile, 'rb') as f_in, gzip.open(f"{diagfile}.gz", 'wb') as f_out: + f_out.writelines(f_in) + + # open tar file for writing + logger.debug(f"Creating tar file {atmstat} with {len(diags)} gzipped diag files") + with tarfile.open(atmstat, "w") as archive: + for diagfile in diags: + diaggzip = f"{diagfile}.gz" + archive.add(diaggzip, arcname=os.path.basename(diaggzip)) + + # copy full YAML from executable to ROTDIR + logger.info(f"Copying {self.task_config.fv3jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS}") + src = os.path.join(self.task_config.DATA, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmvar.yaml") + dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmvar.yaml") + logger.debug(f"Copying {src} to {dest}") + yaml_copy = { + 'mkdir': [self.task_config.COM_ATMOS_ANALYSIS], + 'copy': [[src, dest]] + } + FileHandler(yaml_copy).sync() + + # copy bias correction files to ROTDIR + logger.info("Copy bias correction files from DATA/ to COM/") + biasdir = os.path.join(self.task_config.DATA, 'bc') + biasls = os.listdir(biasdir) + biaslist = [] + for bfile in biasls: + src = os.path.join(biasdir, bfile) + dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, bfile) + biaslist.append([src, dest]) + + gprefix = f"{self.task_config.GPREFIX}" + gsuffix = f"{to_YMDH(self.task_config.previous_cycle)}" + ".txt" + aprefix = f"{self.task_config.APREFIX}" + asuffix = f"{to_YMDH(self.task_config.current_cycle)}" + ".txt" + + logger.info(f"Copying {gprefix}*{gsuffix} from DATA/ to COM/ as {aprefix}*{asuffix}") + obsdir = os.path.join(self.task_config.DATA, 'obs') + obsls = os.listdir(obsdir) + for ofile in obsls: + if ofile.endswith(".txt"): + src = os.path.join(obsdir, ofile) + tfile = ofile.replace(gprefix, aprefix) + tfile = tfile.replace(gsuffix, asuffix) + dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, tfile) + biaslist.append([src, dest]) + + bias_copy = { + 'mkdir': [self.task_config.COM_ATMOS_ANALYSIS], + 'copy': biaslist, + } + FileHandler(bias_copy).sync() + + # Create UFS model readable atm increment file from UFS-DA atm increment + logger.info("Create UFS model readable atm increment file from UFS-DA atm increment") + self.jedi2fv3inc() + + def clean(self): + super().clean() + + @logit(logger) + def get_bkg_dict(self, task_config: Dict[str, Any]) -> Dict[str, List[str]]: + """Compile a dictionary of model background files to copy + + This method constructs a dictionary of FV3 restart files (coupler, core, tracer) + that are needed for global atm DA and returns said dictionary for use by the FileHandler class. 
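+
+        The returned structure is the usual FileHandler dictionary, for example
+        (illustrative paths, hypothetical 2021032306 cycle):
+
+            {'mkdir': ['<DATA>/bkg'],
+             'copy': [['<COM_ATMOS_RESTART_PREV>/20210323.060000.coupler.res',
+                       '<DATA>/bkg/20210323.060000.coupler.res'],
+                      ...]}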
+ + Parameters + ---------- + task_config: Dict + a dictionary containing all of the configuration needed for the task + + Returns + ---------- + bkg_dict: Dict + a dictionary containing the list of model background files to copy for FileHandler + """ + # NOTE for now this is FV3 restart files and just assumed to be fh006 + + # get FV3 restart files, this will be a lot simpler when using history files + rst_dir = os.path.join(task_config.COM_ATMOS_RESTART_PREV) # for now, option later? + run_dir = os.path.join(task_config.DATA, 'bkg') + + # Start accumulating list of background files to copy + bkglist = [] + + # atm DA needs coupler + basename = f'{to_fv3time(task_config.current_cycle)}.coupler.res' + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + # atm DA needs core, srf_wnd, tracer, phy_data, sfc_data + for ftype in ['core', 'srf_wnd', 'tracer']: + template = f'{to_fv3time(self.task_config.current_cycle)}.fv_{ftype}.res.tile{{tilenum}}.nc' + for itile in range(1, task_config.ntiles + 1): + basename = template.format(tilenum=itile) + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + for ftype in ['phy_data', 'sfc_data']: + template = f'{to_fv3time(self.task_config.current_cycle)}.{ftype}.tile{{tilenum}}.nc' + for itile in range(1, task_config.ntiles + 1): + basename = template.format(tilenum=itile) + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + bkg_dict = { + 'mkdir': [run_dir], + 'copy': bkglist, + } + return bkg_dict + + @logit(logger) + def get_berror_dict(self, config: Dict[str, Any]) -> Dict[str, List[str]]: + """Compile a dictionary of background error files to copy + + This method will construct a dictionary of either bump of gsibec background + error files for global atm DA and return said dictionary for use by the + FileHandler class. + + Parameters + ---------- + config: Dict + a dictionary containing all of the configuration needed + + Returns + ---------- + berror_dict: Dict + a dictionary containing the list of atm background error files to copy for FileHandler + """ + SUPPORTED_BERROR_STATIC_MAP = {'identity': self._get_berror_dict_identity, + 'bump': self._get_berror_dict_bump, + 'gsibec': self._get_berror_dict_gsibec} + + try: + berror_dict = SUPPORTED_BERROR_STATIC_MAP[config.STATICB_TYPE](config) + except KeyError: + raise KeyError(f"{config.STATICB_TYPE} is not a supported background error type.\n" + + f"Currently supported background error types are:\n" + + f'{" | ".join(SUPPORTED_BERROR_STATIC_MAP.keys())}') + + return berror_dict + + @staticmethod + @logit(logger) + def _get_berror_dict_identity(config: Dict[str, Any]) -> Dict[str, List[str]]: + """Identity BE does not need any files for staging. + + This is a private method and should not be accessed directly. + + Parameters + ---------- + config: Dict + a dictionary containing all of the configuration needed + Returns + ---------- + berror_dict: Dict + Empty dictionary [identity BE needs not files to stage] + """ + logger.info(f"Identity background error does not use staged files. Return empty dictionary") + return {} + + @staticmethod + @logit(logger) + def _get_berror_dict_bump(config: Dict[str, Any]) -> Dict[str, List[str]]: + """Compile a dictionary of atm bump background error files to copy + + This method will construct a dictionary of atm bump background error + files for global atm DA and return said dictionary to the parent + + This is a private method and should not be accessed directly. 
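+
+        Illustrative names staged from BERROR_DATA_DIR into DATA/berror
+        (hypothetical date stamp; the NICAS file count is
+        ntiles * layout_x * layout_y, e.g. 216 for a 6x6 layout on 6 tiles):
+
+            20210323.060000.cor_rh.coupler.res
+            20210323.060000.cor_rh.fv_tracer.res.tile1.nc ... tile6.nc
+            nicas_aero_nicas_local_000216-000001.nc ... nicas_aero_nicas_local_000216-000216.nc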
+ + Parameters + ---------- + config: Dict + a dictionary containing all of the configuration needed + + Returns + ---------- + berror_dict: Dict + a dictionary of atm bump background error files to copy for FileHandler + """ + # BUMP atm static-B needs nicas, cor_rh, cor_rv and stddev files. + b_dir = config.BERROR_DATA_DIR + b_datestr = to_fv3time(config.BERROR_DATE) + berror_list = [] + for ftype in ['cor_rh', 'cor_rv', 'stddev']: + coupler = f'{b_datestr}.{ftype}.coupler.res' + berror_list.append([ + os.path.join(b_dir, coupler), os.path.join(config.DATA, 'berror', coupler) + ]) + + template = '{b_datestr}.{ftype}.fv_tracer.res.tile{{tilenum}}.nc' + for itile in range(1, config.ntiles + 1): + tracer = template.format(tilenum=itile) + berror_list.append([ + os.path.join(b_dir, tracer), os.path.join(config.DATA, 'berror', tracer) + ]) + + nproc = config.ntiles * config.layout_x * config.layout_y + for nn in range(1, nproc + 1): + berror_list.append([ + os.path.join(b_dir, f'nicas_aero_nicas_local_{nproc:06}-{nn:06}.nc'), + os.path.join(config.DATA, 'berror', f'nicas_aero_nicas_local_{nproc:06}-{nn:06}.nc') + ]) + + # create dictionary of background error files to stage + berror_dict = { + 'mkdir': [os.path.join(config.DATA, 'berror')], + 'copy': berror_list, + } + return berror_dict + + @staticmethod + @logit(logger) + def _get_berror_dict_gsibec(config: Dict[str, Any]) -> Dict[str, List[str]]: + """Compile a dictionary of atm gsibec background error files to copy + + This method will construct a dictionary of atm gsibec background error + files for global atm DA and return said dictionary to the parent + + This is a private method and should not be accessed directly. + + Parameters + ---------- + config: Dict + a dictionary containing all of the configuration needed + + Returns + ---------- + berror_dict: Dict + a dictionary of atm gsibec background error files to copy for FileHandler + """ + # GSI atm static-B needs namelist and coefficient files. + b_dir = os.path.join(config.HOMEgfs, 'fix', 'gdas', 'gsibec', config.CASE_ANL) + berror_list = [] + for ftype in ['gfs_gsi_global.nml', 'gsi-coeffs-gfs-global.nc4']: + berror_list.append([ + os.path.join(b_dir, ftype), + os.path.join(config.DATA, 'berror', ftype) + ]) + + # create dictionary of background error files to stage + berror_dict = { + 'mkdir': [os.path.join(config.DATA, 'berror')], + 'copy': berror_list, + } + return berror_dict + + @logit(logger) + def jedi2fv3inc(self: Analysis) -> None: + """Generate UFS model readable analysis increment + + This method writes a UFS DA atm increment in UFS model readable format. + This includes: + - write UFS-DA atm increments using variable names expected by UFS model + - compute and write delp increment + - compute and write hydrostatic delz increment + + Please note that some of these steps are temporary and will be modified + once the modle is able to directly read atm increments. 
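+
+        The conversion itself is delegated to ush/jediinc2fv3.py; the command
+        assembled below is equivalent to (illustrative file names):
+
+            jediinc2fv3.py <atmges_fv3> <atminc_jedi> <atminc_fv3>
+
+        where atmges_fv3 is the previous-cycle atmf006 guess, atminc_jedi is the
+        UFS-DA increment under DATA/anl, and atminc_fv3 is the model-readable
+        increment written to COM_ATMOS_ANALYSIS.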
+ + """ + # Select the atm guess file based on the analysis and background resolutions + # Fields from the atm guess are used to compute the delp and delz increments + case_anl = int(self.task_config.CASE_ANL[1:]) + case = int(self.task_config.CASE[1:]) + + file = f"{self.task_config.GPREFIX}" + "atmf006" + f"{'' if case_anl == case else '.ensres'}" + ".nc" + atmges_fv3 = os.path.join(self.task_config.COM_ATMOS_HISTORY_PREV, file) + + # Set the path/name to the input UFS-DA atm increment file (atminc_jedi) + # and the output UFS model atm increment file (atminc_fv3) + cdate = to_fv3time(self.task_config.current_cycle) + cdate_inc = cdate.replace('.', '_') + atminc_jedi = os.path.join(self.task_config.DATA, 'anl', f'atminc.{cdate_inc}z.nc4') + atminc_fv3 = os.path.join(self.task_config.COM_ATMOS_ANALYSIS, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atminc.nc") + + # Reference the python script which does the actual work + incpy = os.path.join(self.task_config.HOMEgfs, 'ush/jediinc2fv3.py') + + # Execute incpy to create the UFS model atm increment file + cmd = Executable(incpy) + cmd.add_default_arg(atmges_fv3) + cmd.add_default_arg(atminc_jedi) + cmd.add_default_arg(atminc_fv3) + logger.debug(f"Executing {cmd}") + cmd(output='stdout', error='stderr') diff --git a/ush/python/pygfs/task/atmens_analysis.py b/ush/python/pygfs/task/atmens_analysis.py new file mode 100644 index 0000000000..c5c7e5b145 --- /dev/null +++ b/ush/python/pygfs/task/atmens_analysis.py @@ -0,0 +1,347 @@ +#!/usr/bin/env python3 + +import os +import glob +import gzip +import tarfile +from logging import getLogger +from typing import Dict, List, Any + +from pygw.attrdict import AttrDict +from pygw.file_utils import FileHandler +from pygw.timetools import add_to_datetime, to_fv3time, to_timedelta, to_YMDH, to_YMD +from pygw.fsutils import rm_p, chdir +from pygw.yaml_file import parse_yamltmpl, parse_j2yaml, save_as_yaml +from pygw.logger import logit +from pygw.executable import Executable +from pygw.exceptions import WorkflowException +from pygw.template import Template, TemplateConstants +from pygfs.task.analysis import Analysis + +logger = getLogger(__name__.split('.')[-1]) + + +class AtmEnsAnalysis(Analysis): + """ + Class for global atmens analysis tasks + """ + @logit(logger, name="AtmEnsAnalysis") + def __init__(self, config): + super().__init__(config) + + _res = int(self.config.CASE_ENS[1:]) + _window_begin = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config.assim_freq}H") / 2) + _fv3jedi_yaml = os.path.join(self.runtime_config.DATA, f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.atmens.yaml") + + # Create a local dictionary that is repeatedly used across this class + local_dict = AttrDict( + { + 'npx_ges': _res + 1, + 'npy_ges': _res + 1, + 'npz_ges': self.config.LEVS - 1, + 'npz': self.config.LEVS - 1, + 'ATM_WINDOW_BEGIN': _window_begin, + 'ATM_WINDOW_LENGTH': f"PT{self.config.assim_freq}H", + 'OPREFIX': f"{self.config.EUPD_CYC}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN + 'APREFIX': f"{self.runtime_config.CDUMP}.t{self.runtime_config.cyc:02d}z.", # TODO: CDUMP is being replaced by RUN + 'GPREFIX': f"gdas.t{self.runtime_config.previous_cycle.hour:02d}z.", + 'fv3jedi_yaml': _fv3jedi_yaml, + } + ) + + # task_config is everything that this task should need + self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict) + + @logit(logger) + def initialize(self: Analysis) -> None: + """Initialize a global atmens 
analysis + + This method will initialize a global atmens analysis using JEDI. + This includes: + - staging CRTM fix files + - staging FV3-JEDI fix files + - staging model backgrounds + - generating a YAML file for the JEDI executable + - creating output directories + + Parameters + ---------- + Analysis: parent class for GDAS task + + Returns + ---------- + None + """ + super().initialize() + + # Make member directories in DATA for background and in DATA and ROTDIR for analysis files + # create template dictionary for output member analysis directories + template_inc = self.task_config.COM_ATMOS_ANALYSIS_TMPL + tmpl_inc_dict = { + 'ROTDIR': self.task_config.ROTDIR, + 'RUN': self.task_config.RUN, + 'YMD': to_YMD(self.task_config.current_cycle), + 'HH': self.task_config.current_cycle.strftime('%H') + } + dirlist = [] + for imem in range(1, self.task_config.NMEM_ENS + 1): + dirlist.append(os.path.join(self.task_config.DATA, 'bkg', f'mem{imem:03d}')) + dirlist.append(os.path.join(self.task_config.DATA, 'anl', f'mem{imem:03d}')) + + # create output directory path for member analysis + tmpl_inc_dict['MEMDIR'] = f"mem{imem:03d}" + incdir = Template.substitute_structure(template_inc, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_inc_dict.get) + dirlist.append(incdir) + + FileHandler({'mkdir': dirlist}).sync() + + # stage CRTM fix files + crtm_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'parm_gdas', 'atm_crtm_coeff.yaml') + logger.debug(f"Staging CRTM fix files from {crtm_fix_list_path}") + crtm_fix_list = parse_yamltmpl(crtm_fix_list_path, self.task_config) + FileHandler(crtm_fix_list).sync() + + # stage fix files + jedi_fix_list_path = os.path.join(self.task_config.HOMEgfs, 'parm', 'parm_gdas', 'atm_jedi_fix.yaml') + logger.debug(f"Staging JEDI fix files from {jedi_fix_list_path}") + jedi_fix_list = parse_yamltmpl(jedi_fix_list_path, self.task_config) + FileHandler(jedi_fix_list).sync() + + # stage backgrounds + FileHandler(self.get_bkg_dict()).sync() + + # generate ensemble da YAML file + logger.debug(f"Generate ensemble da YAML file: {self.task_config.fv3jedi_yaml}") + ensda_yaml = parse_j2yaml(self.task_config.ATMENSYAML, self.task_config) + save_as_yaml(ensda_yaml, self.task_config.fv3jedi_yaml) + logger.info(f"Wrote ensemble da YAML to: {self.task_config.fv3jedi_yaml}") + + # need output dir for diags and anl + logger.debug("Create empty output [anl, diags] directories to receive output from executable") + newdirs = [ + os.path.join(self.task_config.DATA, 'anl'), + os.path.join(self.task_config.DATA, 'diags'), + ] + FileHandler({'mkdir': newdirs}).sync() + + @logit(logger) + def execute(self: Analysis) -> None: + """Execute a global atmens analysis + + This method will execute a global atmens analysis using JEDI. 
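The per-member output directories above come from substituting a `${...}`-style COM template. Because the exact shape of `COM_ATMOS_ANALYSIS_TMPL` is configuration-dependent, the following stand-alone sketch reproduces the idea with `string.Template` from the standard library; the template string and paths are hypothetical.

```python
from string import Template  # stdlib stand-in for pygw.template with DOLLAR_CURLY_BRACE

# Hypothetical COM template; the real value comes from COM_ATMOS_ANALYSIS_TMPL.
template_inc = "${ROTDIR}/${RUN}.${YMD}/${HH}/${MEMDIR}/analysis/atmos"

tmpl_inc_dict = {'ROTDIR': '/scratch/ROTDIRS/expt', 'RUN': 'enkfgdas',
                 'YMD': '20230601', 'HH': '00'}

dirlist = []
for imem in range(1, 4):  # three members for illustration; the task loops to NMEM_ENS
    tmpl_inc_dict['MEMDIR'] = f"mem{imem:03d}"
    dirlist.append(Template(template_inc).substitute(tmpl_inc_dict))

print(dirlist[0])  # -> /scratch/ROTDIRS/expt/enkfgdas.20230601/00/mem001/analysis/atmos
```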
+ This includes: + - changing to the run directory + - running the global atmens analysis executable + + Parameters + ---------- + Analysis: parent class for GDAS task + + Returns + ---------- + None + """ + chdir(self.task_config.DATA) + + exec_cmd = Executable(self.task_config.APRUN_ATMENSANL) + exec_name = os.path.join(self.task_config.DATA, 'fv3jedi_letkf.x') + exec_cmd.add_default_arg(exec_name) + exec_cmd.add_default_arg(self.task_config.fv3jedi_yaml) + + try: + logger.debug(f"Executing {exec_cmd}") + exec_cmd() + except OSError: + raise OSError(f"Failed to execute {exec_cmd}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exec_cmd}") + + pass + + @logit(logger) + def finalize(self: Analysis) -> None: + """Finalize a global atmens analysis + + This method will finalize a global atmens analysis using JEDI. + This includes: + - tar output diag files and place in ROTDIR + - copy the generated YAML file from initialize to the ROTDIR + - write UFS model readable atm incrment file + + Parameters + ---------- + Analysis: parent class for GDAS task + + Returns + ---------- + None + """ + # ---- tar up diags + # path of output tar statfile + atmensstat = os.path.join(self.task_config.COM_ATMOS_ANALYSIS_ENS, f"{self.task_config.APREFIX}atmensstat") + + # get list of diag files to put in tarball + diags = glob.glob(os.path.join(self.task_config.DATA, 'diags', 'diag*nc4')) + + logger.info(f"Compressing {len(diags)} diag files to {atmensstat}.gz") + + # gzip the files first + logger.debug(f"Gzipping {len(diags)} diag files") + for diagfile in diags: + with open(diagfile, 'rb') as f_in, gzip.open(f"{diagfile}.gz", 'wb') as f_out: + f_out.writelines(f_in) + + # open tar file for writing + logger.debug(f"Creating tar file {atmensstat} with {len(diags)} gzipped diag files") + with tarfile.open(atmensstat, "w") as archive: + for diagfile in diags: + diaggzip = f"{diagfile}.gz" + archive.add(diaggzip, arcname=os.path.basename(diaggzip)) + + # copy full YAML from executable to ROTDIR + logger.info(f"Copying {self.task_config.fv3jedi_yaml} to {self.task_config.COM_ATMOS_ANALYSIS_ENS}") + src = os.path.join(self.task_config.DATA, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmens.yaml") + dest = os.path.join(self.task_config.COM_ATMOS_ANALYSIS_ENS, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atmens.yaml") + logger.debug(f"Copying {src} to {dest}") + yaml_copy = { + 'mkdir': [self.task_config.COM_ATMOS_ANALYSIS_ENS], + 'copy': [[src, dest]] + } + FileHandler(yaml_copy).sync() + + # Create UFS model readable atm increment file from UFS-DA atm increment + logger.info("Create UFS model readable atm increment file from UFS-DA atm increment") + self.jedi2fv3inc() + + def clean(self): + super().clean() + + @logit(logger) + def jedi2fv3inc(self: Analysis) -> None: + """Generate UFS model readable analysis increment + + This method writes a UFS DA atm increment in UFS model readable format. + This includes: + - write UFS-DA atm increments using variable names expected by UFS model + - compute and write delp increment + - compute and write hydrostatic delz increment + + Please note that some of these steps are temporary and will be modified + once the modle is able to directly read atm increments. 
+ + Parameters + ---------- + Analysis: parent class for GDAS task + + Returns + ---------- + None + """ + # Select the atm guess file based on the analysis and background resolutions + # Fields from the atm guess are used to compute the delp and delz increments + cdate = to_fv3time(self.task_config.current_cycle) + cdate_inc = cdate.replace('.', '_') + + # Reference the python script which does the actual work + incpy = os.path.join(self.task_config.HOMEgfs, 'ush/jediinc2fv3.py') + + # create template dictionaries + template_inc = self.task_config.COM_ATMOS_ANALYSIS_TMPL + tmpl_inc_dict = { + 'ROTDIR': self.task_config.ROTDIR, + 'RUN': self.task_config.RUN, + 'YMD': to_YMD(self.task_config.current_cycle), + 'HH': self.task_config.current_cycle.strftime('%H') + } + + template_ges = self.task_config.COM_ATMOS_HISTORY_TMPL + tmpl_ges_dict = { + 'ROTDIR': self.task_config.ROTDIR, + 'RUN': self.task_config.RUN, + 'YMD': to_YMD(self.task_config.previous_cycle), + 'HH': self.task_config.previous_cycle.strftime('%H') + } + + # loop over ensemble members + for imem in range(1, self.task_config.NMEM_ENS + 1): + memchar = f"mem{imem:03d}" + + # create output path for member analysis increment + tmpl_inc_dict['MEMDIR'] = memchar + incdir = Template.substitute_structure(template_inc, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_inc_dict.get) + + # rewrite UFS-DA atmens increments + tmpl_ges_dict['MEMDIR'] = memchar + gesdir = Template.substitute_structure(template_ges, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_ges_dict.get) + atmges_fv3 = os.path.join(gesdir, f"{self.task_config.CDUMP}.t{self.task_config.previous_cycle.hour:02d}z.atmf006.nc") + atminc_jedi = os.path.join(self.task_config.DATA, 'anl', memchar, f'atminc.{cdate_inc}z.nc4') + atminc_fv3 = os.path.join(incdir, f"{self.task_config.CDUMP}.t{self.task_config.cyc:02d}z.atminc.nc") + + # Execute incpy to create the UFS model atm increment file + # TODO: use MPMD or parallelize with mpi4py + # See https://github.com/NOAA-EMC/global-workflow/pull/1373#discussion_r1173060656 + cmd = Executable(incpy) + cmd.add_default_arg(atmges_fv3) + cmd.add_default_arg(atminc_jedi) + cmd.add_default_arg(atminc_fv3) + logger.debug(f"Executing {cmd}") + cmd(output='stdout', error='stderr') + + @logit(logger) + def get_bkg_dict(self: Analysis) -> Dict[str, List[str]]: + """Compile a dictionary of model background files to copy + + This method constructs a dictionary of ensemble FV3 restart files (coupler, core, tracer) + that are needed for global atmens DA and returns said dictionary for use by the FileHandler class. 
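For each member, the increment conversion above boils down to one call of `ush/jediinc2fv3.py` with three positional arguments. A rough illustration of how that command line is assembled with `pygw.executable.Executable` follows; the paths are hypothetical and it assumes pygw is importable.

```python
from pygw.executable import Executable  # assumes pygw is importable

# Hypothetical per-member paths; the task derives these from task_config.
incpy = "ush/jediinc2fv3.py"
atmges_fv3 = "bkg/mem001/gdas.t18z.atmf006.nc"
atminc_jedi = "anl/mem001/atminc.20230601_000000z.nc4"
atminc_fv3 = "mem001/analysis/gdas.t00z.atminc.nc"

cmd = Executable(incpy)
for arg in (atmges_fv3, atminc_jedi, atminc_fv3):
    cmd.add_default_arg(arg)

# .command shows the assembled command line; cmd(output='stdout', error='stderr')
# would run it and write the streams to files named 'stdout' and 'stderr'.
print(cmd.command)
```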
+ + Parameters + ---------- + None + + Returns + ---------- + bkg_dict: Dict + a dictionary containing the list of model background files to copy for FileHandler + """ + # NOTE for now this is FV3 restart files and just assumed to be fh006 + # loop over ensemble members + rstlist = [] + bkglist = [] + + # get FV3 restart files, this will be a lot simpler when using history files + template_res = self.task_config.COM_ATMOS_RESTART_TMPL + tmpl_res_dict = { + 'ROTDIR': self.task_config.ROTDIR, + 'RUN': self.task_config.RUN, + 'YMD': to_YMD(self.task_config.previous_cycle), + 'HH': self.task_config.previous_cycle.strftime('%H'), + 'MEMDIR': None + } + + for imem in range(1, self.task_config.NMEM_ENS + 1): + memchar = f"mem{imem:03d}" + + # get FV3 restart files, this will be a lot simpler when using history files + tmpl_res_dict['MEMDIR'] = memchar + rst_dir = Template.substitute_structure(template_res, TemplateConstants.DOLLAR_CURLY_BRACE, tmpl_res_dict.get) + rstlist.append(rst_dir) + + run_dir = os.path.join(self.task_config.DATA, 'bkg', memchar) + + # atmens DA needs coupler + basename = f'{to_fv3time(self.task_config.current_cycle)}.coupler.res' + bkglist.append([os.path.join(rst_dir, basename), os.path.join(self.task_config.DATA, 'bkg', memchar, basename)]) + + # atmens DA needs core, srf_wnd, tracer, phy_data, sfc_data + for ftype in ['fv_core.res', 'fv_srf_wnd.res', 'fv_tracer.res', 'phy_data', 'sfc_data']: + template = f'{to_fv3time(self.task_config.current_cycle)}.{ftype}.tile{{tilenum}}.nc' + for itile in range(1, self.task_config.ntiles + 1): + basename = template.format(tilenum=itile) + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + bkg_dict = { + 'mkdir': rstlist, + 'copy': bkglist, + } + + return bkg_dict diff --git a/ush/python/pygfs/task/gfs_forecast.py b/ush/python/pygfs/task/gfs_forecast.py new file mode 100644 index 0000000000..3527c623e0 --- /dev/null +++ b/ush/python/pygfs/task/gfs_forecast.py @@ -0,0 +1,35 @@ +import os +import logging +from typing import Dict, Any + +from pygw.logger import logit +from pygw.task import Task +from pygfs.ufswm.gfs import GFS + +logger = logging.getLogger(__name__.split('.')[-1]) + + +class GFSForecast(Task): + """ + UFS-weather-model forecast task for the GFS + """ + + @logit(logger, name="GFSForecast") + def __init__(self, config: Dict[str, Any], *args, **kwargs): + """ + Parameters + ---------- + config : Dict + dictionary object containing configuration from environment + + *args : tuple + Additional arguments to `Task` + + **kwargs : dict, optional + Extra keyword arguments to `Task` + """ + + super().__init__(config, *args, **kwargs) + + # Create and initialize the GFS variant of the UFS + self.gfs = GFS(config) diff --git a/ush/python/pygfs/task/land_analysis.py b/ush/python/pygfs/task/land_analysis.py new file mode 100644 index 0000000000..0db6075d64 --- /dev/null +++ b/ush/python/pygfs/task/land_analysis.py @@ -0,0 +1,182 @@ +#!/usr/bin/env python3 + +import os +from logging import getLogger +from typing import Dict, List +from pprint import pformat + +from pygw.attrdict import AttrDict +from pygw.file_utils import FileHandler +from pygw.timetools import to_fv3time, to_YMD, to_YMDH +from pygw.fsutils import rm_p +from pygw.yaml_file import parse_j2yaml +from pygw.jinja import Jinja +from pygw.logger import logit +from pygw.executable import Executable +from pygw.exceptions import WorkflowException +from pygfs.task.analysis import Analysis + +logger = getLogger(__name__.split('.')[-1]) + + 
+class LandAnalysis(Analysis): + """ + Class for global land analysis tasks + """ + @logit(logger, name="LandAnalysis") + def __init__(self, config): + super().__init__(config) + + # Create a local dictionary that is repeatedly used across this class + local_dict = AttrDict( + { + 'OPREFIX': f"{self.runtime_config.RUN}.t{self.runtime_config.cyc:02d}z.", + } + ) + + # task_config is everything that this task should need + self.task_config = AttrDict(**self.config, **self.runtime_config, **local_dict) + + @logit(logger) + def prepare_IMS(self: Analysis) -> None: + """Prepare the IMS data for a global land analysis + + This method will prepare IMS data for a global land analysis using JEDI. + This includes: + - staging model backgrounds + - processing raw IMS observation data and prepare for conversion to IODA + - creating IMS snowdepth data in IODA format. + + Parameters + ---------- + Analysis: parent class for GDAS task + + Returns + ---------- + None + """ + + # create a temporary dict of all keys needed in this method + cfg = AttrDict() + keys = ['DATA', 'current_cycle', 'COM_OBS', 'COM_ATMOS_RESTART_PREV', + 'OPREFIX', 'CASE', 'ntiles'] + for key in keys: + cfg[key] = self.task_config[key] + + # stage backgrounds + logger.info("Staging backgrounds") + FileHandler(self.get_bkg_dict(cfg)).sync() + + # Read and render the IMS_OBS_LIST yaml + logger.info(f"Reading {self.task_config.IMS_OBS_LIST}") + prep_ims_config = parse_j2yaml(self.task_config.IMS_OBS_LIST, cfg) + logger.debug(f"{self.task_config.IMS_OBS_LIST}:\n{pformat(prep_ims_config)}") + + # copy the IMS obs files from COM_OBS to DATA/obs + logger.info("Copying IMS obs for CALCFIMSEXE") + FileHandler(prep_ims_config.calcfims).sync() + + logger.info("Create namelist for CALCFIMSEXE") + nml_template = self.task_config.FIMS_NML_TMPL + nml_data = Jinja(nml_template, cfg).render + logger.debug(f"fims.nml:\n{nml_data}") + + nml_file = os.path.join(self.task_config.DATA, "fims.nml") + with open(nml_file, "w") as fho: + fho.write(nml_data) + + logger.info("Link CALCFIMSEXE into DATA/") + exe_src = self.task_config.CALCFIMSEXE + exe_dest = os.path.join(self.task_config.DATA, os.path.basename(exe_src)) + if os.path.exists(exe_dest): + rm_p(exe_dest) + os.symlink(exe_src, exe_dest) + + # execute CALCFIMSEXE to calculate IMS snowdepth + exe = Executable(self.task_config.APRUN_CALCFIMS) + exe.add_default_arg(os.path.join(self.task_config.DATA, os.path.basename(exe_src))) + try: + logger.debug(f"Executing {exe}") + exe() + except OSError: + raise OSError(f"Failed to execute {exe}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exe}") + + # Ensure the snow depth IMS file is produced by the above executable + input_file = f"IMSscf.{to_YMD(self.task_config.PDY)}.{self.task_config.CASE}_oro_data.nc" + if not os.path.isfile(f"{os.path.join(self.task_config.DATA, input_file)}"): + logger.exception(f"{self.task_config.CALCFIMSEXE} failed to produce {input_file}") + raise FileNotFoundError(f"{os.path.join(self.task_config.DATA, input_file)}") + + # Execute imspy to create the IMS obs data in IODA format + logger.info("Create IMS obs data in IODA format") + + output_file = f"ims_snow_{to_YMDH(self.task_config.current_cycle)}.nc4" + if os.path.isfile(f"{os.path.join(self.task_config.DATA, output_file)}"): + rm_p(output_file) + + exe = Executable(self.task_config.IMS2IODACONV) + exe.add_default_arg(["-i", f"{os.path.join(self.task_config.DATA, input_file)}"]) + exe.add_default_arg(["-o", 
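`prepare_IMS` below renders the `fims.nml` namelist with the `pygw.jinja.Jinja` wrapper defined later in this change. A small sketch of that pattern is shown here with an invented template string and values; in the task the template file path comes from `FIMS_NML_TMPL` and the values from the temporary `cfg` dictionary.

```python
from pygw.jinja import Jinja  # assumes pygw is importable

# Invented namelist template and values, purely for illustration.
nml_template = "&fIMS_nml\n  idim={{ idim }}, jdim={{ jdim }}, otype='{{ otype }}'\n/\n"
cfg = {'idim': 384, 'jdim': 384, 'otype': 'C384_oro_data'}

# Jinja() accepts either a path to a template file or, as here, a templated string;
# the .render property substitutes the values from cfg.
nml_data = Jinja(nml_template, cfg).render
print(nml_data)
```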
f"{os.path.join(self.task_config.DATA, output_file)}"]) + try: + logger.debug(f"Executing {exe}") + exe() + except OSError: + raise OSError(f"Failed to execute {exe}") + except Exception: + raise WorkflowException(f"An error occured during execution of {exe}") + + # Ensure the IODA snow depth IMS file is produced by the IODA converter + # If so, copy to COM_OBS/ + if not os.path.isfile(f"{os.path.join(self.task_config.DATA, output_file)}"): + logger.exception(f"{self.task_config.IMS2IODACONV} failed to produce {output_file}") + raise FileNotFoundError(f"{os.path.join(self.task_config.DATA, output_file)}") + else: + logger.info(f"Copy {output_file} to {self.task_config.COM_OBS}") + FileHandler(prep_ims_config.ims2ioda).sync() + + @logit(logger) + def get_bkg_dict(self, config: Dict) -> Dict[str, List[str]]: + """Compile a dictionary of model background files to copy + + This method constructs a dictionary of FV3 RESTART files (coupler, sfc_data) + that are needed for global land DA and returns said dictionary for use by the FileHandler class. + + Parameters + ---------- + self: Analysis + Instance of the current object class + config: Dict + Dictionary of key-value pairs needed in this method + + Returns + ---------- + bkg_dict: Dict + a dictionary containing the list of model background files to copy for FileHandler + """ + # NOTE for now this is FV3 RESTART files and just assumed to be fh006 + + # get FV3 RESTART files, this will be a lot simpler when using history files + rst_dir = os.path.join(config.COM_ATMOS_RESTART_PREV) # for now, option later? + run_dir = os.path.join(config.DATA, 'bkg') + + # Start accumulating list of background files to copy + bkglist = [] + + # land DA needs coupler + basename = f'{to_fv3time(config.current_cycle)}.coupler.res' + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + # land DA only needs sfc_data + for ftype in ['sfc_data']: + template = f'{to_fv3time(config.current_cycle)}.{ftype}.tile{{tilenum}}.nc' + for itile in range(1, config.ntiles + 1): + basename = template.format(tilenum=itile) + bkglist.append([os.path.join(rst_dir, basename), os.path.join(run_dir, basename)]) + + bkg_dict = { + 'mkdir': [run_dir], + 'copy': bkglist + } + return bkg_dict diff --git a/ush/python/pygfs/ufswm/__init__.py b/ush/python/pygfs/ufswm/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/ush/python/pygfs/ufswm/gfs.py b/ush/python/pygfs/ufswm/gfs.py new file mode 100644 index 0000000000..f86164d706 --- /dev/null +++ b/ush/python/pygfs/ufswm/gfs.py @@ -0,0 +1,20 @@ +import copy +import logging + +from pygw.logger import logit +from pygfs.ufswm.ufs import UFS + +logger = logging.getLogger(__name__.split('.')[-1]) + + +class GFS(UFS): + + @logit(logger, name="GFS") + def __init__(self, config): + + super().__init__("GFS", config) + + # Start putting fixed properties of the GFS + self.ntiles = 6 + + # Determine coupled/uncoupled from config and define as appropriate diff --git a/ush/python/pygfs/ufswm/ufs.py b/ush/python/pygfs/ufswm/ufs.py new file mode 100644 index 0000000000..a9118801b9 --- /dev/null +++ b/ush/python/pygfs/ufswm/ufs.py @@ -0,0 +1,58 @@ +import re +import copy +import logging +from typing import Dict, Any + +from pygw.template import Template, TemplateConstants +from pygw.logger import logit + +logger = logging.getLogger(__name__.split('.')[-1]) + +UFS_VARIANTS = ['GFS'] + + +class UFS: + + @logit(logger, name="UFS") + def __init__(self, model_name: str, config: Dict[str, Any]): + """Initialize 
the UFS-weather-model generic class and check if the model_name is a valid variant + + Parameters + ---------- + model_name: str + UFS variant + config : Dict + Incoming configuration dictionary + """ + + # First check if this is a valid variant + if model_name not in UFS_VARIANTS: + logger.warn(f"{model_name} is not a valid UFS variant") + raise NotImplementedError(f"{model_name} is not yet implemented") + + # Make a deep copy of incoming config for caching purposes. _config should not be updated + self._config = copy.deepcopy(config) + + @logit(logger) + def parse_ufs_templates(input_template, output_file, ctx: Dict) -> None: + """ + This method parses UFS-weather-model templates of the pattern @[VARIABLE] + drawing the value from ctx['VARIABLE'] + """ + + with open(input_template, 'r') as fhi: + file_in = fhi.read() + file_out = Template.substitute_structure( + file_in, TemplateConstants.AT_SQUARE_BRACES, ctx.get) + + # If there are unrendered bits, find out what they are + pattern = r"@\[.*?\]+" + matches = re.findall(pattern, file_out) + if matches: + logger.warn(f"{input_template} was rendered incompletely") + logger.warn(f"The following variables were not substituted") + print(matches) # TODO: improve the formatting of this message + # TODO: Should we abort here? or continue to write output_file? + + with open(output_file, 'w') as fho: + fho.write(file_out) diff --git a/ush/python/pygw/.gitignore b/ush/python/pygw/.gitignore new file mode 100644 index 0000000000..13a1a9f851 --- /dev/null +++ b/ush/python/pygw/.gitignore @@ -0,0 +1,139 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
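Since `pygw.template` itself is not part of this change, the following standard-library sketch shows what the `@[VARIABLE]` substitution and the leftover check in `parse_ufs_templates` amount to; the helper name and the template contents are invented.

```python
import re

# Stand-in for Template.substitute_structure(..., AT_SQUARE_BRACES, ctx.get):
# replace every @[NAME] with ctx['NAME'] when present, leave it untouched otherwise.
def render_at_square_braces(text: str, ctx: dict) -> str:
    return re.sub(r"@\[(\w+)\]", lambda m: str(ctx.get(m.group(1), m.group(0))), text)

file_in = "layout_x = @[layout_x]\nlayout_y = @[layout_y]\nnpz = @[npz]\n"
ctx = {'layout_x': 8, 'layout_y': 8}          # npz deliberately missing

file_out = render_at_square_braces(file_in, ctx)

# Same leftover check the method performs before writing output_file.
matches = re.findall(r"@\[.*?\]+", file_out)
print(file_out)   # the npz line still reads "npz = @[npz]"
print(matches)    # -> ['@[npz]']
```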
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# Sphinx documentation +docs/_build/ + +# Editor backup files (Emacs, vim) +*~ +*.sw[a-p] + +# Pycharm IDE files +.idea/ diff --git a/ush/python/pygw/README.md b/ush/python/pygw/README.md new file mode 100644 index 0000000000..13db34471c --- /dev/null +++ b/ush/python/pygw/README.md @@ -0,0 +1,36 @@ +# global workflow specific tools + +Python tools specifically for global applications + +## Installation +Simple installation instructions +```sh +$> git clone https://github.com/noaa-emc/global-workflow +$> cd global-workflow/ush/python/pygw +$> pip install . +``` + +It is not required to install this package. Instead, +```sh +$> cd global-workflow/ush/python/pygw +$> export PYTHONPATH=$PWD/src/pygw +``` +would put this package in the `PYTHONPATH` + +### Note: +These instructions will be updated and the tools are under development. + +### Running python tests: +Simple instructions to enable executing pytests manually +```sh +# Create a python virtual environment and step into it +$> cd global-workflow/ush/python/pygw +$> python3 -m venv venv +$> source venv/bin/activate + +# Install pygw with the developer requirements +(venv) $> pip install .[dev] + +# Run pytests +(venv) $> pytest -v +``` diff --git a/ush/python/pygw/setup.cfg b/ush/python/pygw/setup.cfg new file mode 100644 index 0000000000..1d45df0d76 --- /dev/null +++ b/ush/python/pygw/setup.cfg @@ -0,0 +1,62 @@ +[metadata] +name = pygw +version = 0.0.1 +description = Global applications specific workflow related tools +long_description = file: README.md +long_description_content_type = text/markdown +author = "NOAA/NWS/NCEP/EMC" +#author_email = first.last@domain.tld +keywords = NOAA, NWS, NCEP, EMC, GFS, GEFS +home_page = https://github.com/noaa-emc/global-workflow +license = GNU Lesser General Public License +classifiers = + Development Status :: 1 - Beta + Intended Audience :: Developers + Intended Audience :: Science/Research + License :: OSI Approved :: GNU Lesser General Public License + Natural Language :: English + Operating System :: OS Independent + Programming Language :: Python + Programming Language :: Python :: 3 + Programming Language :: Python :: 3.6 + Programming Language :: Python :: 3.7 + Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.9 + Topic :: Software Development :: Libraries :: Python Modules + Operating System :: OS Independent + Typing :: Typed +project_urls = + Bug Tracker = https://github.com/noaa-emc/global-workflow/issues + CI = https://github.com/noaa-emc/global-workflow/actions + +[options] +zip_safe = False +include_package_data = True +package_dir = + =src +packages = find_namespace: +python_requires = >= 3.6 +setup_requires = + setuptools +install_requires = + numpy==1.21.6 + PyYAML==6.0 + Jinja2==3.1.2 +tests_require = + pytest + +[options.packages.find] +where=src + +[options.package_data] +* = *.txt, *.md + +[options.extras_require] +dev = pytest>=7; pytest-cov>=3 + +[green] +file-pattern = test_*.py +verbose = 2 +no-skip-report = true +quiet-stdout = true +run-coverage = true diff --git 
a/ush/python/pygw/setup.py b/ush/python/pygw/setup.py new file mode 100644 index 0000000000..e748ce0b71 --- /dev/null +++ b/ush/python/pygw/setup.py @@ -0,0 +1,4 @@ +''' Standard file for building the package with Distutils. ''' + +import setuptools +setuptools.setup() diff --git a/ush/python/pygw/src/pygw/__init__.py b/ush/python/pygw/src/pygw/__init__.py new file mode 100644 index 0000000000..d44158004c --- /dev/null +++ b/ush/python/pygw/src/pygw/__init__.py @@ -0,0 +1,8 @@ +""" +Commonly used toolset for the global applications and beyond. +""" +__docformat__ = "restructuredtext" + +import os + +pygw_directory = os.path.dirname(__file__) diff --git a/ush/python/pygw/src/pygw/attrdict.py b/ush/python/pygw/src/pygw/attrdict.py new file mode 100644 index 0000000000..f2add20a19 --- /dev/null +++ b/ush/python/pygw/src/pygw/attrdict.py @@ -0,0 +1,171 @@ +# attrdict is a Python module that gives you dictionaries whose values are both +# gettable and settable using attributes, in addition to standard item-syntax. +# https://github.com/mewwts/addict +# addict/addict.py -> attrdict.py +# hash: 7e8d23d +# License: MIT +# class Dict -> class AttrDict to prevent name collisions w/ typing.Dict + +import copy + +__all__ = ['AttrDict'] + + +class AttrDict(dict): + + def __init__(__self, *args, **kwargs): + object.__setattr__(__self, '__parent', kwargs.pop('__parent', None)) + object.__setattr__(__self, '__key', kwargs.pop('__key', None)) + object.__setattr__(__self, '__frozen', False) + for arg in args: + if not arg: + continue + elif isinstance(arg, dict): + for key, val in arg.items(): + __self[key] = __self._hook(val) + elif isinstance(arg, tuple) and (not isinstance(arg[0], tuple)): + __self[arg[0]] = __self._hook(arg[1]) + else: + for key, val in iter(arg): + __self[key] = __self._hook(val) + + for key, val in kwargs.items(): + __self[key] = __self._hook(val) + + def __setattr__(self, name, value): + if hasattr(self.__class__, name): + raise AttributeError("'AttrDict' object attribute " + "'{0}' is read-only".format(name)) + else: + self[name] = value + + def __setitem__(self, name, value): + isFrozen = (hasattr(self, '__frozen') and + object.__getattribute__(self, '__frozen')) + if isFrozen and name not in super(AttrDict, self).keys(): + raise KeyError(name) + if isinstance(value, dict): + value = AttrDict(value) + super(AttrDict, self).__setitem__(name, value) + try: + p = object.__getattribute__(self, '__parent') + key = object.__getattribute__(self, '__key') + except AttributeError: + p = None + key = None + if p is not None: + p[key] = self + object.__delattr__(self, '__parent') + object.__delattr__(self, '__key') + + def __add__(self, other): + if not self.keys(): + return other + else: + self_type = type(self).__name__ + other_type = type(other).__name__ + msg = "unsupported operand type(s) for +: '{}' and '{}'" + raise TypeError(msg.format(self_type, other_type)) + + @classmethod + def _hook(cls, item): + if isinstance(item, dict): + return cls(item) + elif isinstance(item, (list, tuple)): + return type(item)(cls._hook(elem) for elem in item) + return item + + def __getattr__(self, item): + return self.__getitem__(item) + + def __missing__(self, name): + if object.__getattribute__(self, '__frozen'): + raise KeyError(name) + return self.__class__(__parent=self, __key=name) + + def __delattr__(self, name): + del self[name] + + def to_dict(self): + base = {} + for key, value in self.items(): + if isinstance(value, type(self)): + base[key] = value.to_dict() + elif isinstance(value, 
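A short usage sketch for `AttrDict` follows, exercising the behaviours defined above: nested conversion via `_hook`, attribute access, `__missing__` chaining and `to_dict`. The keys are invented and it assumes pygw is importable.

```python
from pygw.attrdict import AttrDict  # assumes pygw is importable

cfg = AttrDict({'resources': {'atmanl': {'nodes': 4, 'walltime': '00:30:00'}}})

# Nested plain dicts are converted on the way in, so attribute access works at depth.
print(cfg.resources.atmanl.nodes)        # -> 4

# Missing keys create empty children (addict behaviour), allowing chained assignment.
cfg.resources.atmensanl.nodes = 8
print(cfg['resources']['atmensanl'])     # -> {'nodes': 8}

# to_dict() converts back to plain builtins, e.g. before dumping to YAML.
plain = cfg.to_dict()
print(type(plain), type(plain['resources']))   # -> <class 'dict'> <class 'dict'>
```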
(list, tuple)): + base[key] = type(value)( + item.to_dict() if isinstance(item, type(self)) else + item for item in value) + else: + base[key] = value + return base + + def copy(self): + return copy.copy(self) + + def deepcopy(self): + return copy.deepcopy(self) + + def __deepcopy__(self, memo): + other = self.__class__() + memo[id(self)] = other + for key, value in self.items(): + other[copy.deepcopy(key, memo)] = copy.deepcopy(value, memo) + return other + + def update(self, *args, **kwargs): + other = {} + if args: + if len(args) > 1: + raise TypeError() + other.update(args[0]) + other.update(kwargs) + for k, v in other.items(): + if ((k not in self) or + (not isinstance(self[k], dict)) or + (not isinstance(v, dict))): + self[k] = v + else: + self[k].update(v) + + def __getnewargs__(self): + return tuple(self.items()) + + def __getstate__(self): + return self + + def __setstate__(self, state): + self.update(state) + + def __or__(self, other): + if not isinstance(other, (AttrDict, dict)): + return NotImplemented + new = AttrDict(self) + new.update(other) + return new + + def __ror__(self, other): + if not isinstance(other, (AttrDict, dict)): + return NotImplemented + new = AttrDict(other) + new.update(self) + return new + + def __ior__(self, other): + self.update(other) + return self + + def setdefault(self, key, default=None): + if key in self: + return self[key] + else: + self[key] = default + return default + + def freeze(self, shouldFreeze=True): + object.__setattr__(self, '__frozen', shouldFreeze) + for key, val in self.items(): + if isinstance(val, AttrDict): + val.freeze(shouldFreeze) + + def unfreeze(self): + self.freeze(False) diff --git a/ush/python/pygw/src/pygw/configuration.py b/ush/python/pygw/src/pygw/configuration.py new file mode 100644 index 0000000000..da39a21748 --- /dev/null +++ b/ush/python/pygw/src/pygw/configuration.py @@ -0,0 +1,179 @@ +import glob +import os +import random +import subprocess +from pathlib import Path +from pprint import pprint +from typing import Union, List, Dict, Any + +from pygw.attrdict import AttrDict +from pygw.timetools import to_datetime + +__all__ = ['Configuration', 'cast_as_dtype', 'cast_strdict_as_dtypedict'] + + +class ShellScriptException(Exception): + def __init__(self, scripts, errors): + self.scripts = scripts + self.errors = errors + super(ShellScriptException, self).__init__( + str(errors) + + ': error processing' + + (' '.join(scripts))) + + +class UnknownConfigError(Exception): + pass + + +class Configuration: + """ + Configuration parser for the global-workflow + (or generally for sourcing a shell script into a python dictionary) + """ + + def __init__(self, config_dir: Union[str, Path]): + """ + Given a directory containing config files (config.XYZ), + return a list of config_files minus the ones ending with ".default" + """ + + self.config_dir = config_dir + self.config_files = self._get_configs + + @property + def _get_configs(self) -> List[str]: + """ + Given a directory containing config files (config.XYZ), + return a list of config_files minus the ones ending with ".default" + """ + result = list() + for config in glob.glob(f'{self.config_dir}/config.*'): + if not config.endswith('.default'): + result.append(config) + + return result + + def find_config(self, config_name: str) -> str: + """ + Given a config file name, find the full path of the config file + """ + + for config in self.config_files: + if config_name == os.path.basename(config): + return config + + raise UnknownConfigError( + f'{config_name} does not 
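To see `Configuration` end to end, the sketch below writes a tiny, invented `config.base` and parses it back. It assumes pygw is importable and that `/bin/sh` on the platform understands `source`, as on the supported systems.

```python
import os
import tempfile
from pygw.configuration import Configuration  # assumes pygw is importable

# Minimal, invented config file; real ones are the config.* files in parm/config.
config_dir = tempfile.mkdtemp()
with open(os.path.join(config_dir, "config.base"), "w") as f:
    f.write('export PSLOT="test"\nexport assim_freq=6\nexport DOHYBVAR="YES"\n')

cfg = Configuration(config_dir)
base = cfg.parse_config("config.base")

# Values come back through cast_as_dtype: PSLOT stays a string, assim_freq becomes
# an int and DOHYBVAR becomes the boolean True.
print(base.PSLOT, base.assim_freq, base.DOHYBVAR)   # -> test 6 True
```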
exist (known: {repr(config_name)}), ABORT!') + + def parse_config(self, files: Union[str, bytes, list]) -> Dict[str, Any]: + """ + Given the name of config file(s), key-value pair of all variables in the config file(s) + are returned as a dictionary + :param files: config file or list of config files + :type files: list or str or unicode + :return: Key value pairs representing the environment variables defined + in the script. + :rtype: dict + """ + if isinstance(files, (str, bytes)): + files = [files] + files = [self.find_config(file) for file in files] + return cast_strdict_as_dtypedict(self._get_script_env(files)) + + def print_config(self, files: Union[str, bytes, list]) -> None: + """ + Given the name of config file(s), key-value pair of all variables in the config file(s) are printed + Same signature as parse_config + :param files: config file or list of config files + :type files: list or str or unicode + :return: None + """ + config = self.parse_config(files) + pprint(config, width=4) + + @classmethod + def _get_script_env(cls, scripts: List) -> Dict[str, Any]: + default_env = cls._get_shell_env([]) + and_script_env = cls._get_shell_env(scripts) + vars_just_in_script = set(and_script_env) - set(default_env) + union_env = dict(default_env) + union_env.update(and_script_env) + return dict([(v, union_env[v]) for v in vars_just_in_script]) + + @staticmethod + def _get_shell_env(scripts: List) -> Dict[str, Any]: + varbls = dict() + runme = ''.join([f'source {s} ; ' for s in scripts]) + magic = f'--- ENVIRONMENT BEGIN {random.randint(0,64**5)} ---' + runme += f'/bin/echo -n "{magic}" ; /usr/bin/env -0' + with open('/dev/null', 'w') as null: + env = subprocess.Popen(runme, shell=True, stdin=null.fileno(), + stdout=subprocess.PIPE) + (out, err) = env.communicate() + out = out.decode() + begin = out.find(magic) + if begin < 0: + raise ShellScriptException(scripts, 'Cannot find magic string; ' + 'at least one script failed: ' + repr(out)) + for entry in out[begin + len(magic):].split('\x00'): + iequal = entry.find('=') + varbls[entry[0:iequal]] = entry[iequal + 1:] + return varbls + + +def cast_strdict_as_dtypedict(ctx: Dict[str, str]) -> Dict[str, Any]: + """ + Environment variables are typically stored as str + This method attempts to translate those into datatypes + Parameters + ---------- + ctx : dict + dictionary with values as str + Returns + ------- + varbles : dict + dictionary with values as datatypes + """ + varbles = AttrDict() + for key, value in ctx.items(): + varbles[key] = cast_as_dtype(value) + return varbles + + +def cast_as_dtype(string: str) -> Union[str, int, float, bool, Any]: + """ + Cast a value into known datatype + Parameters + ---------- + string: str + Returns + ------- + value : str or int or float or datetime + default: str + """ + TRUTHS = ['y', 'yes', 't', 'true', '.t.', '.true.'] + BOOLS = ['n', 'no', 'f', 'false', '.f.', '.false.'] + TRUTHS + BOOLS = [x.upper() for x in BOOLS] + BOOLS + ['Yes', 'No', 'True', 'False'] + + def _cast_or_not(type: Any, string: str): + try: + return type(string) + except ValueError: + return string + + def _true_or_not(string: str): + try: + return string.lower() in TRUTHS + except AttributeError: + return string + + try: + return to_datetime(string) # Try as a datetime + except Exception as exc: + if string in BOOLS: # Likely a boolean, convert to True/False + return _true_or_not(string) + elif '.' 
in string: # Likely a number and that too a float + return _cast_or_not(float, string) + else: # Still could be a number, may be an integer + return _cast_or_not(int, string) diff --git a/ush/python/pygw/src/pygw/exceptions.py b/ush/python/pygw/src/pygw/exceptions.py new file mode 100644 index 0000000000..a97cba6406 --- /dev/null +++ b/ush/python/pygw/src/pygw/exceptions.py @@ -0,0 +1,87 @@ +# pylint: disable=unused-argument + +# ---- + +from collections.abc import Callable + +from pygw.logger import Logger, logit + +logger = Logger(level="error", colored_log=True) + +__all__ = ["WorkflowException", "msg_except_handle"] + + +class WorkflowException(Exception): + """ + Description + ----------- + + This is the base-class for all exceptions; it is a sub-class of + Exceptions. + + Parameters + ---------- + + msg: str + + A Python string containing a message to accompany the + exception. + + """ + + @logit(logger) + def __init__(self: Exception, msg: str): + """ + Description + ----------- + + Creates a new WorkflowException object. + + """ + + # Define the base-class attributes. + logger.error(msg=msg) + super().__init__() + + +# ---- + + +def msg_except_handle(err_cls: object) -> Callable: + """ + Description + ----------- + + This function provides a decorator to be used to raise specified + exceptions. + + Parameters + ---------- + + err_cls: object + + A Python object containing the WorkflowException subclass to + be used for exception raises. + + Parameters + ---------- + + decorator: Callable + + A Python decorator. + + """ + + # Define the decorator function. + def decorator(func: Callable): + + # Execute the caller function; proceed accordingly. + def call_function(msg: str) -> None: + + # If an exception is encountered, raise the respective + # exception. + raise err_cls(msg=msg) + + return call_function + + return decorator diff --git a/ush/python/pygw/src/pygw/executable.py b/ush/python/pygw/src/pygw/executable.py new file mode 100644 index 0000000000..e9868b0214 --- /dev/null +++ b/ush/python/pygw/src/pygw/executable.py @@ -0,0 +1,357 @@ +import os +import shlex +import subprocess +import sys +from typing import Any, Optional, Union, List + +__all__ = ["Executable", "which", "CommandNotFoundError"] + + +class Executable: + """ + Class representing a program that can be run on the command line. + + Example: + -------- + + >>> from pygw.executable import Executable + >>> cmd = Executable('srun') # Lets say we need to run command e.g. "srun" + >>> cmd.add_default_arg('my_exec.x') # Lets say we need to run the executable "my_exec.x" + >>> cmd.add_default_arg('my_arg.yaml') # Lets say we need to pass an argument to this executable e.g. "my_arg.yaml" + >>> cmd.add_default_env('OMP_NUM_THREADS', 4) # Lets say we want to run w/ 4 threads in the environment + >>> cmd(output='stdout', error='stderr') # Run the command and capture the stdout and stderr in files named similarly. + + `cmd` line above will translate to: + + $ export OMP_NUM_THREADS=4 + $ srun my_exec.x my_arg.yaml 1>&stdout 2>&stderr + + References + ---------- + .. [1] "spack.util.executable.py", https://github.com/spack/spack/blob/develop/lib/spack/spack/util/executable.py + """ + + def __init__(self, name: str): + """ + Construct an executable object. 
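A brief illustration of `msg_except_handle`: decorating a placeholder function with it yields a callable that raises the given `WorkflowException` subclass with the supplied message. The subclass name and message below are invented, and it assumes pygw is importable.

```python
from pygw.exceptions import WorkflowException, msg_except_handle  # assumes pygw is importable


class AtmAnalysisError(WorkflowException):
    """Invented task-specific exception for this sketch."""


@msg_except_handle(AtmAnalysisError)
def fail(msg: str) -> None:
    """Raise an AtmAnalysisError; the body is supplied by the decorator."""


try:
    fail("background files are missing")
except AtmAnalysisError as err:
    print(type(err).__name__)   # -> AtmAnalysisError (the message is also logged)
```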
+ + Parameters + ---------- + name : str + name of the executable to run + """ + self.exe = shlex.split(str(name)) + self.default_env = {} + self.returncode = None + + if not self.exe: + raise ProcessError(f"Cannot construct executable for '{name}'") + + def add_default_arg(self, arg: Union[str, List]) -> None: + """ + Add a default argument to the command. + Parameters + ---------- + arg : str + argument to the executable + """ + if isinstance(arg, list): + self.exe.extend(arg) + else: + self.exe.append(arg) + + def add_default_env(self, key: str, value: Any) -> None: + """ + Set an environment variable when the command is run. + + Parameters: + ---------- + key : str + The environment variable to set + value : Any + The value to set it to + """ + self.default_env[key] = str(value) + + @property + def command(self) -> str: + """ + The command-line string. + + Returns: + -------- + str : The executable and default arguments + """ + return " ".join(self.exe) + + @property + def name(self) -> str: + """ + The executable name. + + Returns: + -------- + str : The basename of the executable + """ + return os.path.basename(self.path) + + @property + def path(self) -> str: + """ + The path to the executable. + + Returns: + -------- + str : The path to the executable + """ + return self.exe[0] + + def __call__(self, *args, **kwargs): + """ + Run this executable in a subprocess. + + Parameters: + ----------- + *args (str): Command-line arguments to the executable to run + + Keyword Arguments: + ------------------ + _dump_env : Dict + Dict to be set to the environment actually + used (envisaged for testing purposes only) + env : Dict + The environment with which to run the executable + fail_on_error : bool + Raise an exception if the subprocess returns + an error. Default is True. The return code is available as + ``exe.returncode`` + ignore_errors : int or List + A list of error codes to ignore. + If these codes are returned, this process will not raise + an exception even if ``fail_on_error`` is set to ``True`` + input : + Where to read stdin from + output : + Where to send stdout + error : + Where to send stderr + + Accepted values for input, output, and error: + + * python streams, e.g. open Python file objects, or ``os.devnull`` + * filenames, which will be automatically opened for writing + * ``str``, as in the Python string type. If you set these to ``str``, + output and error will be written to pipes and returned as a string. + If both ``output`` and ``error`` are set to ``str``, then one string + is returned containing output concatenated with error. Not valid + for ``input`` + * ``str.split``, as in the ``split`` method of the Python string type. + Behaves the same as ``str``, except that value is also written to + ``stdout`` or ``stderr``. + + By default, the subprocess inherits the parent's file descriptors. + + """ + # Environment + env_arg = kwargs.get("env", None) + + # Setup default environment + env = os.environ.copy() if env_arg is None else {} + env.update(self.default_env) + + # Apply env argument + if env_arg: + env.update(env_arg) + + if "_dump_env" in kwargs: + kwargs["_dump_env"].clear() + kwargs["_dump_env"].update(env) + + fail_on_error = kwargs.pop("fail_on_error", True) + ignore_errors = kwargs.pop("ignore_errors", ()) + + # If they just want to ignore one error code, make it a tuple. 
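The docstring above notes that passing `output=str` (or `error=str`) captures the stream and hands it back as a Python string. A minimal demonstration with the ordinary `echo` utility, which is used here only for illustration:

```python
from pygw.executable import Executable  # assumes pygw is importable

cmd = Executable("echo")        # any program on PATH would do
cmd.add_default_arg("hello")

# output=str captures stdout and returns it instead of inheriting the parent's stdout.
result = cmd("world", output=str)
print(result.strip())           # -> hello world
print(cmd.returncode)           # -> 0
```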
+ if isinstance(ignore_errors, int): + ignore_errors = (ignore_errors,) + + output = kwargs.pop("output", None) + error = kwargs.pop("error", None) + input = kwargs.pop("input", None) + + if input is str: + raise ValueError("Cannot use `str` as input stream.") + + def streamify(arg, mode): + if isinstance(arg, str): + return open(arg, mode), True + elif arg in (str, str.split): + return subprocess.PIPE, False + else: + return arg, False + + istream, close_istream = streamify(input, "r") + ostream, close_ostream = streamify(output, "w") + estream, close_estream = streamify(error, "w") + + cmd = self.exe + list(args) + + escaped_cmd = ["'%s'" % arg.replace("'", "'\"'\"'") for arg in cmd] + cmd_line_string = " ".join(escaped_cmd) + + proc = None # initialize to avoid lint warning + try: + proc = subprocess.Popen(cmd, stdin=istream, stderr=estream, stdout=ostream, env=env, close_fds=False) + out, err = proc.communicate() + + result = None + if output in (str, str.split) or error in (str, str.split): + result = "" + if output in (str, str.split): + outstr = str(out.decode("utf-8")) + result += outstr + if output is str.split: + sys.stdout.write(outstr) + if error in (str, str.split): + errstr = str(err.decode("utf-8")) + result += errstr + if error is str.split: + sys.stderr.write(errstr) + + rc = self.returncode = proc.returncode + if fail_on_error and rc != 0 and (rc not in ignore_errors): + long_msg = cmd_line_string + if result: + # If the output is not captured in the result, it will have + # been stored either in the specified files (e.g. if + # 'output' specifies a file) or written to the parent's + # stdout/stderr (e.g. if 'output' is not specified) + long_msg += "\n" + result + + raise ProcessError(f"Command exited with status {proc.returncode}:", long_msg) + + return result + + except OSError as e: + raise ProcessError(f"{self.exe[0]}: {e.strerror}", f"Command: {cmd_line_string}") + + except subprocess.CalledProcessError as e: + if fail_on_error: + raise ProcessError( + str(e), + f"\nExit status {proc.returncode} when invoking command: {cmd_line_string}", + ) + + finally: + if close_ostream: + ostream.close() + if close_estream: + estream.close() + if close_istream: + istream.close() + + def __eq__(self, other): + return hasattr(other, "exe") and self.exe == other.exe + + def __neq__(self, other): + return not (self == other) + + def __hash__(self): + return hash((type(self),) + tuple(self.exe)) + + def __repr__(self): + return f"" + + def __str__(self): + return " ".join(self.exe) + + +def which_string(*args, **kwargs) -> str: + """ + Like ``which()``, but return a string instead of an ``Executable``. + + If given multiple executables, returns the string of the first one that is found. + If no executables are found, returns None. + + Parameters: + ----------- + *args : str + One or more executables to search for + + Keyword Arguments: + ------------------ + path : str or List + The path to search. 
Defaults to ``PATH`` + required : bool + If set to True, raise an error if executable not found + + Returns: + -------- + str : + The first executable that is found in the path + """ + path = kwargs.get("path", os.environ.get("PATH", "")) + required = kwargs.get("required", False) + + if isinstance(path, str): + path = path.split(os.pathsep) + + for name in args: + for candidate_name in [name]: + if os.path.sep in candidate_name: + exe = os.path.abspath(candidate_name) + if os.path.isfile(exe) and os.access(exe, os.X_OK): + return exe + else: + for directory in path: + exe = os.path.join(directory, candidate_name) + if os.path.isfile(exe) and os.access(exe, os.X_OK): + return exe + + if required: + raise CommandNotFoundError(f"'{args[0]}' is required. Make sure it is in your PATH.") + + return None + + +def which(*args, **kwargs) -> Optional[Executable]: + """ + Finds an executable in the PATH like command-line which. + + If given multiple executables, returns the first one that is found. + If no executables are found, returns None. + + Parameters: + ----------- + *args : str + One or more executables to search for + + Keyword Arguments: + ------------------ + path : str or List + The path to search. Defaults to ``PATH`` + required : bool + If set to True, raise an error if executable not found + + Returns: + -------- + Executable: The first executable that is found in the path + """ + exe = which_string(*args, **kwargs) + return Executable(shlex.quote(exe)) if exe else None + + +class ProcessError(Exception): + """ + ProcessErrors are raised when Executables exit with an error code. + """ + def __init__(self, short_msg, long_msg=None): + self.short_msg = short_msg + self.long_msg = long_msg + message = short_msg + '\n' + long_msg if long_msg else short_msg + super().__init__(message) + + +class CommandNotFoundError(OSError): + """ + Raised when ``which()`` cannot find a required executable. + """ diff --git a/ush/python/pygw/src/pygw/file_utils.py b/ush/python/pygw/src/pygw/file_utils.py new file mode 100644 index 0000000000..062a707d05 --- /dev/null +++ b/ush/python/pygw/src/pygw/file_utils.py @@ -0,0 +1,73 @@ +from .fsutils import cp, mkdir + +__all__ = ['FileHandler'] + + +class FileHandler: + """Class to manipulate files in bulk for a given configuration + + Parameters + ---------- + config : dict + A dictionary containing the "action" and the "act" in the form of a list + + NOTE + ---- + "action" can be one of mkdir", "copy", etc. 
+ Corresponding "act" would be ['dir1', 'dir2'], [['src1', 'dest1'], ['src2', 'dest2']] + + Attributes + ---------- + config : dict + Dictionary of files to manipulate + """ + + def __init__(self, config): + + self.config = config + + def sync(self): + """ + Method to execute bulk actions on files described in the configuration + """ + sync_factory = { + 'copy': self._copy_files, + 'mkdir': self._make_dirs, + } + # loop through the configuration keys + for action, files in self.config.items(): + sync_factory[action](files) + + @staticmethod + def _copy_files(filelist): + """Function to copy all files specified in the list + + `filelist` should be in the form: + - [src, dest] + + Parameters + ---------- + filelist : list + List of lists of [src, dest] + """ + for sublist in filelist: + if len(sublist) != 2: + raise Exception( + f"List must be of the form ['src', 'dest'], not {sublist}") + src = sublist[0] + dest = sublist[1] + cp(src, dest) + print(f'Copied {src} to {dest}') # TODO use logger + + @staticmethod + def _make_dirs(dirlist): + """Function to make all directories specified in the list + + Parameters + ---------- + dirlist : list + List of directories to create + """ + for dd in dirlist: + mkdir(dd) + print(f'Created {dd}') # TODO use logger diff --git a/ush/python/pygw/src/pygw/fsutils.py b/ush/python/pygw/src/pygw/fsutils.py new file mode 100644 index 0000000000..50d7d10bbf --- /dev/null +++ b/ush/python/pygw/src/pygw/fsutils.py @@ -0,0 +1,73 @@ +import os +import errno +import shutil +import contextlib + +__all__ = ['mkdir', 'mkdir_p', 'rmdir', 'chdir', 'rm_p', 'cp'] + + +def mkdir_p(path): + try: + os.makedirs(path) + except OSError as exc: + if exc.errno == errno.EEXIST and os.path.isdir(path): + pass + else: + raise OSError(f"unable to create directory at {path}") + + +mkdir = mkdir_p + + +def rmdir(dir_path): + try: + shutil.rmtree(dir_path) + except OSError as exc: + raise OSError(f"unable to remove {dir_path}") + + +@contextlib.contextmanager +def chdir(path): + cwd = os.getcwd() + try: + os.chdir(path) + yield + finally: + print(f"WARNING: Unable to chdir({path})") # TODO: use logging + os.chdir(cwd) + + +def rm_p(path): + try: + os.unlink(path) + except OSError as exc: + if exc.errno == errno.ENOENT: + pass + else: + raise OSError(f"unable to remove {path}") + + +def cp(source: str, target: str) -> None: + """ + copy `source` file to `target` using `shutil.copyfile` + If `target` is a directory, then the filename from `source` is retained into the `target` + Parameters + ---------- + source : str + Source filename + target : str + Destination filename or directory + Returns + ------- + None + """ + + if os.path.isdir(target): + target = os.path.join(target, os.path.basename(source)) + + try: + shutil.copyfile(source, target) + except OSError: + raise OSError(f"unable to copy {source} to {target}") + except Exception as exc: + raise Exception(exc) diff --git a/ush/python/pygw/src/pygw/jinja.py b/ush/python/pygw/src/pygw/jinja.py new file mode 100644 index 0000000000..56aac05453 --- /dev/null +++ b/ush/python/pygw/src/pygw/jinja.py @@ -0,0 +1,229 @@ +import io +import os +import sys +import jinja2 +from markupsafe import Markup +from pathlib import Path +from typing import Dict + +from .timetools import strftime, to_YMDH, to_YMD, to_fv3time, to_isotime, to_julian + +__all__ = ['Jinja'] + + +@jinja2.pass_eval_context +class SilentUndefined(jinja2.Undefined): + """ + Description + ----------- + A Jinja2 undefined that does not raise an error when it is used in a + template. 
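The `chdir` context manager defined above is what `execute()` uses to step into the run directory. A tiny self-contained example, with an invented directory name and pygw assumed importable:

```python
import os
import tempfile
from pygw.fsutils import chdir, mkdir  # assumes pygw is importable

rundir = os.path.join(tempfile.mkdtemp(), "rundir")
mkdir(rundir)                  # mkdir is an alias of mkdir_p: no error if it already exists

start = os.getcwd()
# chdir() switches into the directory for the body of the with-block and always
# switches back afterwards, even if the body raises.
with chdir(rundir):
    print(os.getcwd())         # .../rundir
print(os.getcwd() == start)    # -> True
```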
Instead, it returns the template back when the variable is not found + This class is not to be used outside of this file + Its purpose is to return the template instead of an empty string + Presently, it also does not return the filter applied to the variable. + This will be added later when a use case for it presents itself. + """ + def __str__(self): + return "{{ " + self._undefined_name + " }}" + + def __add__(self, other): + return str(self) + other + + def __radd__(self, other): + return other + str(self) + + def __mod__(self, other): + return str(self) % other + + def __call__(self, *args, **kwargs): + return Markup("{{ " + self._undefined_name + " }}") + + +class Jinja: + """ + Description + ----------- + A wrapper around jinja2 to render templates + """ + + def __init__(self, template_path_or_string: str, data: Dict, allow_missing: bool = True): + """ + Description + ----------- + Given a path to a (jinja2) template and a data object, substitute the + template file with data. + Allow for retaining missing or undefined variables. + Parameters + ---------- + template_path_or_string : str + Path to the template file or a templated string + data : dict + Data to be substituted into the template + allow_missing : bool + If True, allow for missing or undefined variables + """ + + self.data = data + self.undefined = SilentUndefined if allow_missing else jinja2.StrictUndefined + + if os.path.isfile(template_path_or_string): + self.template_type = 'file' + self.template_path = Path(template_path_or_string) + else: + self.template_type = 'stream' + self.template_stream = template_path_or_string + + @property + def render(self, data: Dict = None) -> str: + """ + Description + ----------- + Render the Jinja2 template with the data + Parameters + ---------- + data: dict (optional) + Additional data to be used in the template + Not implemented yet. Placed here for future use + Returns + ------- + rendered: str + Rendered template into text + """ + + render_map = {'stream': self._render_stream, + 'file': self._render_file} + return render_map[self.template_type]() + + def get_set_env(self, loader: jinja2.BaseLoader) -> jinja2.Environment: + """ + Description + ----------- + Define the environment for the jinja2 template + Any number of filters can be added here + + Parameters + ---------- + loader: of class jinja2.BaseLoader + Returns + ------- + env: jinja2.Environment + """ + env = jinja2.Environment(loader=loader, undefined=self.undefined) + env.filters["strftime"] = lambda dt, fmt: strftime(dt, fmt) + env.filters["to_isotime"] = lambda dt: to_isotime(dt) if not isinstance(dt, SilentUndefined) else dt + env.filters["to_fv3time"] = lambda dt: to_fv3time(dt) if not isinstance(dt, SilentUndefined) else dt + env.filters["to_YMDH"] = lambda dt: to_YMDH(dt) if not isinstance(dt, SilentUndefined) else dt + env.filters["to_YMD"] = lambda dt: to_YMD(dt) if not isinstance(dt, SilentUndefined) else dt + env.filters["to_julian"] = lambda dt: to_julian(dt) if not isinstance(dt, SilentUndefined) else dt + return env + + @staticmethod + def add_filter_env(env: jinja2.Environment, filter_name: str, filter_func: callable): + """ + Description + ----------- + Add a custom filter to the jinja2 environment + Not implemented yet. 
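The effect of `SilentUndefined` together with the datetime filters registered in `get_set_env` can be seen in this small sketch. The template text and data are invented, and `to_YMDH` is assumed to format a datetime as YYYYMMDDHH.

```python
from datetime import datetime
from pygw.jinja import Jinja  # assumes pygw is importable

tmpl = "valid: {{ current_cycle | to_YMDH }} run: {{ RUN }}"
data = {'current_cycle': datetime(2023, 6, 1, 6)}   # RUN is deliberately left out

# With the default allow_missing=True, SilentUndefined puts the template text back
# for anything it cannot resolve instead of raising.
print(Jinja(tmpl, data).render)
# -> valid: 2023060106 run: {{ RUN }}

# With allow_missing=False the same template raises on the missing RUN.
try:
    Jinja(tmpl, data, allow_missing=False).render
except Exception as err:
    print(f"render failed: {err}")
```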
Placed here for future use + Parameters + ---------- + env: jinja2.Environment + Active jinja2 environment + filter_name: str + name of the filter + filter_func: callable + function that will be called + Returns + ------- + env: jinja2.Environment + Active jinja2 environment with the new filter added + """ + raise NotImplementedError("Not implemented yet. Placed here for future use") + # Implementation would look something like the following + # env.filters[filter_name] = filter_func + # return env + + def _render_stream(self): + loader = jinja2.BaseLoader() + env = self.get_set_env(loader) + template = env.from_string(self.template_stream) + return self._render_template(template) + + def _render_file(self, data: Dict = None): + template_dir = self.template_path.parent + template_file = self.template_path.relative_to(template_dir) + + loader = jinja2.FileSystemLoader(template_dir) + env = self.get_set_env(loader) + template = env.get_template(str(template_file)) + return self._render_template(template) + + def _render_template(self, template: jinja2.Template): + """ + Description + ----------- + Render a jinja2 template object + Parameters + ---------- + template: jinja2.Template + + Returns + ------- + rendered: str + """ + try: + rendered = template.render(**self.data) + except jinja2.UndefinedError as ee: + raise Exception(f"Undefined variable in Jinja2 template\n{ee}") + + return rendered + + def _render(self, template_name: str, loader: jinja2.BaseLoader) -> str: + """ + Description + ----------- + Internal method to render a jinja2 template + Parameters + ---------- + template_name: str + loader: jinja2.BaseLoader + Returns + ------- + rendered: str + rendered template + """ + env = jinja2.Environment(loader=loader, undefined=self.undefined) + template = env.get_template(template_name) + try: + rendered = template.render(**self.data) + except jinja2.UndefinedError as ee: + raise Exception(f"Undefined variable in Jinja2 template\n{ee}") + + return rendered + + def save(self, output_file: str) -> None: + """ + Description + ----------- + Render and save the output to a file + Parameters + ---------- + output_file: str + Path to the output file + Returns + ------- + None + """ + with open(output_file, 'wb') as fh: + fh.write(self.render.encode("utf-8")) + + def dump(self) -> None: + """ + Description + ----------- + Render and dump the output to stdout + Returns + ------- + None + """ + io.TextIOWrapper(sys.stdout.buffer, + encoding="utf-8").write(self.render) diff --git a/ush/python/pygw/src/pygw/logger.py b/ush/python/pygw/src/pygw/logger.py new file mode 100644 index 0000000000..1bf2ed2985 --- /dev/null +++ b/ush/python/pygw/src/pygw/logger.py @@ -0,0 +1,275 @@ +""" +Logger +""" + +import os +import sys +from functools import wraps +from pathlib import Path +from typing import Union, List +import logging + + +class ColoredFormatter(logging.Formatter): + """ + Logging colored formatter + adapted from https://stackoverflow.com/a/56944256/3638629 + """ + + grey = '\x1b[38;21m' + blue = '\x1b[38;5;39m' + yellow = '\x1b[38;5;226m' + red = '\x1b[38;5;196m' + bold_red = '\x1b[31;1m' + reset = '\x1b[0m' + + def __init__(self, fmt): + super().__init__() + self.fmt = fmt + self.formats = { + logging.DEBUG: self.blue + self.fmt + self.reset, + logging.INFO: self.grey + self.fmt + self.reset, + logging.WARNING: self.yellow + self.fmt + self.reset, + logging.ERROR: self.red + self.fmt + self.reset, + logging.CRITICAL: self.bold_red + self.fmt + self.reset + } + + def format(self, record): + 
log_fmt = self.formats.get(record.levelno) + formatter = logging.Formatter(log_fmt) + return formatter.format(record) + + +class Logger: + """ + Improved logging + """ + LOG_LEVELS = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'] + DEFAULT_LEVEL = 'INFO' + DEFAULT_FORMAT = '%(asctime)s - %(levelname)-8s - %(name)-12s: %(message)s' + + def __init__(self, name: str = None, + level: str = os.environ.get("LOGGING_LEVEL"), + _format: str = DEFAULT_FORMAT, + colored_log: bool = False, + logfile_path: Union[str, Path] = None): + """ + Initialize Logger + + Parameters + ---------- + name : str + Name of the Logger object + default : None + level : str + Desired Logging level + default : 'INFO' + _format : str + Desired Logging Format + default : '%(asctime)s - %(levelname)-8s - %(name)-12s: %(message)s' + colored_log : bool + Use colored logging for stdout + default: False + logfile_path : str or Path + Path for logging to a file + default : None + """ + + self.name = name + self.level = level.upper() if level else Logger.DEFAULT_LEVEL + self.format = _format + self.colored_log = colored_log + + if self.level not in Logger.LOG_LEVELS: + raise LookupError(f"{self.level} is unknown logging level\n" + + f"Currently supported log levels are:\n" + + f"{' | '.join(Logger.LOG_LEVELS)}") + + # Initialize the root logger if no name is present + self._logger = logging.getLogger(name) if name else logging.getLogger() + + self._logger.setLevel(self.level) + + _handlers = [] + # Add console handler for logger + _handler = Logger.add_stream_handler( + level=self.level, + _format=self.format, + colored_log=self.colored_log, + ) + _handlers.append(_handler) + self._logger.addHandler(_handler) + + # Add file handler for logger + if logfile_path is not None: + _handler = Logger.add_file_handler( + logfile_path, level=self.level, _format=self.format) + self._logger.addHandler(_handler) + _handlers.append(_handler) + + def __getattr__(self, attribute): + """ + Allows calling logging module methods directly + + Parameters + ---------- + attribute : str + attribute name of a logging object + + Returns + ------- + attribute : logging attribute + """ + return getattr(self._logger, attribute) + + def get_logger(self): + """ + Return the logging object + + Returns + ------- + logger : Logger object + """ + return self._logger + + @classmethod + def add_handlers(cls, logger: logging.Logger, handlers: List[logging.Handler]): + """ + Add a list of handlers to a logger + + Parameters + ---------- + logger : logging.Logger + Logger object to add a new handler to + handlers: list + A list of handlers to be added to the logger object + + Returns + ------- + logger : Logger object + """ + for handler in handlers: + logger.addHandler(handler) + + return logger + + @classmethod + def add_stream_handler(cls, level: str = DEFAULT_LEVEL, + _format: str = DEFAULT_FORMAT, + colored_log: bool = False): + """ + Create stream handler + This classmethod will allow setting a custom stream handler on children + + Parameters + ---------- + level : str + logging level + default : 'INFO' + _format : str + logging format + default : '%(asctime)s - %(levelname)-8s - %(name)-12s: %(message)s' + colored_log : bool + enable colored output for stdout + default : False + + Returns + ------- + handler : logging.Handler + stream handler of a logging object + """ + + handler = logging.StreamHandler(sys.stdout) + handler.setLevel(level) + _format = ColoredFormatter( + _format) if colored_log else logging.Formatter(_format) + handler.setFormatter(_format) 
+ + return handler + + @classmethod + def add_file_handler(cls, logfile_path: Union[str, Path], + level: str = DEFAULT_LEVEL, + _format: str = DEFAULT_FORMAT): + """ + Create file handler. + This classmethod will allow setting custom file handler on children + Create stream handler + This classmethod will allow setting a custom stream handler on children + + Parameters + ---------- + logfile_path: str or Path + Path for writing out logfiles from logging + default : False + level : str + logging level + default : 'INFO' + _format : str + logging format + default : '%(asctime)s - %(levelname)-8s - %(name)-12s: %(message)s' + + Returns + ------- + handler : logging.Handler + file handler of a logging object + """ + + logfile_path = Path(logfile_path) + + # Create the directory containing the logfile_path + if not logfile_path.parent.is_dir(): + logfile_path.mkdir(parents=True, exist_ok=True) + + handler = logging.FileHandler(str(logfile_path)) + handler.setLevel(level) + handler.setFormatter(logging.Formatter(_format)) + + return handler + + +def logit(logger, name=None, message=None): + """ + Logger decorator to add logging to a function. + Simply add: + @logit(logger) before any function + Parameters + ---------- + logger : Logger + Logger object + name : str + Name of the module to be logged + default: __module__ + message : str + Name of the function to be logged + default: __name__ + """ + + def decorate(func): + + log_name = name if name else func.__module__ + log_msg = message if message else log_name + "." + func.__name__ + + @wraps(func) + def wrapper(*args, **kwargs): + + passed_args = [repr(aa) for aa in args] + passed_kwargs = [f"{kk}={repr(vv)}" for kk, vv in list(kwargs.items())] + + call_msg = 'BEGIN: ' + log_msg + logger.info(call_msg) + logger.debug(f"( {', '.join(passed_args + passed_kwargs)} )") + + # Call the function + retval = func(*args, **kwargs) + + # Close the logging with printing the return val + ret_msg = ' END: ' + log_msg + logger.info(ret_msg) + logger.debug(f" returning: {retval}") + + return retval + + return wrapper + + return decorate diff --git a/ush/python/pygw/src/pygw/schema.py b/ush/python/pygw/src/pygw/schema.py new file mode 100644 index 0000000000..2a46c62f01 --- /dev/null +++ b/ush/python/pygw/src/pygw/schema.py @@ -0,0 +1,887 @@ +"""schema is a library for validating Python data structures, such as those +obtained from config-files, forms, external services or command-line +parsing, converted from JSON/YAML (or something else) to Python data-types.""" + +import inspect +import re + +from typing import Dict +from pydoc import locate + +try: + from contextlib import ExitStack +except ImportError: + from contextlib2 import ExitStack + + +__version__ = "0.7.5" +__all__ = [ + "Schema", + "And", + "Or", + "Regex", + "Optional", + "Use", + "Forbidden", + "Const", + "Literal", + "SchemaError", + "SchemaWrongKeyError", + "SchemaMissingKeyError", + "SchemaForbiddenKeyError", + "SchemaUnexpectedTypeError", + "SchemaOnlyOneAllowedError", +] + + +class SchemaError(Exception): + """Error during Schema validation.""" + + def __init__(self, autos, errors=None): + self.autos = autos if type(autos) is list else [autos] + self.errors = errors if type(errors) is list else [errors] + Exception.__init__(self, self.code) + + @property + def code(self): + """ + Removes duplicates values in auto and error list. + parameters. + """ + + def uniq(seq): + """ + Utility function that removes duplicate. 
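Looking back at `logger.py` above, a small sketch of how `Logger` and the `logit` decorator are intended to be combined; the logger name and the decorated function are made up for illustration:

```python
from pygw.logger import Logger, logit

# Console logger at DEBUG level with colored output.
logger = Logger("demo", level="DEBUG", colored_log=True)


@logit(logger)
def add(a, b):
    return a + b


# Emits "BEGIN: <module>.add", the argument reprs at DEBUG level,
# then " END: <module>.add" and the return value.
add(2, 3)
```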
+ """ + seen = set() + seen_add = seen.add + # This way removes duplicates while preserving the order. + return [x for x in seq if x not in seen and not seen_add(x)] + + data_set = uniq(i for i in self.autos if i is not None) + error_list = uniq(i for i in self.errors if i is not None) + if error_list: + return "\n".join(error_list) + return "\n".join(data_set) + + +class SchemaWrongKeyError(SchemaError): + """Error Should be raised when an unexpected key is detected within the + data set being.""" + + pass + + +class SchemaMissingKeyError(SchemaError): + """Error should be raised when a mandatory key is not found within the + data set being validated""" + + pass + + +class SchemaOnlyOneAllowedError(SchemaError): + """Error should be raised when an only_one Or key has multiple matching candidates""" + + pass + + +class SchemaForbiddenKeyError(SchemaError): + """Error should be raised when a forbidden key is found within the + data set being validated, and its value matches the value that was specified""" + + pass + + +class SchemaUnexpectedTypeError(SchemaError): + """Error should be raised when a type mismatch is detected within the + data set being validated.""" + + pass + + +class And(object): + """ + Utility function to combine validation directives in AND Boolean fashion. + """ + + def __init__(self, *args, **kw): + self._args = args + if not set(kw).issubset({"error", "schema", "ignore_extra_keys"}): + diff = {"error", "schema", "ignore_extra_keys"}.difference(kw) + raise TypeError("Unknown keyword arguments %r" % list(diff)) + self._error = kw.get("error") + self._ignore_extra_keys = kw.get("ignore_extra_keys", False) + # You can pass your inherited Schema class. + self._schema = kw.get("schema", Schema) + + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, ", ".join(repr(a) for a in self._args)) + + @property + def args(self): + """The provided parameters""" + return self._args + + def validate(self, data, **kwargs): + """ + Validate data using defined sub schema/expressions ensuring all + values are valid. + :param data: to be validated with sub defined schemas. + :return: returns validated data + """ + for s in [self._schema(s, error=self._error, ignore_extra_keys=self._ignore_extra_keys) for s in self._args]: + data = s.validate(data, **kwargs) + return data + + +class Or(And): + """Utility function to combine validation directives in a OR Boolean + fashion.""" + + def __init__(self, *args, **kwargs): + self.only_one = kwargs.pop("only_one", False) + self.match_count = 0 + super(Or, self).__init__(*args, **kwargs) + + def reset(self): + failed = self.match_count > 1 and self.only_one + self.match_count = 0 + if failed: + raise SchemaOnlyOneAllowedError(["There are multiple keys present " + "from the %r condition" % self]) + + def validate(self, data, **kwargs): + """ + Validate data using sub defined schema/expressions ensuring at least + one value is valid. + :param data: data to be validated by provided schema. 
+ :return: return validated data if not validation + """ + autos, errors = [], [] + for s in [self._schema(s, error=self._error, ignore_extra_keys=self._ignore_extra_keys) for s in self._args]: + try: + validation = s.validate(data, **kwargs) + self.match_count += 1 + if self.match_count > 1 and self.only_one: + break + return validation + except SchemaError as _x: + autos += _x.autos + errors += _x.errors + raise SchemaError( + ["%r did not validate %r" % (self, data)] + autos, + [self._error.format(data) if self._error else None] + errors, + ) + + +class Regex(object): + """ + Enables schema.py to validate string using regular expressions. + """ + + # Map all flags bits to a more readable description + NAMES = [ + "re.ASCII", + "re.DEBUG", + "re.VERBOSE", + "re.UNICODE", + "re.DOTALL", + "re.MULTILINE", + "re.LOCALE", + "re.IGNORECASE", + "re.TEMPLATE", + ] + + def __init__(self, pattern_str, flags=0, error=None): + self._pattern_str = pattern_str + flags_list = [ + Regex.NAMES[i] for i, f in enumerate("{0:09b}".format(int(flags))) if f != "0" + ] # Name for each bit + + if flags_list: + self._flags_names = ", flags=" + "|".join(flags_list) + else: + self._flags_names = "" + + self._pattern = re.compile(pattern_str, flags=flags) + self._error = error + + def __repr__(self): + return "%s(%r%s)" % (self.__class__.__name__, self._pattern_str, self._flags_names) + + @property + def pattern_str(self): + """The pattern for the represented regular expression""" + return self._pattern_str + + def validate(self, data, **kwargs): + """ + Validated data using defined regex. + :param data: data to be validated + :return: return validated data. + """ + e = self._error + + try: + if self._pattern.search(data): + return data + else: + raise SchemaError("%r does not match %r" % (self, data), e.format(data) if e else None) + except TypeError: + raise SchemaError("%r is not string nor buffer" % data, e) + + +class Use(object): + """ + For more general use cases, you can use the Use class to transform + the data while it is being validate. + """ + + def __init__(self, callable_, error=None): + if not callable(callable_): + raise TypeError("Expected a callable, not %r" % callable_) + self._callable = callable_ + self._error = error + + def __repr__(self): + return "%s(%r)" % (self.__class__.__name__, self._callable) + + def validate(self, data, **kwargs): + try: + return self._callable(data) + except SchemaError as x: + raise SchemaError([None] + x.autos, [self._error.format(data) if self._error else None] + x.errors) + except BaseException as x: + f = _callable_str(self._callable) + raise SchemaError("%s(%r) raised %r" % (f, data, x), self._error.format(data) if self._error else None) + + +COMPARABLE, CALLABLE, VALIDATOR, TYPE, DICT, ITERABLE = range(6) + + +def _priority(s): + """Return priority for a given object.""" + if type(s) in (list, tuple, set, frozenset): + return ITERABLE + if type(s) is dict: + return DICT + if issubclass(type(s), type): + return TYPE + if isinstance(s, Literal): + return COMPARABLE + if hasattr(s, "validate"): + return VALIDATOR + if callable(s): + return CALLABLE + else: + return COMPARABLE + + +def _invoke_with_optional_kwargs(f, **kwargs): + s = inspect.signature(f) + if len(s.parameters) == 0: + return f() + return f(**kwargs) + + +class Schema(object): + """ + Entry point of the library, use this class to instantiate validation + schema for the data that will be validated. 
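To make the `And`/`Or`/`Use` combinators above concrete, a short sketch using the `Schema` entry point; the values being validated are invented for illustration:

```python
from pygw.schema import And, Or, Schema, Use

# Coerce to int first, then range-check; both steps must pass.
port = Schema(And(Use(int), lambda n: 0 < n < 65536))
assert port.validate("8080") == 8080

# Accept any one value out of a fixed set.
log_level = Schema(Or("DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"))
assert log_level.validate("INFO") == "INFO"
```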
+ """ + + def __init__(self, schema, error=None, ignore_extra_keys=False, name=None, description=None, as_reference=False): + self._schema = schema + self._error = error + self._ignore_extra_keys = ignore_extra_keys + self._name = name + self._description = description + # Ask json_schema to create a definition for this schema and use it as part of another + self.as_reference = as_reference + if as_reference and name is None: + raise ValueError("Schema used as reference should have a name") + + def __repr__(self): + return "%s(%r)" % (self.__class__.__name__, self._schema) + + @property + def schema(self): + return self._schema + + @property + def description(self): + return self._description + + @property + def name(self): + return self._name + + @property + def ignore_extra_keys(self): + return self._ignore_extra_keys + + @staticmethod + def _dict_key_priority(s): + """Return priority for a given key object.""" + if isinstance(s, Hook): + return _priority(s._schema) - 0.5 + if isinstance(s, Optional): + return _priority(s._schema) + 0.5 + return _priority(s) + + @staticmethod + def _is_optional_type(s): + """Return True if the given key is optional (does not have to be found)""" + return any(isinstance(s, optional_type) for optional_type in [Optional, Hook]) + + def is_valid(self, data, **kwargs): + """Return whether the given data has passed all the validations + that were specified in the given schema. + """ + try: + self.validate(data, **kwargs) + except SchemaError: + return False + else: + return True + + def _prepend_schema_name(self, message): + """ + If a custom schema name has been defined, prepends it to the error + message that gets raised when a schema error occurs. + """ + if self._name: + message = "{0!r} {1!s}".format(self._name, message) + return message + + def validate(self, data, **kwargs): + Schema = self.__class__ + s = self._schema + e = self._error + i = self._ignore_extra_keys + + if isinstance(s, Literal): + s = s.schema + + flavor = _priority(s) + if flavor == ITERABLE: + data = Schema(type(s), error=e).validate(data, **kwargs) + o = Or(*s, error=e, schema=Schema, ignore_extra_keys=i) + return type(data)(o.validate(d, **kwargs) for d in data) + if flavor == DICT: + exitstack = ExitStack() + data = Schema(dict, error=e).validate(data, **kwargs) + new = type(data)() # new - is a dict of the validated values + coverage = set() # matched schema keys + # for each key and value find a schema entry matching them, if any + sorted_skeys = sorted(s, key=self._dict_key_priority) + for skey in sorted_skeys: + if hasattr(skey, "reset"): + exitstack.callback(skey.reset) + + with exitstack: + # Evaluate dictionaries last + data_items = sorted(data.items(), key=lambda value: isinstance(value[1], dict)) + for key, value in data_items: + for skey in sorted_skeys: + svalue = s[skey] + try: + nkey = Schema(skey, error=e).validate(key, **kwargs) + except SchemaError: + pass + else: + if isinstance(skey, Hook): + # As the content of the value makes little sense for + # keys with a hook, we reverse its meaning: + # we will only call the handler if the value does match + # In the case of the forbidden key hook, + # we will raise the SchemaErrorForbiddenKey exception + # on match, allowing for excluding a key only if its + # value has a certain type, and allowing Forbidden to + # work well in combination with Optional. 
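The hook behaviour described in the comment above can be sketched as follows; the `Forbidden` and `Hook` classes are defined further down in this file, and the key names here are placeholders:

```python
from pygw.schema import Forbidden, Schema, SchemaForbiddenKeyError

# A string-valued "password" key is rejected outright; other string keys pass.
s = Schema({Forbidden("password"): str, str: object})

try:
    s.validate({"password": "hunter2", "user": "jdoe"})
except SchemaForbiddenKeyError as err:
    print(f"rejected: {err}")
```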
+ try: + nvalue = Schema(svalue, error=e).validate(value, **kwargs) + except SchemaError: + continue + skey.handler(nkey, data, e) + else: + try: + nvalue = Schema(svalue, error=e, ignore_extra_keys=i).validate(value, **kwargs) + except SchemaError as x: + k = "Key '%s' error:" % nkey + message = self._prepend_schema_name(k) + raise SchemaError([message] + x.autos, [e.format(data) if e else None] + x.errors) + else: + new[nkey] = nvalue + coverage.add(skey) + break + required = set(k for k in s if not self._is_optional_type(k)) + if not required.issubset(coverage): + missing_keys = required - coverage + s_missing_keys = ", ".join(repr(k) for k in sorted(missing_keys, key=repr)) + message = "Missing key%s: %s" % (_plural_s(missing_keys), s_missing_keys) + message = self._prepend_schema_name(message) + raise SchemaMissingKeyError(message, e.format(data) if e else None) + if not self._ignore_extra_keys and (len(new) != len(data)): + wrong_keys = set(data.keys()) - set(new.keys()) + s_wrong_keys = ", ".join(repr(k) for k in sorted(wrong_keys, key=repr)) + message = "Wrong key%s %s in %r" % (_plural_s(wrong_keys), s_wrong_keys, data) + message = self._prepend_schema_name(message) + raise SchemaWrongKeyError(message, e.format(data) if e else None) + + # Apply default-having optionals that haven't been used: + defaults = set(k for k in s if isinstance(k, Optional) and hasattr(k, "default")) - coverage + for default in defaults: + new[default.key] = _invoke_with_optional_kwargs(default.default, **kwargs) if callable(default.default) else default.default + + return new + if flavor == TYPE: + if isinstance(data, s) and not (isinstance(data, bool) and s == int): + return data + else: + message = "%r should be instance of %r" % (data, s.__name__) + message = self._prepend_schema_name(message) + raise SchemaUnexpectedTypeError(message, e.format(data) if e else None) + if flavor == VALIDATOR: + try: + return s.validate(data, **kwargs) + except SchemaError as x: + raise SchemaError([None] + x.autos, [e.format(data) if e else None] + x.errors) + except BaseException as x: + message = "%r.validate(%r) raised %r" % (s, data, x) + message = self._prepend_schema_name(message) + raise SchemaError(message, e.format(data) if e else None) + if flavor == CALLABLE: + f = _callable_str(s) + try: + if s(data): + return data + except SchemaError as x: + raise SchemaError([None] + x.autos, [e.format(data) if e else None] + x.errors) + except BaseException as x: + message = "%s(%r) raised %r" % (f, data, x) + message = self._prepend_schema_name(message) + raise SchemaError(message, e.format(data) if e else None) + message = "%s(%r) should evaluate to True" % (f, data) + message = self._prepend_schema_name(message) + raise SchemaError(message, e.format(data) if e else None) + if s == data: + return data + else: + message = "%r does not match %r" % (s, data) + message = self._prepend_schema_name(message) + raise SchemaError(message, e.format(data) if e else None) + + def json_schema(self, schema_id, use_refs=False, **kwargs): + """Generate a draft-07 JSON schema dict representing the Schema. + This method must be called with a schema_id. + + :param schema_id: The value of the $id on the main schema + :param use_refs: Enable reusing object references in the resulting JSON schema. 
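A small sketch of dictionary validation with default-carrying optional keys, which `validate()` above fills in for any unmatched `Optional` (the `Optional` class itself is defined later in this file; key names are illustrative):

```python
from pygw.schema import Optional, Schema

# "layout" is filled in from its default when absent from the input data.
resource_schema = Schema({"ntiles": int, Optional("layout", default="1x1"): str})

validated = resource_schema.validate({"ntiles": 6})
assert validated == {"ntiles": 6, "layout": "1x1"}
```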
+ Schemas with references are harder to read by humans, but are a lot smaller when there + is a lot of reuse + """ + + seen = dict() # For use_refs + definitions_by_name = {} + + def _json_schema(schema, is_main_schema=True, description=None, allow_reference=True): + Schema = self.__class__ + + def _create_or_use_ref(return_dict): + """If not already seen, return the provided part of the schema unchanged. + If already seen, give an id to the already seen dict and return a reference to the previous part + of the schema instead. + """ + if not use_refs or is_main_schema: + return return_schema + + hashed = hash(repr(sorted(return_dict.items()))) + + if hashed not in seen: + seen[hashed] = return_dict + return return_dict + else: + id_str = "#" + str(hashed) + seen[hashed]["$id"] = id_str + return {"$ref": id_str} + + def _get_type_name(python_type): + """Return the JSON schema name for a Python type""" + if python_type == str: + return "string" + elif python_type == int: + return "integer" + elif python_type == float: + return "number" + elif python_type == bool: + return "boolean" + elif python_type == list: + return "array" + elif python_type == dict: + return "object" + return "string" + + def _to_json_type(value): + """Attempt to convert a constant value (for "const" and "default") to a JSON serializable value""" + if value is None or type(value) in (str, int, float, bool, list, dict): + return value + + if type(value) in (tuple, set, frozenset): + return list(value) + + if isinstance(value, Literal): + return value.schema + + return str(value) + + def _to_schema(s, ignore_extra_keys): + if not isinstance(s, Schema): + return Schema(s, ignore_extra_keys=ignore_extra_keys) + + return s + + s = schema.schema + i = schema.ignore_extra_keys + flavor = _priority(s) + + return_schema = {} + + return_description = description or schema.description + if return_description: + return_schema["description"] = return_description + + # Check if we have to create a common definition and use as reference + if allow_reference and schema.as_reference: + # Generate sub schema if not already done + if schema.name not in definitions_by_name: + definitions_by_name[schema.name] = {} # Avoid infinite loop + definitions_by_name[schema.name] = _json_schema(schema, is_main_schema=False, allow_reference=False) + + return_schema["$ref"] = "#/definitions/" + schema.name + else: + if flavor == TYPE: + # Handle type + return_schema["type"] = _get_type_name(s) + elif flavor == ITERABLE: + # Handle arrays or dict schema + + return_schema["type"] = "array" + if len(s) == 1: + return_schema["items"] = _json_schema(_to_schema(s[0], i), is_main_schema=False) + elif len(s) > 1: + return_schema["items"] = _json_schema(Schema(Or(*s)), is_main_schema=False) + elif isinstance(s, Or): + # Handle Or values + + # Check if we can use an enum + if all(priority == COMPARABLE for priority in [_priority(value) for value in s.args]): + or_values = [str(s) if isinstance(s, Literal) else s for s in s.args] + # All values are simple, can use enum or const + if len(or_values) == 1: + return_schema["const"] = _to_json_type(or_values[0]) + return return_schema + return_schema["enum"] = or_values + else: + # No enum, let's go with recursive calls + any_of_values = [] + for or_key in s.args: + new_value = _json_schema(_to_schema(or_key, i), is_main_schema=False) + if new_value != {} and new_value not in any_of_values: + any_of_values.append(new_value) + if len(any_of_values) == 1: + # Only one representable condition remains, do not put under 
anyOf + return_schema.update(any_of_values[0]) + else: + return_schema["anyOf"] = any_of_values + elif isinstance(s, And): + # Handle And values + all_of_values = [] + for and_key in s.args: + new_value = _json_schema(_to_schema(and_key, i), is_main_schema=False) + if new_value != {} and new_value not in all_of_values: + all_of_values.append(new_value) + if len(all_of_values) == 1: + # Only one representable condition remains, do not put under allOf + return_schema.update(all_of_values[0]) + else: + return_schema["allOf"] = all_of_values + elif flavor == COMPARABLE: + return_schema["const"] = _to_json_type(s) + elif flavor == VALIDATOR and type(s) == Regex: + return_schema["type"] = "string" + return_schema["pattern"] = s.pattern_str + else: + if flavor != DICT: + # If not handled, do not check + return return_schema + + # Schema is a dict + + required_keys = [] + expanded_schema = {} + additional_properties = i + for key in s: + if isinstance(key, Hook): + continue + + def _key_allows_additional_properties(key): + """Check if a key is broad enough to allow additional properties""" + if isinstance(key, Optional): + return _key_allows_additional_properties(key.schema) + + return key == str or key == object + + def _get_key_description(key): + """Get the description associated to a key (as specified in a Literal object). Return None if not a Literal""" + if isinstance(key, Optional): + return _get_key_description(key.schema) + + if isinstance(key, Literal): + return key.description + + return None + + def _get_key_name(key): + """Get the name of a key (as specified in a Literal object). Return the key unchanged if not a Literal""" + if isinstance(key, Optional): + return _get_key_name(key.schema) + + if isinstance(key, Literal): + return key.schema + + return key + + additional_properties = additional_properties or _key_allows_additional_properties(key) + sub_schema = _to_schema(s[key], ignore_extra_keys=i) + key_name = _get_key_name(key) + + if isinstance(key_name, str): + if not isinstance(key, Optional): + required_keys.append(key_name) + expanded_schema[key_name] = _json_schema( + sub_schema, is_main_schema=False, description=_get_key_description(key) + ) + if isinstance(key, Optional) and hasattr(key, "default"): + expanded_schema[key_name]["default"] = _to_json_type(_invoke_with_optional_kwargs(key.default, **kwargs) if callable(key.default) else key.default) # nopep8 + elif isinstance(key_name, Or): + # JSON schema does not support having a key named one name or another, so we just add both options + # This is less strict because we cannot enforce that one or the other is required + + for or_key in key_name.args: + expanded_schema[_get_key_name(or_key)] = _json_schema( + sub_schema, is_main_schema=False, description=_get_key_description(or_key) + ) + + return_schema.update( + { + "type": "object", + "properties": expanded_schema, + "required": required_keys, + "additionalProperties": additional_properties, + } + ) + + if is_main_schema: + return_schema.update({"$id": schema_id, "$schema": "http://json-schema.org/draft-07/schema#"}) + if self._name: + return_schema["title"] = self._name + + if definitions_by_name: + return_schema["definitions"] = {} + for definition_name, definition in definitions_by_name.items(): + return_schema["definitions"][definition_name] = definition + + return _create_or_use_ref(return_schema) + + return _json_schema(self, True) + + +class Optional(Schema): + """Marker for an optional part of the validation Schema.""" + + _MARKER = object() + + def 
__init__(self, *args, **kwargs): + default = kwargs.pop("default", self._MARKER) + super(Optional, self).__init__(*args, **kwargs) + if default is not self._MARKER: + # See if I can come up with a static key to use for myself: + if _priority(self._schema) != COMPARABLE: + raise TypeError( + "Optional keys with defaults must have simple, " + "predictable values, like literal strings or ints. " + '"%r" is too complex.' % (self._schema,) + ) + self.default = default + self.key = str(self._schema) + + def __hash__(self): + return hash(self._schema) + + def __eq__(self, other): + return ( + self.__class__ is other.__class__ and + getattr(self, "default", self._MARKER) == getattr(other, "default", self._MARKER) and + self._schema == other._schema + ) + + def reset(self): + if hasattr(self._schema, "reset"): + self._schema.reset() + + +class Hook(Schema): + def __init__(self, *args, **kwargs): + self.handler = kwargs.pop("handler", lambda *args: None) + super(Hook, self).__init__(*args, **kwargs) + self.key = self._schema + + +class Forbidden(Hook): + def __init__(self, *args, **kwargs): + kwargs["handler"] = self._default_function + super(Forbidden, self).__init__(*args, **kwargs) + + @staticmethod + def _default_function(nkey, data, error): + raise SchemaForbiddenKeyError("Forbidden key encountered: %r in %r" % (nkey, data), error) + + +class Literal(object): + def __init__(self, value, description=None): + self._schema = value + self._description = description + + def __str__(self): + return self._schema + + def __repr__(self): + return 'Literal("' + self.schema + '", description="' + (self.description or "") + '")' + + @property + def description(self): + return self._description + + @property + def schema(self): + return self._schema + + +class Const(Schema): + def validate(self, data, **kwargs): + super(Const, self).validate(data, **kwargs) + return data + + +def _callable_str(callable_): + if hasattr(callable_, "__name__"): + return callable_.__name__ + return str(callable_) + + +def _plural_s(sized): + return "s" if len(sized) > 1 else "" + + +# The following functions are added to be able to translate an user-specified Dict into a SchemaDict. The Schema +# class module was obtained from: + +# https://github.com/keleshev/schema/blob/master/schema.py + + +def build_schema(data: Dict) -> Dict: + """ + Description + ----------- + + This function takes in a user-provided dictionary and defines the + respective schema. + + Parameters + ---------- + + data: Dict + + A Python dictionary containing the schema attributes. + + Returns + ------- + + schema_dict: Dict + + A Python dictionary containing the schema. + + """ + + # TODO: Find an alternative to pydoc.locate() to identify type. + schema_dict = {} + for datum in data: + data_dict = data[datum] + + # Check whether the variable is optional; proceed accordingly. + if "optional" not in data_dict: + data_dict['optional'] = False + schema_dict[datum] = locate(data_dict["type"]) + else: + if data_dict['optional']: + schema_dict[datum] = locate(data_dict["type"]) + + # Build the schema accordingly. 
+ try: + if data_dict["optional"]: + schema_dict[Optional(datum, default=data_dict["default"]) + ] = locate(data_dict["type"]) + else: + schema_dict[datum] = locate(data_dict["type"]) + except AttributeError: + pass + + return schema_dict + + +def validate_schema(schema_dict: Dict, data: Dict) -> Dict: + """ + Description + ------------ + + This function validates the schema; if an optional key value has + not be specified, a the default value for the option is defined + within the returned Dict. + + Parameters + ---------- + + schema_dict: Dict + + A Python dictionary containing the schema. + + data: Dict + + A Python dictionary containing the configuration to be + validated. + + Returns + ------- + + data: Dict + + A Python dictionary containing the validated schema; if any + optional values have not been define within `data` (above), + they are updated with the schema default values. + + """ + + # Define the schema instance. + schema = Schema([schema_dict], ignore_extra_keys=True) + + # If any `Optional` keys are missing from the scheme to be + # validated (`data`), update them acccordingly. + for k, v in schema_dict.items(): + if isinstance(k, Optional): + if k.key not in data: + data[k.key] = k.default + + # Validate the schema and return the updated dictionary. + schema.validate([data]) + + return data diff --git a/ush/python/pygw/src/pygw/task.py b/ush/python/pygw/src/pygw/task.py new file mode 100644 index 0000000000..22ce4626d8 --- /dev/null +++ b/ush/python/pygw/src/pygw/task.py @@ -0,0 +1,93 @@ +import logging +from typing import Dict + +from pygw.attrdict import AttrDict +from pygw.timetools import add_to_datetime, to_timedelta + +logger = logging.getLogger(__name__.split('.')[-1]) + + +class Task: + """ + Base class for all tasks + """ + + def __init__(self, config: Dict, *args, **kwargs): + """ + Every task needs a config. + Additional arguments (or key-value arguments) can be provided. 
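Tying `build_schema` and `validate_schema` above together, a hedged sketch using a spec dict shaped like the `test_schema.yaml` test file added later in this change; the variable names, types, and defaults are illustrative only:

```python
from pygw.schema import build_schema, validate_schema

spec = {
    "variable1": {"type": "bool", "optional": False},
    "variable2": {"type": "int", "optional": True, "default": 2},
}

schema_dict = build_schema(spec)
data = validate_schema(schema_dict, {"variable1": True})
assert data["variable2"] == 2  # the optional key picked up its default
```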
+ + Parameters + ---------- + config : Dict + dictionary object containing task configuration + + *args : tuple + Additional arguments to `Task` + + **kwargs : dict, optional + Extra keyword arguments to `Task` + """ + + # Store the config and arguments as attributes of the object + self.config = AttrDict(config) + + for arg in args: + setattr(self, str(arg), arg) + + for key, value in kwargs.items(): + setattr(self, key, value) + + # Pull out basic runtime keys values from config into its own runtime config + self.runtime_config = AttrDict() + runtime_keys = ['PDY', 'cyc', 'DATA', 'RUN', 'CDUMP'] # TODO: eliminate CDUMP and use RUN instead + for kk in runtime_keys: + try: + self.runtime_config[kk] = config[kk] + logger.debug(f'Deleting runtime_key {kk} from config') + del self.config[kk] + except KeyError: + raise KeyError(f"Encountered an unreferenced runtime_key {kk} in 'config'") + + # Any other composite runtime variables that may be needed for the duration of the task + # can be constructed here + + # Construct the current cycle datetime object + self.runtime_config['current_cycle'] = add_to_datetime(self.runtime_config['PDY'], to_timedelta(f"{self.runtime_config.cyc}H")) + logger.debug(f"current cycle: {self.runtime_config['current_cycle']}") + + # Construct the previous cycle datetime object + self.runtime_config['previous_cycle'] = add_to_datetime(self.runtime_config.current_cycle, -to_timedelta(f"{self.config['assim_freq']}H")) + logger.debug(f"previous cycle: {self.runtime_config['previous_cycle']}") + + pass + + def initialize(self): + """ + Initialize methods for a task + """ + pass + + def configure(self): + """ + Configuration methods for a task in preparation for execution + """ + pass + + def execute(self): + """ + Execute methods for a task + """ + pass + + def finalize(self): + """ + Methods for after the execution that produces output task + """ + pass + + def clean(self): + """ + Methods to clean after execution and finalization prior to closing out a task + """ + pass diff --git a/ush/python/pygw/src/pygw/template.py b/ush/python/pygw/src/pygw/template.py new file mode 100644 index 0000000000..8532305783 --- /dev/null +++ b/ush/python/pygw/src/pygw/template.py @@ -0,0 +1,191 @@ +import re +import os +import copy +from collections import namedtuple +from collections.abc import Sequence + +# Template imported with permission from jcsda/solo + +__all__ = ['Template', 'TemplateConstants'] + + +class TemplateConstants: + DOLLAR_CURLY_BRACE = '${}' + DOLLAR_PARENTHESES = '$()' + DOUBLE_CURLY_BRACES = '{{}}' + AT_SQUARE_BRACES = '@[]' + AT_ANGLE_BRACKETS = '@<>' + + SubPair = namedtuple('SubPair', ['regex', 'slice']) + + +class Template: + + """ + Utility for substituting variables in a template. The template can be the contents of a whole file + as a string (substitute_string) or in a complex dictionary (substitute_structure). + substitutions define different type of variables with a regex and a slice: + - the regex is supposed to find the whole variable, e.g, $(variable) + - the slice indicate how to slice the value returned by the regex to have the variable name, in the + case of $(variable), the slice is 2, -1 to remove $( and ). + You can easily add new type of variables following those rules. + + Please note that the regexes allow for at least one nested variable and the code is able to handle it. + It means that $($(variable)) will be processed correctly but the substitutions will need more than one + pass. 
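A hypothetical instantiation of the `Task` base class above, showing how the runtime keys are pulled out of the incoming config; every value below is a placeholder:

```python
from datetime import datetime

from pygw.task import Task

config = {"PDY": datetime(2023, 4, 1), "cyc": 6,
          "DATA": "/path/to/rundir", "RUN": "gdas", "CDUMP": "gdas",
          "assim_freq": 6, "case": "C48"}

task = Task(config)
print(task.runtime_config.current_cycle)   # 2023-04-01 06:00:00
print(task.runtime_config.previous_cycle)  # 2023-04-01 00:00:00
print("PDY" in task.config)                # False: runtime keys were moved out
```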
+ + If you have a file that is deeper than just a simple dictionary of has lists in it, you can use the method + build_index to create a dictionary that will have all the options from deeper levels (list, dicts). + You can then pass index.get as an argument to any method you use. + If you use substitute_with_dependencies, this is done automatically. + """ + + substitutions = { + TemplateConstants.DOLLAR_CURLY_BRACE: TemplateConstants.SubPair(re.compile(r'\${.*?}+'), slice(2, -1)), + TemplateConstants.DOLLAR_PARENTHESES: TemplateConstants.SubPair(re.compile(r'\$\(.*?\)+'), slice(2, -1)), + TemplateConstants.DOUBLE_CURLY_BRACES: TemplateConstants.SubPair(re.compile(r'{{.*?}}+'), slice(2, -2)), + TemplateConstants.AT_SQUARE_BRACES: TemplateConstants.SubPair(re.compile(r'@\[.*?\]+'), slice(2, -1)), + TemplateConstants.AT_ANGLE_BRACKETS: TemplateConstants.SubPair( + re.compile(r'@\<.*?\>+'), slice(2, -1)) + } + + @classmethod + def find_variables(cls, variable_to_substitute: str, var_type: str): + pair = cls.substitutions[var_type] + return [x[pair.slice] for x in re.findall(pair.regex, variable_to_substitute)] + + @classmethod + def substitute_string(cls, variable_to_substitute, var_type: str, get_value): + """ + Substitutes variables under the form var_type (e.g. DOLLAR_CURLY_BRACE), looks for a value returned + by function get_value and if found, substitutes the variable. Convert floats and int to string + before substitution. If the value in the dictionary is a complex type, just assign it instead + of substituting. + get_value is a function that returns the value to substitute: + signature: get_value(variable_name). + If substituting from a dictionary my_dict, pass my_dict.get + """ + pair = cls.substitutions[var_type] + if isinstance(variable_to_substitute, str): + variable_names = re.findall(pair.regex, variable_to_substitute) + for variable in variable_names: + var = variable[pair.slice] + v = get_value(var) + if v is not None: + if not is_single_type_or_string(v): + if len(variable_names) == 1: + # v could be a list or a dictionary (complex structure and not a string). + # If there is one variable that is the whole + # string, we can safely replace, otherwise do nothing. + if variable_to_substitute.replace(variable_names[0][pair.slice], '') == var_type: + variable_to_substitute = v + else: + if isinstance(v, float) or isinstance(v, int): + v = str(v) + if isinstance(v, str): + variable_to_substitute = variable_to_substitute.replace( + variable, v) + else: + variable_to_substitute = v + else: + more = re.search(pair.regex, var) + if more is not None: + new_value = cls.substitute_string( + var, var_type, get_value) + variable_to_substitute = variable_to_substitute.replace( + var, new_value) + return variable_to_substitute + + @classmethod + def substitute_structure(cls, structure_to_substitute, var_type: str, get_value): + """ + Traverses a dictionary and substitutes variables in fields, lists + and nested dictionaries. 
+ """ + if isinstance(structure_to_substitute, dict): + for key, item in structure_to_substitute.items(): + structure_to_substitute[key] = cls.substitute_structure( + item, var_type, get_value) + elif is_sequence_and_not_string(structure_to_substitute): + for i, item in enumerate(structure_to_substitute): + structure_to_substitute[i] = cls.substitute_structure( + item, var_type, get_value) + else: + structure_to_substitute = cls.substitute_string(structure_to_substitute, var_type, + get_value) + return structure_to_substitute + + @classmethod + def substitute_structure_from_environment(cls, structure_to_substitute): + return cls.substitute_structure(structure_to_substitute, TemplateConstants.DOLLAR_CURLY_BRACE, os.environ.get) + + @classmethod + def substitute_with_dependencies(cls, dictionary, keys, var_type: str, shallow_precedence=True, excluded=()): + """ + Given a dictionary with a complex (deep) structure, we want to substitute variables, + using keys, another dictionary that may also have a deep structure (dictionary and keys + can be the same dictionary if you want to substitute in place). + We create an index based on keys (see build_index) and substitute values in dictionary + using index. If variables may refer to other variables, more than one pass of substitution + may be needed, so we substitute until there is no more change in dictionary (convergence). + """ + all_variables = cls.build_index(keys, excluded, shallow_precedence) + previous = {} + while dictionary != previous: + previous = copy.deepcopy(dictionary) + dictionary = cls.substitute_structure( + dictionary, var_type, all_variables.get) + return dictionary + + @classmethod + def build_index(cls, dictionary, excluded=None, shallow_precedence=True): + """ + Builds an index of all keys with their values, going deep into the dictionary. The index + if a flat structure (dictionary). + If the same key name is present more than once in the structure, we want to + either prioritise the values that are near the root of the tree (shallow_precedence=True) + or values that are near the leaves (shallow_precedence=False). We don't anticipate use + cases where the "nearest variable" should be used, but this could constitute a future + improvement. + """ + def build(structure, variables): + if isinstance(structure, dict): + for k, i in structure.items(): + if ((k not in variables) or (k in variables and not shallow_precedence)) and k not in excluded: + variables[k] = i + build(i, variables) + elif is_sequence_and_not_string(structure): + for v in structure: + build(v, variables) + var = {} + if excluded is None: + excluded = set() + build(dictionary, var) + return var + + +# These used to be in basic.py, and have been copied here till they are needed elsewhere. 
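A minimal sketch of the `Template` utility above resolving `$(var)`-style references, including one level of nesting; the dictionary keys and paths are invented for illustration:

```python
from pygw.template import Template, TemplateConstants

config = {"HOMEgfs": "/path/to/global-workflow",
          "FIXgfs": "$(HOMEgfs)/fix",
          "files": ["$(FIXgfs)/am", "$(FIXgfs)/orog"]}

# Substitute iteratively until nested references converge.
resolved = Template.substitute_with_dependencies(
    config, config, TemplateConstants.DOLLAR_PARENTHESES)
print(resolved["files"][0])  # /path/to/global-workflow/fix/am
```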
+
+
+def is_sequence_and_not_string(a):
+    return isinstance(a, Sequence) and not isinstance(a, str)
+
+
+def is_single_type(s):
+    try:
+        len(s)
+    except TypeError:
+        return True
+    else:
+        return False
+
+
+def is_single_type_or_string(s):
+    if isinstance(s, str):
+        return True
+    try:
+        len(s)
+    except TypeError:
+        return True
+    else:
+        return False
diff --git a/ush/python/pygw/src/pygw/timetools.py b/ush/python/pygw/src/pygw/timetools.py
new file mode 100644
index 0000000000..cd43b55bfa
--- /dev/null
+++ b/ush/python/pygw/src/pygw/timetools.py
@@ -0,0 +1,316 @@
+import re
+import datetime
+
+
+__all__ = ["to_datetime", "to_timedelta",
+           "datetime_to_YMDH", "datetime_to_YMD", "datetime_to_JDAY",
+           "timedelta_to_HMS",
+           "strftime", "strptime",
+           "to_YMDH", "to_YMD", "to_JDAY", "to_julian",
+           "to_isotime", "to_fv3time",
+           "add_to_datetime", "add_to_timedelta"]
+
+
+_DATETIME_RE = re.compile(
+    r"(?P<year>\d{4})(-)?(?P<month>\d{2})(-)?(?P<day>\d{2})"
+    r"(T)?(?P<hour>\d{2})?(:)?(?P<minute>\d{2})?(:)?(?P<second>\d{2})?(Z)?")
+
+_TIMEDELTA_HOURS_RE = re.compile(
+    r"(?P<sign>[+-])?"
+    r"((?P<days>\d+)[d])?"
+    r"(T)?((?P<hours>\d+)[H])?((?P<minutes>\d+)[M])?((?P<seconds>\d+)[S])?(Z)?")
+_TIMEDELTA_TIME_RE = re.compile(
+    r"(?P<sign>[+-])?"
+    r"((?P<days>\d+)(\s)day(s)?,(\s)?)?"
+    r"(T)?(?P<hours>\d{1,2})?(:(?P<minutes>\d{1,2}))?(:(?P<seconds>\d{1,2}))?")
+
+
+def to_datetime(dtstr: str) -> datetime.datetime:
+    """
+    Description
+    -----------
+    Translate a string into a datetime object in a generic way.
+    The string can also support ISO 8601 representation.
+
+    Formats accepted (T, Z, -, :) are optional:
+        YYYY-mm-dd
+        YYYY-mm-ddTHHZ
+        YYYY-mm-ddTHH:MMZ
+        YYYY-mm-ddTHH:MM:SSZ
+
+    Parameters
+    ----------
+    dtstr : str
+        String to be translated into a datetime object
+
+    Returns
+    -------
+    datetime.datetime
+        Datetime object
+    """
+
+    mm = _DATETIME_RE.match(dtstr)
+    if mm:
+        return datetime.datetime(**{kk: int(vv) for kk, vv in mm.groupdict().items() if vv})
+    else:
+        raise Exception(f"Bad datetime string: '{dtstr}'")
+
+
+def to_timedelta(tdstr: str) -> datetime.timedelta:
+    """
+    Description
+    -----------
+    Translate a string into a timedelta object in a generic way
+
+    Formats accepted (<sign>, T, Z) are optional:
+        <sign><days>dT<hours>H<minutes>M<seconds>SZ
+        <sign><days> day(s), hh:mm:ss
+
+    <sign> can be +/-, default is +
+    <days>
can be any integer, default is 0 + can be any integer, default is 0 + can be any integer, default is 0 + can be any integer, default is 0 + + Parameters + ---------- + tdstr : str + String to be translated into a timedelta object + + Returns + ------- + datetime.timedelta + Timedelta object + """ + + time_dict = {'sign': '+', + 'days': 0, + 'hours': 0, + 'minutes': 0, + 'seconds': 0} + + if any(x in tdstr for x in ['day', 'days', ':']): + mm = _TIMEDELTA_TIME_RE.match(tdstr) # timedelta representation + else: + mm = _TIMEDELTA_HOURS_RE.match(tdstr) # ISO 8601 representation + + if mm: + nmm = {kk: vv if vv is not None else time_dict[kk] + for kk, vv in mm.groupdict().items()} + del nmm['sign'] + nmm = {kk: float(vv) for kk, vv in nmm.items()} + dt = datetime.timedelta(**nmm) + if mm.group('sign') is not None and mm.group('sign') == '-': + dt = -dt + return dt + else: + raise Exception(f"Bad timedelta string: '{tdstr}'") + + +def datetime_to_YMDH(dt: datetime.datetime) -> str: + """ + Description + ----------- + Translate a datetime object to 'YYYYmmddHH' format. + + Parameters + ---------- + dt : datetime.datetime + Datetime object to translate. + + Returns + ------- + str: str + Formatted string in 'YYYYmmddHH' format. + """ + try: + return dt.strftime('%Y%m%d%H') + except Exception: + raise Exception(f"Bad datetime: '{dt}'") + + +def datetime_to_YMD(dt: datetime.datetime) -> str: + """ + Description + ----------- + Translate a datetime object to 'YYYYmmdd' format. + + Parameters + ---------- + dt : datetime.datetime + Datetime object to translate. + + Returns + ------- + str: str + Formatted string in 'YYYYmmdd' format. + """ + try: + return dt.strftime('%Y%m%d') + except Exception: + raise Exception(f"Bad datetime: '{dt}'") + + +def datetime_to_JDAY(dt: datetime.datetime) -> str: + """ + Description + ----------- + Translate a datetime object to 'YYYYDOY' format. + + + Parameters + ---------- + dt : datetime.datetime + Datetime object to translate + + Returns + ------- + str: str + Formatted string in 'YYYYDOY' format. + """ + try: + return dt.strftime('%Y%j') + except Exception: + raise Exception(f"Bad datetime: '{dt}'") + + +def timedelta_to_HMS(td: datetime.timedelta) -> str: + """ + Description + ----------- + Translate a timedelta object to 'HH:MM:SS' format. + + Parameters + ---------- + td : datetime.timedelta + Timedelta object to translate. + + Returns + ------- + str: str + Formatted string in 'HH:MM:SS' format. + """ + try: + hours, remainder = divmod(int(td.total_seconds()), 3600) + minutes, seconds = divmod(remainder, 60) + return f"{hours:02d}:{minutes:02d}:{seconds:02d}" + except Exception: + raise Exception(f"Bad timedelta: '{td}'") + + +def strftime(dt: datetime.datetime, fmt: str) -> str: + """ + Return a formatted string from a datetime object. + """ + try: + return dt.strftime(fmt) + except Exception: + raise Exception(f"Bad datetime (format): '{dt} ({fmt})'") + + +def strptime(dtstr: str, fmt: str) -> datetime.datetime: + """ + Description + ----------- + Translate a formatted string into datetime object. + + Parameters + ---------- + dtstr : str + Datetime string to translate. + fmt : str + Datetime string format. + + Returns + ------- + datetime.datetime: datetime.datetime + Datetime object. 
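A few illustrative calls against the `timetools` helpers above; the dates are chosen arbitrarily:

```python
from pygw.timetools import datetime_to_YMDH, timedelta_to_HMS, to_datetime, to_timedelta

t0 = to_datetime("2023-04-01T06:00:00Z")   # ISO-like strings are accepted
dt = to_timedelta("6H")                    # one 6-hour cycle

print(datetime_to_YMDH(t0 + dt))  # 2023040112
print(timedelta_to_HMS(dt))       # 06:00:00
```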
+ """ + try: + return datetime.datetime.strptime(dtstr, fmt) + except Exception: + raise Exception(f"Bad datetime string (format): '{dtstr} ({fmt})'") + + +def to_isotime(dt: datetime.datetime) -> str: + """ + Description + ----------- + Return a ISO formatted '%Y-%m-%dT%H:%M:%SZ' string from a datetime object. + + Parameters + ---------- + dt : datetime.datetime + Datetime object to format. + + Returns + ------- + str: str + Formatted string in ISO format. + """ + return strftime(dt, '%Y-%m-%dT%H:%M:%SZ') + + +def to_fv3time(dt: datetime.datetime) -> str: + """ + Description + ----------- + Return a FV3 formatted string from a datetime object. + + Parameters + ---------- + dt : datetime.datetime + Datetime object to format. + + Returns + ------- + str: str + Formatted string in FV3 format. + """ + return strftime(dt, '%Y%m%d.%H%M%S') + + +def add_to_datetime(dt: datetime.datetime, td: datetime.timedelta) -> datetime.datetime: + """ + Description + ----------- + Adds a timedelta to a datetime object. + + Parameters + ---------- + dt : datetime.datetime + Datetime object to add to. + td : datetime.timedelta + Timedelta object to add. + + Returns + ------- + datetime.datetime + """ + return dt + td + + +def add_to_timedelta(td1, td2): + """ + Description + ----------- + Adds two timedelta objects. + + Parameters + ---------- + td1 : datetime.timedelta + First timedelta object to add. + td2 : datetime.timedelta + Second timedelta object to add. + + Returns + ------- + datetime.timedelta + """ + return td1 + td2 + + +to_YMDH = datetime_to_YMDH +to_YMD = datetime_to_YMD +to_JDAY = datetime_to_JDAY +to_julian = datetime_to_JDAY diff --git a/ush/python/pygw/src/pygw/yaml_file.py b/ush/python/pygw/src/pygw/yaml_file.py new file mode 100644 index 0000000000..89cd1e2ec0 --- /dev/null +++ b/ush/python/pygw/src/pygw/yaml_file.py @@ -0,0 +1,208 @@ +import os +import re +import json +import yaml +import datetime +from typing import Any, Dict +from .attrdict import AttrDict +from .template import TemplateConstants, Template +from .jinja import Jinja + +__all__ = ['YAMLFile', 'parse_yaml', 'parse_yamltmpl', 'parse_j2yaml', + 'save_as_yaml', 'dump_as_yaml', 'vanilla_yaml'] + + +class YAMLFile(AttrDict): + """ + Reads a YAML file as an AttrDict and recursively converts + nested dictionaries into AttrDict. + This is the entry point for all YAML files. + """ + + def __init__(self, path=None, data=None): + super().__init__() + + if path and data: + print("Ignoring 'data' and using 'path' argument") + + config = None + if path is not None: + config = parse_yaml(path=path) + elif data is not None: + config = parse_yaml(data=data) + + if config is not None: + self.update(config) + + def save(self, target): + save_as_yaml(self, target) + + def dump(self): + return dump_as_yaml(self) + + def as_dict(self): + return vanilla_yaml(self) + + +def save_as_yaml(data, target): + # specifies a wide file so that long strings are on one line. + with open(target, 'w') as fh: + yaml.safe_dump(vanilla_yaml(data), fh, + width=100000, sort_keys=False) + + +def dump_as_yaml(data): + return yaml.dump(vanilla_yaml(data), + width=100000, sort_keys=False) + + +def parse_yaml(path=None, data=None, + encoding='utf-8', loader=yaml.SafeLoader): + """ + Load a yaml configuration file and resolve any environment variables + The environment variables must have !ENV before them and be in this format + to be parsed: ${VAR_NAME}. 
+ E.g.: + database: + host: !ENV ${HOST} + port: !ENV ${PORT} + app: + log_path: !ENV '/var/${LOG_PATH}' + something_else: !ENV '${AWESOME_ENV_VAR}/var/${A_SECOND_AWESOME_VAR}' + :param str path: the path to the yaml file + :param str data: the yaml data itself as a stream + :param Type[yaml.loader] loader: Specify which loader to use. Defaults to yaml.SafeLoader + :param str encoding: the encoding of the data if a path is specified, defaults to utf-8 + :return: the dict configuration + :rtype: Dict[str, Any] + + Adopted from: + https://dev.to/mkaranasou/python-yaml-configuration-with-environment-variables-parsing-2ha6 + """ + # define tags + envtag = '!ENV' + inctag = '!INC' + # pattern for global vars: look for ${word} + pattern = re.compile(r'.*?\${(\w+)}.*?') + loader = loader or yaml.SafeLoader + + # the envtag will be used to mark where to start searching for the pattern + # e.g. somekey: !ENV somestring${MYENVVAR}blah blah blah + loader.add_implicit_resolver(envtag, pattern, None) + loader.add_implicit_resolver(inctag, pattern, None) + + def expand_env_variables(line): + match = pattern.findall(line) # to find all env variables in line + if match: + full_value = line + for g in match: + full_value = full_value.replace( + f'${{{g}}}', os.environ.get(g, f'${{{g}}}') + ) + return full_value + return line + + def constructor_env_variables(loader, node): + """ + Extracts the environment variable from the node's value + :param yaml.Loader loader: the yaml loader + :param node: the current node in the yaml + :return: the parsed string that contains the value of the environment + variable + """ + value = loader.construct_scalar(node) + return expand_env_variables(value) + + def constructor_include_variables(loader, node): + """ + Extracts the environment variable from the node's value + :param yaml.Loader loader: the yaml loader + :param node: the current node in the yaml + :return: the content of the file to be included + """ + value = loader.construct_scalar(node) + value = expand_env_variables(value) + expanded = parse_yaml(value) + return expanded + + loader.add_constructor(envtag, constructor_env_variables) + loader.add_constructor(inctag, constructor_include_variables) + + if path: + with open(path, 'r', encoding=encoding) as conf_data: + return yaml.load(conf_data, Loader=loader) + elif data: + return yaml.load(data, Loader=loader) + else: + raise ValueError( + "Either a path or data should be defined as input") + + +def vanilla_yaml(ctx): + """ + Transform an input object of complex type as a plain type + """ + if isinstance(ctx, AttrDict): + return {kk: vanilla_yaml(vv) for kk, vv in ctx.items()} + elif isinstance(ctx, list): + return [vanilla_yaml(vv) for vv in ctx] + elif isinstance(ctx, datetime.datetime): + return ctx.strftime("%Y-%m-%dT%H:%M:%SZ") + else: + return ctx + + +def parse_j2yaml(path: str, data: Dict) -> Dict[str, Any]: + """ + Description + ----------- + Load a compound jinja2-templated yaml file and resolve any templated variables. + The jinja2 templates are first resolved and then the rendered template is parsed as a yaml. + Finally, any remaining $( ... 
) templates are resolved + + Parameters + ---------- + path : str + the path to the yaml file + data : Dict[str, Any], optional + the context for jinja2 templating + Returns + ------- + Dict[str, Any] + the dict configuration + """ + jenv = Jinja(path, data) + yaml_file = jenv.render + yaml_dict = YAMLFile(data=yaml_file) + yaml_dict = Template.substitute_structure( + yaml_dict, TemplateConstants.DOLLAR_PARENTHESES, data.get) + + # If the input yaml file included other yamls with jinja2 templates, then we need to re-parse the jinja2 templates in them + jenv2 = Jinja(json.dumps(yaml_dict, indent=4), data) + yaml_file2 = jenv2.render + yaml_dict = YAMLFile(data=yaml_file2) + + return yaml_dict + + +def parse_yamltmpl(path: str, data: Dict = None) -> Dict[str, Any]: + """ + Description + ----------- + Load a simple templated yaml file and then resolve any templated variables defined as $( ... ) + Parameters + ---------- + path : str + the path to the yaml file + data : Dict[str, Any], optional + the context for pygw.Template templating + Returns + ------- + Dict[str, Any] + the dict configuration + """ + yaml_dict = YAMLFile(path=path) + if data is not None: + yaml_dict = Template.substitute_structure(yaml_dict, TemplateConstants.DOLLAR_PARENTHESES, data.get) + + return yaml_dict diff --git a/ush/python/pygw/src/tests/__init__.py b/ush/python/pygw/src/tests/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/ush/python/pygw/src/tests/test-files/test_schema.yaml b/ush/python/pygw/src/tests/test-files/test_schema.yaml new file mode 100644 index 0000000000..741313118b --- /dev/null +++ b/ush/python/pygw/src/tests/test-files/test_schema.yaml @@ -0,0 +1,21 @@ +# A mandatory boolean valued variable. +variable1: + optional: False + type: bool + +# An optional complex valued variable. +variable2: + optional: True + type: int + default: 2 + +# A mandatory string variable. +variable3: + type: str + +# The default value should be ignored here as it is not optional; the +# default value is meaningless. +variable4: + type: float + optional: False + default: 10.0 diff --git a/ush/python/pygw/src/tests/test_configuration.py b/ush/python/pygw/src/tests/test_configuration.py new file mode 100644 index 0000000000..e83c2755b8 --- /dev/null +++ b/ush/python/pygw/src/tests/test_configuration.py @@ -0,0 +1,172 @@ +import os +import pytest +from datetime import datetime + +from pygw.configuration import Configuration, cast_as_dtype + +file0 = """#!/bin/bash +export SOME_ENVVAR1="${USER}" +export SOME_LOCALVAR1="myvar1" +export SOME_LOCALVAR2="myvar2.0" +export SOME_LOCALVAR3="myvar3_file0" +export SOME_PATH1="/path/to/some/directory" +export SOME_PATH2="/path/to/some/file" +export SOME_DATE1="20221225" +export SOME_DATE2="2022122518" +export SOME_DATE3="202212251845" +export SOME_INT1=3 +export SOME_INT2=15 +export SOME_INT3=-999 +export SOME_FLOAT1=0.2 +export SOME_FLOAT2=3.5 +export SOME_FLOAT3=-9999. +export SOME_BOOL1=YES +export SOME_BOOL2=.true. +export SOME_BOOL3=.T. +export SOME_BOOL4=NO +export SOME_BOOL5=.false. +export SOME_BOOL6=.F. +""" + +file1 = """#!/bin/bash +export SOME_LOCALVAR3="myvar3_file1" +export SOME_LOCALVAR4="myvar4" +export SOME_BOOL7=.TRUE. 
+""" + +file0_dict = { + 'SOME_ENVVAR1': os.environ['USER'], + 'SOME_LOCALVAR1': "myvar1", + 'SOME_LOCALVAR2': "myvar2.0", + 'SOME_LOCALVAR3': "myvar3_file0", + 'SOME_PATH1': "/path/to/some/directory", + 'SOME_PATH2': "/path/to/some/file", + 'SOME_DATE1': datetime(2022, 12, 25, 0, 0, 0), + 'SOME_DATE2': datetime(2022, 12, 25, 18, 0, 0), + 'SOME_DATE3': datetime(2022, 12, 25, 18, 45, 0), + 'SOME_INT1': 3, + 'SOME_INT2': 15, + 'SOME_INT3': -999, + 'SOME_FLOAT1': 0.2, + 'SOME_FLOAT2': 3.5, + 'SOME_FLOAT3': -9999., + 'SOME_BOOL1': True, + 'SOME_BOOL2': True, + 'SOME_BOOL3': True, + 'SOME_BOOL4': False, + 'SOME_BOOL5': False, + 'SOME_BOOL6': False +} + +file1_dict = { + 'SOME_LOCALVAR3': "myvar3_file1", + 'SOME_LOCALVAR4': "myvar4", + 'SOME_BOOL7': True +} + +str_dtypes = [ + ('HOME', 'HOME'), +] + +int_dtypes = [ + ('1', 1), +] + +float_dtypes = [ + ('1.0', 1.0), +] + +bool_dtypes = [ + ('y', True), ('n', False), + ('Y', True), ('N', False), + ('yes', True), ('no', False), + ('Yes', True), ('No', False), + ('YES', True), ('NO', False), + ('t', True), ('f', False), + ('T', True), ('F', False), + ('true', True), ('false', False), + ('True', True), ('False', False), + ('TRUE', True), ('FALSE', False), + ('.t.', True), ('.f.', False), + ('.T.', True), ('.F.', False), +] + +datetime_dtypes = [ + ('20221215', datetime(2022, 12, 15, 0, 0, 0)), + ('2022121518', datetime(2022, 12, 15, 18, 0, 0)), + ('2022121518Z', datetime(2022, 12, 15, 18, 0, 0)), + ('20221215T1830', datetime(2022, 12, 15, 18, 30, 0)), + ('20221215T1830Z', datetime(2022, 12, 15, 18, 30, 0)), +] + + +def evaluate(dtypes): + for pair in dtypes: + print(f"Test: '{pair[0]}' ==> {pair[1]}") + assert pair[1] == cast_as_dtype(pair[0]) + + +def test_cast_as_dtype_str(): + evaluate(str_dtypes) + + +def test_cast_as_dtype_int(): + evaluate(int_dtypes) + + +def test_cast_as_dtype_float(): + evaluate(float_dtypes) + + +def test_cast_as_dtype_bool(): + evaluate(bool_dtypes) + + +def test_cast_as_dtype_datetimes(): + evaluate(datetime_dtypes) + + +@pytest.fixture +def create_configs(tmp_path): + + file_path = tmp_path / 'config.file0' + with open(file_path, 'w') as fh: + fh.write(file0) + + file_path = tmp_path / 'config.file1' + with open(file_path, 'w') as fh: + fh.write(file1) + + +def test_configuration_config_dir(tmp_path, create_configs): + cfg = Configuration(tmp_path) + assert cfg.config_dir == tmp_path + + +@pytest.mark.skip(reason="fails in GH runner, passes on localhost") +def test_configuration_config_files(tmp_path, create_configs): + cfg = Configuration(tmp_path) + config_files = [str(tmp_path / 'config.file0'), str(tmp_path / 'config.file1')] + assert config_files == cfg.config_files + + +def test_find_config(tmp_path, create_configs): + cfg = Configuration(tmp_path) + file0 = cfg.find_config('config.file0') + assert str(tmp_path / 'config.file0') == file0 + + +@pytest.mark.skip(reason="fails in GH runner, passes on localhost") +def test_parse_config1(tmp_path, create_configs): + cfg = Configuration(tmp_path) + f0 = cfg.parse_config('config.file0') + assert file0_dict == f0 + + +@pytest.mark.skip(reason="fails in GH runner, passes on localhost") +def test_parse_config2(tmp_path, create_configs): + cfg = Configuration(tmp_path) + ff = cfg.parse_config(['config.file0', 'config.file1']) + ff_dict = file0_dict.copy() + ff_dict.update(file1_dict) + assert ff_dict == ff diff --git a/ush/python/pygw/src/tests/test_exceptions.py b/ush/python/pygw/src/tests/test_exceptions.py new file mode 100644 index 0000000000..79f3e4f1ec --- /dev/null +++ 
b/ush/python/pygw/src/tests/test_exceptions.py @@ -0,0 +1,35 @@ +import pytest + +from pygw.exceptions import WorkflowException + +# ---- + + +class TestError(WorkflowException): + """ + Description + ----------- + + This is the base-class for exceptions encountered within the + pygw/errors unit-tests module; it is a sub-class of Error. + + """ + +# ---- + + +def test_errors() -> None: + """ + Description + ----------- + + This function provides a unit test for the errors module. + + """ + + # Raise the base-class exception. + with pytest.raises(Exception): + msg = "Testing exception raise." + raise TestError(msg=msg) + + assert True diff --git a/ush/python/pygw/src/tests/test_executable.py b/ush/python/pygw/src/tests/test_executable.py new file mode 100644 index 0000000000..4c0e584fab --- /dev/null +++ b/ush/python/pygw/src/tests/test_executable.py @@ -0,0 +1,60 @@ +import os +from pathlib import Path +import pytest +from pygw.executable import Executable, which, CommandNotFoundError + + +script = """#!/bin/bash +echo ${USER} +""" + + +def test_executable(tmp_path): + """ + Tests the class `Executable` + Parameters: + ----------- + tmp_path : Path + temporary path created by pytest + """ + whoami = os.environ['USER'] + + test_file = tmp_path / 'whoami.x' + Path(test_file).touch(mode=0o755) + with open(test_file, 'w') as fh: + fh.write(script) + + cmd = Executable(str(test_file)) + assert cmd.exe == [str(test_file)] + + stdout_file = tmp_path / 'stdout' + stderr_file = tmp_path / 'stderr' + cmd(output=str(stdout_file), error=str(stderr_file)) + with open(str(stdout_file)) as fh: + assert fh.read() == whoami + '\n' + + +def test_which(tmpdir): + """ + Tests the `which()` function. + `which` should return `None` if the executable is not found + Parameters + ---------- + tmpdir : Path + path to a temporary directory created by pytest + """ + os.environ["PATH"] = str(tmpdir) + assert which('test.x') is None + + with pytest.raises(CommandNotFoundError): + which('test.x', required=True) + + path = str(tmpdir.join("test.x")) + + # create a test.x executable in the tmpdir + with tmpdir.as_cwd(): + Path('test.x').touch(mode=0o755) + + exe = which("test.x") + assert exe is not None + assert exe.path == path diff --git a/ush/python/pygw/src/tests/test_file_utils.py b/ush/python/pygw/src/tests/test_file_utils.py new file mode 100644 index 0000000000..684c76b650 --- /dev/null +++ b/ush/python/pygw/src/tests/test_file_utils.py @@ -0,0 +1,66 @@ +import os +from pygw.file_utils import FileHandler + + +def test_mkdir(tmp_path): + """ + Test for creating directories: + Parameters + ---------- + tmp_path - pytest fixture + """ + + dir_path = tmp_path / 'my_test_dir' + d1 = f'{dir_path}1' + d2 = f'{dir_path}2' + d3 = f'{dir_path}3' + + # Create config object for FileHandler + config = {'mkdir': [d1, d2, d3]} + + # Create d1, d2, d3 + FileHandler(config).sync() + + # Check if d1, d2, d3 were indeed created + for dd in config['mkdir']: + assert os.path.exists(dd) + + +def test_copy(tmp_path): + """ + Test for copying files: + Parameters + ---------- + tmp_path - pytest fixture + """ + + input_dir_path = tmp_path / 'my_input_dir' + + # Create the input directory + config = {'mkdir': [input_dir_path]} + FileHandler(config).sync() + + # Put empty files in input_dir_path + src_files = [input_dir_path / 'a.txt', input_dir_path / 'b.txt'] + for ff in src_files: + ff.touch() + + # Create output_dir_path and expected file names + output_dir_path = tmp_path / 'my_output_dir' + config = {'mkdir': [output_dir_path]} + 
FileHandler(config).sync() + dest_files = [output_dir_path / 'a.txt', output_dir_path / 'bb.txt'] + + copy_list = [] + for src, dest in zip(src_files, dest_files): + copy_list.append([src, dest]) + + # Create config object for FileHandler + config = {'copy': copy_list} + + # Copy input files to output files + FileHandler(config).sync() + + # Check if files were indeed copied + for ff in dest_files: + assert os.path.isfile(ff) diff --git a/ush/python/pygw/src/tests/test_jinja.py b/ush/python/pygw/src/tests/test_jinja.py new file mode 100644 index 0000000000..10749515ab --- /dev/null +++ b/ush/python/pygw/src/tests/test_jinja.py @@ -0,0 +1,37 @@ +import pytest + +from datetime import datetime +from pygw.jinja import Jinja +from pygw.timetools import to_isotime + +current_date = datetime.now() +j2tmpl = """Hello {{ name }}! {{ greeting }} It is: {{ current_date | to_isotime }}""" + + +@pytest.fixture +def create_template(tmp_path): + file_path = tmp_path / 'template.j2' + with open(file_path, 'w') as fh: + fh.write(j2tmpl) + + +def test_render_stream(): + data = {"name": "John"} + j = Jinja(j2tmpl, data, allow_missing=True) + assert j.render == "Hello John! {{ greeting }} It is: {{ current_date }}" + + data = {"name": "Jane", "greeting": "How are you?", "current_date": current_date} + j = Jinja(j2tmpl, data, allow_missing=False) + assert j.render == f"Hello Jane! How are you? It is: {to_isotime(current_date)}" + + +def test_render_file(tmp_path, create_template): + + file_path = tmp_path / 'template.j2' + data = {"name": "John"} + j = Jinja(str(file_path), data, allow_missing=True) + assert j.render == "Hello John! {{ greeting }} It is: {{ current_date }}" + + data = {"name": "Jane", "greeting": "How are you?", "current_date": current_date} + j = Jinja(str(file_path), data, allow_missing=False) + assert j.render == f"Hello Jane! How are you? It is: {to_isotime(current_date)}" diff --git a/ush/python/pygw/src/tests/test_logger.py b/ush/python/pygw/src/tests/test_logger.py new file mode 100644 index 0000000000..a9b4504d57 --- /dev/null +++ b/ush/python/pygw/src/tests/test_logger.py @@ -0,0 +1,67 @@ +from pygw.logger import Logger +from pygw.logger import logit + +level = 'debug' +number_of_log_msgs = 5 +reference = {'debug': "Logging test has started", + 'info': "Logging to 'logger.log' in the script dir", + 'warning': "This is my last warning, take heed", + 'error': "This is an error", + 'critical': "He's dead, She's dead. 
They are all dead!"} + + +def test_logger(tmp_path): + """Test log file""" + + logfile = tmp_path / "logger.log" + + try: + log = Logger('test_logger', level=level, logfile_path=logfile, colored_log=True) + log.debug(reference['debug']) + log.info(reference['info']) + log.warning(reference['warning']) + log.error(reference['error']) + log.critical(reference['critical']) + except Exception as e: + raise AssertionError(f'logging failed as {e}') + + # Make sure log to file created messages + try: + with open(logfile, 'r') as fh: + log_msgs = fh.readlines() + except Exception as e: + raise AssertionError(f'failed reading log file as {e}') + + # Ensure number of messages are same + log_msgs_in_logfile = len(log_msgs) + assert log_msgs_in_logfile == number_of_log_msgs + + # Ensure messages themselves are same + for count, line in enumerate(log_msgs): + lev = line.split('-')[3].strip().lower() + message = line.split(':')[-1].strip() + assert reference[lev] == message + + +def test_logit(tmp_path): + + logger = Logger('test_logit', level=level, colored_log=True) + + @logit(logger) + def add(x, y): + return x + y + + @logit(logger) + def usedict(n, j=0, k=1): + return n + j + k + + @logit(logger, 'example') + def spam(): + print('Spam!') + + add(2, 3) + usedict(2, 3) + usedict(2, k=3) + spam() + + assert True diff --git a/ush/python/pygw/src/tests/test_schema.py b/ush/python/pygw/src/tests/test_schema.py new file mode 100644 index 0000000000..220b9866a9 --- /dev/null +++ b/ush/python/pygw/src/tests/test_schema.py @@ -0,0 +1,82 @@ +""" +Description +----------- + +Unit-tests for `pygw.schema`. +""" + +import os +import pytest +from pygw import schema +from pygw.yaml_file import parse_yaml +from pygw.schema import SchemaError +from pygw.configuration import cast_strdict_as_dtypedict + + +# Define the path to the YAML-formatted file containing the schema +# attributes. +# yaml_path = os.path.join(os.getcwd(), "tests", +# "test-files", "test_schema.yaml") +# data = parse_yaml(path=yaml_path) +@pytest.mark.skip(reason="disable till the developer fixes the test") +def test_build_schema(): + """ + Description + ----------- + + This function tests the `pygw.schema.build_schema` function. + + """ + + # Test that the schema can be defined. + assert schema.build_schema(data=data) + + +@pytest.mark.skip(reason="disable till the developer fixes the test") +def test_validate_schema(): + """ + Description + ----------- + + This function tests various application configurations (i.e., + `data_in`) for various schema validation applications. + + """ + + # Define the schema. + schema_dict = schema.build_schema(data=data) + + # Test that the schema validates and returns a the dictionary + # passed; this unit-test should pass. + data_in = { + "variable1": False, + "variable2": 1, + "variable3": "hello world", + "variable4": 10.0, + } + data_out = schema.validate_schema(schema_dict=schema_dict, data=data_in) + assert True + assert data_in == data_out + + # Test that optional values are updated with defaults. + del data_in["variable2"] + data_out = schema.validate_schema(schema_dict=schema_dict, data=data_in) + assert True + + # This unit-test should raise a `SchemaError` exception in order + # to pass. + data_in["variable2"] = "I **should** fail." 
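For orientation, the pygw unit tests added in this change are ordinary pytest modules under ush/python/pygw/src/tests. A minimal local run might look like the sketch below; the virtual environment and the editable install step are assumptions about the local setup, and note that several of the tests above are deliberately skipped with pytest.mark.skip because they fail in the GitHub runner while passing on localhost.

    # hypothetical local invocation of the pygw test suite (paths taken from this change)
    cd ush/python/pygw
    python -m venv .venv && source .venv/bin/activate   # assumed throwaway environment
    pip install -e . pytest                             # assumes pygw provides install metadata
    pytest -v src/tests/test_timetools.py src/tests/test_file_utils.py
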
+ try: + data_out = schema.validate_schema( + schema_dict=schema_dict, data=data_in) + except SchemaError: + assert True + + # This unit-test passes the full environment, including `data_in`, + # to be validated; this tests the `ignore_extra_keys` attribute; + # this unit-test should pass. + del data_in["variable2"] + data_in = {**cast_strdict_as_dtypedict(os.environ), **data_in} + data_out = schema.validate_schema(schema_dict=schema_dict, data=data_in) + assert True + assert data_in == data_out diff --git a/ush/python/pygw/src/tests/test_template.py b/ush/python/pygw/src/tests/test_template.py new file mode 100644 index 0000000000..f6d594b2d9 --- /dev/null +++ b/ush/python/pygw/src/tests/test_template.py @@ -0,0 +1,147 @@ +import os +from pygw.template import TemplateConstants, Template + + +def test_substitute_string_from_dict(): + """ + Substitute with ${v} + """ + template = '${greeting} to ${the_world}' + dictionary = { + 'greeting': 'Hello', + 'the_world': 'the world' + } + final = 'Hello to the world' + assert Template.substitute_structure(template, + TemplateConstants.DOLLAR_CURLY_BRACE, dictionary.get) == final + + +def test_substitute_string_from_dict_paren(): + """ + Substitute with $(v) + """ + template = '$(greeting) to $(the_world)' + dictionary = { + 'greeting': 'Hello', + 'the_world': 'the world' + } + final = 'Hello to the world' + assert Template.substitute_structure(template, + TemplateConstants.DOLLAR_PARENTHESES, dictionary.get) == final + + +def test_assign_string_from_dict_paren(): + """ + Substitute with $(v) should replace with the actual object + """ + template = '$(greeting)' + dictionary = { + 'greeting': { + 'a': 1, + 'b': 2 + } + } + assert Template.substitute_structure(template, + TemplateConstants.DOLLAR_PARENTHESES, + dictionary.get) == dictionary['greeting'] + + +def test_substitute_string_from_dict_double_curly(): + """ + Substitute with {{v}} + """ + template = '{{greeting}} to {{the_world}}' + dictionary = { + 'greeting': 'Hello', + 'the_world': 'the world' + } + final = 'Hello to the world' + assert Template.substitute_structure(template, + TemplateConstants.DOUBLE_CURLY_BRACES, + dictionary.get) == final + + +def test_substitute_string_from_dict_at_square(): + """ + Substitute with @[v] + """ + template = '@[greeting] to @[the_world]' + dictionary = { + 'greeting': 'Hello', + 'the_world': 'the world' + } + final = 'Hello to the world' + assert Template.substitute_structure(template, + TemplateConstants.AT_SQUARE_BRACES, + dictionary.get) == final + + +def test_substitute_string_from_dict_at_carrots(): + """ + Substitute with @ + """ + template = '@ to @' + dictionary = { + 'greeting': 'Hello', + 'the_world': 'the world' + } + final = 'Hello to the world' + assert Template.substitute_structure(template, + TemplateConstants.AT_ANGLE_BRACKETS, + dictionary.get) == final + + +def test_substitute_string_from_environment(): + """ + Substitute from environment + """ + template = '${GREETING} to ${THE_WORLD}' + os.environ['GREETING'] = 'Hello' + os.environ['THE_WORLD'] = 'the world' + final = 'Hello to the world' + assert Template.substitute_structure_from_environment(template) == final + + +def test_substitute_with_dependencies(): + input = { + 'root': '/home/user', + 'config_file': 'config.yaml', + 'config': '$(root)/config/$(config_file)', + 'greeting': 'hello $(world)', + 'world': 'world', + 'complex': '$(dictionary)', + 'dictionary': { + 'a': 1, + 'b': 2 + }, + 'dd': {'2': 'a', '1': 'b'}, + 'ee': {'3': 'a', '1': 'b'}, + 'ff': {'4': 'a', '1': 'b 
$(greeting)'}, + 'host': { + 'name': 'xenon', + 'config': '$(root)/hosts', + 'config_file': '$(config)/$(name).config.yaml', + 'proxy2': { + 'config': '$(root)/$(name).$(greeting).yaml', + 'list': [['$(root)/$(name)', 'toto.$(name).$(greeting)'], '$(config_file)'] + } + } + } + output = {'complex': {'a': 1, 'b': 2}, + 'config': '/home/user/config/config.yaml', + 'config_file': 'config.yaml', + 'dd': {'1': 'b', '2': 'a'}, + 'dictionary': {'a': 1, 'b': 2}, + 'ee': {'1': 'b', '3': 'a'}, + 'ff': {'1': 'b hello world', '4': 'a'}, + 'greeting': 'hello world', + 'host': {'config': '/home/user/hosts', + 'config_file': '/home/user/config/config.yaml/xenon.config.yaml', + 'name': 'xenon', + 'proxy2': {'config': '/home/user/xenon.hello world.yaml', + 'list': [['/home/user/xenon', 'toto.xenon.hello world'], + 'config.yaml']}}, + 'root': '/home/user', + 'world': 'world'} + + assert Template.substitute_with_dependencies(input, input, TemplateConstants.DOLLAR_PARENTHESES) == output diff --git a/ush/python/pygw/src/tests/test_timetools.py b/ush/python/pygw/src/tests/test_timetools.py new file mode 100644 index 0000000000..f7e2cfd2ce --- /dev/null +++ b/ush/python/pygw/src/tests/test_timetools.py @@ -0,0 +1,80 @@ +from datetime import datetime, timedelta +from pygw.timetools import * + +current_date = datetime.now() + + +def test_to_datetime(): + + assert to_datetime('20220314') == datetime(2022, 3, 14) + assert to_datetime('2022031412') == datetime(2022, 3, 14, 12) + assert to_datetime('202203141230') == datetime(2022, 3, 14, 12, 30) + assert to_datetime('2022-03-14') == datetime(2022, 3, 14) + assert to_datetime('2022-03-14T12Z') == datetime(2022, 3, 14, 12) + assert to_datetime('2022-03-14T12:30Z') == datetime(2022, 3, 14, 12, 30) + assert to_datetime('2022-03-14T12:30:45') == datetime(2022, 3, 14, 12, 30, 45) + assert to_datetime('2022-03-14T12:30:45Z') == datetime(2022, 3, 14, 12, 30, 45) + + +def test_to_timedelta(): + assert to_timedelta('2d3H4M5S') == timedelta(days=2, hours=3, minutes=4, seconds=5) + assert to_timedelta('-3H15M') == timedelta(hours=-3, minutes=-15) + assert to_timedelta('1:30:45') == timedelta(hours=1, minutes=30, seconds=45) + assert to_timedelta('5 days, 12:30:15') == timedelta(days=5, hours=12, minutes=30, seconds=15) + + +def test_datetime_to_ymdh(): + assert datetime_to_YMDH(current_date) == current_date.strftime('%Y%m%d%H') + + +def test_datetime_to_ymd(): + assert datetime_to_YMD(current_date) == current_date.strftime('%Y%m%d') + + +def test_timedelta_to_hms(): + td = timedelta(hours=5, minutes=39, seconds=56) + assert timedelta_to_HMS(td) == '05:39:56' + td = timedelta(days=4, hours=5, minutes=39, seconds=56) + assert timedelta_to_HMS(td) == '101:39:56' + + +def test_strftime(): + assert strftime(current_date, '%Y%m%d') == current_date.strftime('%Y%m%d') + assert strftime(current_date, '%Y%m%d %H') == current_date.strftime('%Y%m%d %H') + + +def test_strptime(): + assert strptime(current_date.strftime('%Y%m%d'), '%Y%m%d') == \ + datetime.strptime(current_date.strftime('%Y%m%d'), '%Y%m%d') + + +def test_to_isotime(): + assert to_isotime(current_date) == current_date.strftime('%Y-%m-%dT%H:%M:%SZ') + + +def test_to_fv3time(): + assert to_fv3time(current_date) == current_date.strftime('%Y%m%d.%H%M%S') + + +def test_to_julian(): + assert to_julian(current_date) == current_date.strftime('%Y%j') + + +def test_add_to_timedelta(): + assert add_to_timedelta(timedelta(days=1), timedelta(hours=3)) == \ + timedelta(days=1, hours=3) + assert add_to_timedelta(timedelta(hours=5, 
minutes=30), timedelta(minutes=15)) == \ + timedelta(hours=5, minutes=45) + assert add_to_timedelta(timedelta(seconds=45), timedelta(milliseconds=500)) == \ + timedelta(seconds=45, milliseconds=500) + + +def test_add_to_datetime(): + dt = datetime(2023, 3, 14, 12, 0, 0) + td = timedelta(days=1, hours=6) + negative_td = timedelta(days=-1, hours=-6) + zero_td = timedelta() + + assert add_to_datetime(dt, td) == datetime(2023, 3, 15, 18, 0, 0) + assert add_to_datetime(dt, negative_td) == datetime(2023, 3, 13, 6, 0, 0) + assert add_to_datetime(dt, zero_td) == datetime(2023, 3, 14, 12, 0, 0) diff --git a/ush/python/pygw/src/tests/test_yaml_file.py b/ush/python/pygw/src/tests/test_yaml_file.py new file mode 100644 index 0000000000..d01eb154b2 --- /dev/null +++ b/ush/python/pygw/src/tests/test_yaml_file.py @@ -0,0 +1,97 @@ +import os +import pytest +from datetime import datetime +from pygw.yaml_file import YAMLFile, parse_yamltmpl, parse_j2yaml, save_as_yaml, dump_as_yaml + +host_yaml = """ +host: + hostname: test_host + host_user: !ENV ${USER} +""" + +conf_yaml = """ +config: + config_file: !ENV ${TMP_PATH}/config.yaml + user: !ENV ${USER} + host_file: !INC ${TMP_PATH}/host.yaml +""" + +tmpl_yaml = """ +config: + config_file: !ENV ${TMP_PATH}/config.yaml + user: !ENV ${USER} + host_file: !INC ${TMP_PATH}/host.yaml +tmpl: + cdate: '{{PDY}}{{cyc}}' + homedir: /home/$(user) +""" +# Note the quotes ' ' around {{ }}. These quotes are necessary for yaml otherwise yaml will fail parsing + +j2tmpl_yaml = """ +config: + config_file: !ENV ${TMP_PATH}/config.yaml + user: !ENV ${USER} + host_file: !INC ${TMP_PATH}/host.yaml +tmpl: + cdate: '{{ current_cycle | to_YMD }}{{ current_cycle | strftime('%H') }}' + homedir: /home/$(user) +""" + + +@pytest.fixture +def create_template(tmpdir): + """Create temporary templates for testing""" + tmpdir.join('host.yaml').write(host_yaml) + tmpdir.join('config.yaml').write(conf_yaml) + tmpdir.join('tmpl.yaml').write(tmpl_yaml) + tmpdir.join('j2tmpl.yaml').write(j2tmpl_yaml) + + +def test_yaml_file(tmp_path, create_template): + + # Set env. variable + os.environ['TMP_PATH'] = str(tmp_path) + conf = YAMLFile(path=str(tmp_path / 'config.yaml')) + + # Write out yaml file + yaml_out = tmp_path / 'config_output.yaml' + conf.save(yaml_out) + + # Read in the yaml file and compare w/ conf + yaml_in = YAMLFile(path=str(yaml_out)) + + assert yaml_in == conf + + +def test_yaml_file_with_templates(tmp_path, create_template): + + # Set env. variable + os.environ['TMP_PATH'] = str(tmp_path) + data = {'user': os.environ['USER']} + conf = parse_yamltmpl(path=str(tmp_path / 'tmpl.yaml'), data=data) + + # Write out yaml file + yaml_out = tmp_path / 'tmpl_output.yaml' + save_as_yaml(conf, yaml_out) + + # Read in the yaml file and compare w/ conf + yaml_in = YAMLFile(path=yaml_out) + + assert yaml_in == conf + + +def test_yaml_file_with_j2templates(tmp_path, create_template): + + # Set env. 
variable + os.environ['TMP_PATH'] = str(tmp_path) + data = {'user': os.environ['USER'], 'current_cycle': datetime.now()} + conf = parse_j2yaml(path=str(tmp_path / 'j2tmpl.yaml'), data=data) + + # Write out yaml file + yaml_out = tmp_path / 'j2tmpl_output.yaml' + save_as_yaml(conf, yaml_out) + + # Read in the yaml file and compare w/ conf + yaml_in = YAMLFile(path=yaml_out) + + assert yaml_in == conf diff --git a/ush/radmon_diag_ck.sh b/ush/radmon_diag_ck.sh new file mode 100755 index 0000000000..4045ddb2d5 --- /dev/null +++ b/ush/radmon_diag_ck.sh @@ -0,0 +1,174 @@ +#!/bin/bash + +#---------------------------------------------------------------- +# Check the contents of the radstat file and compare to +# the ${run}_radmon_satype.txt file. Report any missing +# or zero sized diag files. +# + + function usage { + echo "Usage: radmon_diag_ck.sh -rad radstat --sat satype --out output " + echo "" + echo " -r,--rad radstat file (required)" + echo " File name or path to radstat file." + echo "" + echo " -s,--sat satype file (required)" + echo " File name or path to satype file." + echo "" + echo " -o,--out output file name (required)" + echo " File name for missing diag file report." + } + + +echo "--> radmon_diag_ck.sh" + + +#-------------------------- +# Process input arguments +# + nargs=$# + if [[ $nargs -ne 6 ]]; then + usage + exit 1 + fi + + while [[ $# -ge 1 ]] + do + key="$1" + echo $key + + case $key in + -r|--rad) + radstat_file="$2" + shift # past argument + ;; + -s|--sat) + satype_file="$2" + shift # past argument + ;; + -o|--out) + output_file="$2" + shift # past argument + ;; + *) + #unspecified key + echo " unsupported key = $key" + ;; + esac + + shift + done + +# set -ax + + echo " radstat_file = ${radstat_file}" + echo " satype_file = ${satype_file}" + echo " output_file = ${output_file}" + + missing_diag="" + zero_len_diag="" + + #--------------------------------------------- + # get list of diag files in the radstat file + # + radstat_contents=`tar -tf ${radstat_file} | grep '_ges' | + gawk -F"diag_" '{print $2}' | + gawk -F"_ges" '{print $1}'` + + + #--------------------------------------------- + # load contents of satype_file into an array + # + satype_contents=`cat ${satype_file}` + + + #------------------------------------------------- + # compare $satype_contents and $radstat_contents + # report anything missing + # + for sat in $satype_contents; do + test=`echo $radstat_contents | grep $sat` + + if [[ ${#test} -le 0 ]]; then + missing_diag="${missing_diag} ${sat}" + fi + + done + + echo "" + echo "missing_diag = ${missing_diag}" + echo "" + + + #--------------------------------------------------------- + # Check for zero sized diag files. The diag files in + # the radstat file (which is a tar file) are gzipped. + # I find that 0 sized, gzipped file has a size of ~52 + # (I assume that's for header and block size). + # + # So for this check we'll assume anything in the radstat + # file with a size of > 1000 bytes is suspect. (That's + # overkill, 100 is probably sufficient, but I'm the + # nervous type.) So we'll extract, uncompress, and check + # the actual file size of those. Anything with an + # uncompressed size of 0 goes on the zero_len_diag list. 
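For reference, radmon_diag_ck.sh is driven entirely by the three option pairs listed in its usage() function above, and exactly six arguments are required. A standalone invocation might look like the following sketch, where the radstat and satype file names are placeholders:

    # hypothetical invocation; file names are placeholders
    ./radmon_diag_ck.sh --rad gdas.radstat.2022122500 \
                        --sat gdas_radmon_satype.txt \
                        --out missing_diag.txt
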
+ # + + # TODO Rewrite these array parsing commands to avoid using Bash's sloppy word splitting + # File sizes contain only digits and immediately precede the date + # shellcheck disable=SC2207 + sizes=($(tar -vtf ${radstat_file} --wildcards '*_ges*' | grep -P -o '(\d)+(?= \d{4}-\d{2}-\d{2})')) + # Filenames are the last group of non-whitespace characters + # shellcheck disable=SC2207 + filenames=($(tar -vtf ${radstat_file} --wildcards '*_ges*' | grep -P -o '\S+$')) + # shellcheck disable= + + + for file_num in "${!filenames[@]}"; do + file_name="${filenames[${file_num}]}" + file_size="${sizes[${file_num}]}" + + if (( file_size <= 1000 )); then + tar -xf "${radstat_file}" "${file_name}" + gunzip "${file_name}" + uz_file_name="${file_name%.*}" + uz_file_size=$(stat -c "%s" "${uz_file_name}") + + + if (( uz_file_size <= 0 )); then + # Remove leading diag_ + sat=${uz_file_name#diag_} + # Remove trailing _ges* + sat=${sat%_ges*} + + zero_len_diag="${zero_len_diag} ${sat}" + fi + + rm -f ${uz_file_name} + fi + + done + + echo "" + echo "zero_len_diag = ${zero_len_diag}" + echo "" + + + #----------------------------------------- + # Write results to $output_file + # + if [[ ${#zero_len_diag} -gt 0 ]]; then + for zld in ${zero_len_diag}; do + echo " Zero Length diagnostic file: $zld" >> $output_file + done + fi + + if [[ ${#missing_diag} -gt 0 ]]; then + for md in ${missing_diag}; do + echo " Missing diagnostic file : $md" >> $output_file + done + fi + + +echo "<-- radmon_diag_ck.sh" +exit diff --git a/ush/radmon_err_rpt.sh b/ush/radmon_err_rpt.sh new file mode 100755 index 0000000000..8561563d48 --- /dev/null +++ b/ush/radmon_err_rpt.sh @@ -0,0 +1,194 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: radmon_err_rpt.sh +# Script description: Compare the contents of error files from two different +# cycles. +# +# Author: Ed Safford Org: NP23 Date: 2012-02-02 +# +# Abstract: This script compares the contents of two error files from two different +# sets of radiance diagnostic files (which are an output from GSI runs). +# All unique satellite instrument/channel/region combinations that appear +# in both files are reported. +# +# This script is run as a child script of radmon_verf_time.sh. The parent +# script creates/copies the error files into a temporary working +# directory before invoking this script. +# +# +# Usage: radmon_err_rpt.sh file1 file2 type cycle1 cycle2 diag_rpt outfile +# +# Input script positional parameters: +# file1 obs, penalty, or channel error file +# required +# file2 obs, penalty, or channel error file +# required +# type type of error file +# choices are obs, pen, chan, or cnt; required +# cycle1 first cycle processing date +# yyyymmddcc format; required +# cycle2 second cycle processing date +# yyyymmddcc format; required +# diag_rpt diagnostic report text file +# required +# outfile output file name +# required +# +# Remarks: +# +# Condition codes +# 0 - no problem encountered +# >0 - some problem encountered +#################################################################### + +# Command line arguments. 
+file1=${1:-${file1:?}} +file2=${2:-${file2:?}} +type=${3:-${type:?}} +cycle1=${4:-${cycle1:?}} +cycle2=${5:-${cycle2:?}} +diag_rpt=${6:-${diag_rpt:?}} +outfile=${7:-${outfile:?}} + +# Directories +HOMEradmon=${HOMEradmon:-$(pwd)} + +# Other variables +err=0 +RADMON_SUFFIX=${RADMON_SUFFIX} + +have_diag_rpt=0 +if [[ -s $diag_rpt ]]; then + have_diag_rpt=1 +else + err=1 +fi +echo "have_diag_rpt = $have_diag_rpt" + +#----------------------------------------------------------------------------- +# read each line in the $file1 +# search $file2 for the same satname, channel, and region +# if same combination is in both files, add the values to the output file +# +{ while read myline; do + echo "myline = $myline" + bound="" + + echo $myline + satname=$(echo $myline | gawk '{print $1}') + channel=$(echo $myline | gawk '{print $3}') + region=$(echo $myline | gawk '{print $5}') + value1=$(echo $myline | gawk '{print $7}') + bound=$(echo $myline | gawk '{print $9}') + +# +# Check findings against diag_report. If the satellite/instrument is on the +# diagnostic report it means the diagnostic file file for the +# satelite/instrument is missing for this cycle, so skip any additional +# error checking for that source. Otherwise, evaluate as per normal. +# + + diag_match="" + diag_match_len=0 + + if [[ $have_diag_rpt == 1 ]]; then + diag_match=$(gawk "/$satname/" $diag_rpt) + diag_match_len=$(echo ${#diag_match}) + fi + + + if [[ $diag_match_len == 0 ]]; then + + if [[ $type == "chan" ]]; then + echo "looking for match for $satname and $channel" + { while read myline2; do + satname2=$(echo $myline2 | gawk '{print $1}') + channel2=$(echo $myline2 | gawk '{print $3}') + + if [[ $satname == $satname2 && $channel == $channel2 ]]; then + match="$satname channel= $channel" + echo "match from gawk = $match" + break; + else + match="" + fi + + done } < $file2 + + + else + match=$(gawk "/$satname/ && /channel= $channel / && /region= $region /" $file2) + echo match = $match + + match_len=$(echo ${#match}) + if [[ $match_len > 0 ]]; then + channel2=$(echo $match | gawk '{print $3}') + + if [[ $channel2 != $channel ]]; then + match="" + fi + fi + + fi + match_len=$(echo ${#match}) + + if [[ $match_len > 0 ]]; then + + value2=$(echo $match | gawk '{print $7}') + bound2=$(echo $match | gawk '{print $9}') + + if [[ $type == "chan" ]]; then + tmpa=" $satname channel= $channel" + tmpb="" + + elif [[ $type == "pen" ]]; then + tmpa="$satname channel= $channel region= $region" + tmpb="$cycle1 $value1 $bound" + + elif [[ $type == "cnt" ]]; then + tmpa="$satname channel= $channel region= $region" + tmpb="$cycle1 $value1 $bound" + + else + tmpa="$satname channel= $channel region= $region" + tmpb="$cycle1: $type= $value1" + fi + + line1="$tmpa $tmpb" + echo "$line1" >> $outfile + + if [[ $type != "chan" ]]; then + tmpc=$(echo $tmpa |sed 's/[a-z]/ /g' | sed 's/[0-9]/ /g' | sed 's/=/ /g' | sed 's/_/ /g' | sed 's/-/ /g') + + if [[ $type == "pen" || $type == "cnt" ]]; then + line2=" $tmpc $cycle2 $value2 $bound2" + else + line2=" $tmpc $cycle2: $type= $value2" + fi + + echo "$line2" >> $outfile + fi + + #----------------------------------------- + # add hyperlink to warning entry + # + line3=" http://www.emc.ncep.noaa.gov/gmb/gdas/radiance/es_rad/${RADMON_SUFFIX}/index.html?sat=${satname}®ion=${region}&channel=${channel}&stat=${type}" + if [[ $channel -gt 0 ]]; then + echo "$line3" >> $outfile + echo "" >> $outfile + fi + fi + fi +done } < $file1 + + +################################################################################ 
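As a concrete illustration of the positional interface documented above (file1 file2 type cycle1 cycle2 diag_rpt outfile), radmon_verf_time.sh later in this change invokes the script along these lines; the cycle values are placeholders and the surrounding environment (HOMEgfs, RADMON_SUFFIX) is assumed to already be set by the parent job:

    # hypothetical comparison of penalty error files from two consecutive cycles
    radmon_err_rpt.sh bad_pen.2022122418 bad_pen.2022122500 pen \
                      2022122418 2022122500 diag_report.txt pen_err.txt
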
+# Post processing + +exit ${err} + diff --git a/ush/radmon_verf_angle.sh b/ush/radmon_verf_angle.sh new file mode 100755 index 0000000000..b2dab0825a --- /dev/null +++ b/ush/radmon_verf_angle.sh @@ -0,0 +1,235 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: radmon_verf_angle.sh +# Script description: Extract angle dependent data from radiance +# diagnostic files. +# +# Author: Ed Safford Org: NP23 Date: 2012-02-02 +# +# Abstract: This script extracts angle dependent data from radiance +# diagnostic files (which are an output from GSI runs), +# storing the extracted data in small binary files. +# +# This script is a child script of exgdas_vrfyrad.sh.sms. The parent +# script opens and uncompresses the radiance diagnostic file and copies +# other supporting files into a temporary working directory. +# +# +# Usage: radmon_verf_angle.sh PDATE +# +# Input script positional parameters: +# PDATE processing date +# yyyymmddcc format; required +# +# Imported Shell Variables: +# RADMON_SUFFIX data source suffix +# defauls to opr +# EXECradmon executable directory +# defaults to current directory +# RAD_AREA global or regional flag +# defaults to global +# TANKverf_rad data repository +# defaults to current directory +# SATYPE list of satellite/instrument sources +# defaults to none +# VERBOSE Verbose flag (YES or NO) +# defaults to NO +# LITTLE_ENDIAN flag to indicate LE machine +# defaults to 0 (big endian) +# USE_ANL use analysis files as inputs in addition to +# the ges files. Default is 0 (ges only) +# +# Modules and files referenced: +# scripts : +# +# programs : $NCP +# $angle_exec +# +# fixed data : $scaninfo +# +# input data : $data_file +# +# output data: $angle_file +# $angle_ctl +# $pgmout +# +# Remarks: +# +# Condition codes +# 0 - no problem encountered +# >0 - some problem encountered +# +#################################################################### + +# Command line arguments. +RAD_AREA=${RAD_AREA:-glb} +REGIONAL_RR=${REGIONAL_RR:-0} # rapid refresh model flag +rgnHH=${rgnHH:-} +rgnTM=${rgnTM:-} + +export PDATE=${1:-${PDATE:?}} + +echo " REGIONAL_RR, rgnHH, rgnTM = $REGIONAL_RR, $rgnHH, $rgnTM" +netcdf_boolean=".false." +if [[ $RADMON_NETCDF -eq 1 ]]; then + netcdf_boolean=".true." +fi +echo " RADMON_NETCDF, netcdf_boolean = ${RADMON_NETCDF}, $netcdf_boolean" + +which prep_step +which startmsg + +# Directories +FIXgdas=${FIXgdas:-$(pwd)} +EXECradmon=${EXECradmon:-$(pwd)} +TANKverf_rad=${TANKverf_rad:-$(pwd)} + +# File names +export pgmout=${pgmout:-${jlogfile}} +touch $pgmout + +# Other variables +SATYPE=${SATYPE:-} +VERBOSE=${VERBOSE:-NO} +LITTLE_ENDIAN=${LITTLE_ENDIAN:-0} +USE_ANL=${USE_ANL:-0} + + +if [[ $USE_ANL -eq 1 ]]; then + gesanl="ges anl" +else + gesanl="ges" +fi + +err=0 +angle_exec=radmon_angle.x +shared_scaninfo=${shared_scaninfo:-$FIXgdas/gdas_radmon_scaninfo.txt} +scaninfo=scaninfo.txt + +#-------------------------------------------------------------------- +# Copy extraction program and supporting files to working directory + +$NCP ${EXECradmon}/${angle_exec} ./ +$NCP $shared_scaninfo ./${scaninfo} + +if [[ ! -s ./${angle_exec} || ! 
-s ./${scaninfo} ]]; then + err=2 +else +#-------------------------------------------------------------------- +# Run program for given time + + export pgm=${angle_exec} + + iyy=$(echo $PDATE | cut -c1-4) + imm=$(echo $PDATE | cut -c5-6) + idd=$(echo $PDATE | cut -c7-8) + ihh=$(echo $PDATE | cut -c9-10) + + ctr=0 + fail=0 + touch "./errfile" + + for type in ${SATYPE}; do + + if [[ ! -s ${type} ]]; then + echo "ZERO SIZED: ${type}" + continue + fi + + for dtype in ${gesanl}; do + + echo "pgm = $pgm" + echo "pgmout = $pgmout" + prep_step + + ctr=$(expr $ctr + 1) + + if [[ $dtype == "anl" ]]; then + data_file=${type}_anl.${PDATE}.ieee_d + ctl_file=${type}_anl.ctl + angl_ctl=angle.${ctl_file} + else + data_file=${type}.${PDATE}.ieee_d + ctl_file=${type}.ctl + angl_ctl=angle.${ctl_file} + fi + + angl_file="" + if [[ $REGIONAL_RR -eq 1 ]]; then + angl_file=${rgnHH}.${data_file}.${rgnTM} + fi + + + if [[ -f input ]]; then rm input; fi + + nchanl=-999 +cat << EOF > input + &INPUT + satname='${type}', + iyy=${iyy}, + imm=${imm}, + idd=${idd}, + ihh=${ihh}, + idhh=-720, + incr=${CYCLE_INTERVAL}, + nchanl=${nchanl}, + suffix='${RADMON_SUFFIX}', + gesanl='${dtype}', + little_endian=${LITTLE_ENDIAN}, + rad_area='${RAD_AREA}', + netcdf=${netcdf_boolean}, + / +EOF + + startmsg + ./${angle_exec} < input >> ${pgmout} 2>>errfile + export err=$?; err_chk + if [[ $err -ne 0 ]]; then + fail=$(expr $fail + 1) + fi + + if [[ -s ${angl_file} ]]; then + ${COMPRESS} -f ${angl_file} + fi + + if [[ -s ${angl_ctl} ]]; then + ${COMPRESS} -f ${angl_ctl} + fi + + + done # for dtype in ${gesanl} loop + + done # for type in ${SATYPE} loop + + + ${USHradmon}/rstprod.sh + + tar_file=radmon_angle.tar + if compgen -G "angle*.ieee_d*" > /dev/null || compgen -G "angle*.ctl*" > /dev/null; then + tar -cf $tar_file angle*.ieee_d* angle*.ctl* + ${COMPRESS} ${tar_file} + mv $tar_file.${Z} ${TANKverf_rad}/. + + if [[ $RAD_AREA = "rgn" ]]; then + cwd=$(pwd) + cd ${TANKverf_rad} + tar -xf ${tar_file}.${Z} + rm ${tar_file}.${Z} + cd ${cwd} + fi + fi + + if [[ $ctr -gt 0 && $fail -eq $ctr || $fail -gt $ctr ]]; then + err=3 + fi +fi + +################################################################################ +# Post processing + +exit ${err} diff --git a/ush/radmon_verf_bcoef.sh b/ush/radmon_verf_bcoef.sh new file mode 100755 index 0000000000..374c8db7b2 --- /dev/null +++ b/ush/radmon_verf_bcoef.sh @@ -0,0 +1,233 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: radmon_verf_bcoef.sh +# Script description: Extract bias correction coefficients data from radiance +# diagnostic files. +# +# Author: Ed Safford Org: NP23 Date: 2012-02-02 +# +# Abstract: This script extracts bias correction coefficient related data from +# radiance diagnostic files (which are an output from GSI runs), +# storing the extracted data in small binary files. +# +# This script is a child script of exgdas_vrfyrad.sh.sms. The parent +# script opens and uncompresses the radiance diagnostic file and copies +# other supporting files into a temporary working directory. 
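radmon_verf_angle.sh above, like its siblings radmon_verf_bcoef.sh, radmon_verf_bcor.sh, and radmon_verf_time.sh, takes the processing date as its single positional argument and reads everything else from the environment prepared by the parent exgdas_vrfyrad job. A minimal sketch of that calling convention follows; every value is a placeholder, and the prod_util helpers (prep_step, startmsg, err_chk) plus NCP, COMPRESS, and the remaining variables listed in the script header are assumed to be provided by the parent:

    # hypothetical driver fragment; all values below are placeholders
    export SATYPE="amsua_n19 atms_npp"
    export EXECradmon=/path/to/exec FIXgdas=/path/to/fix TANKverf_rad=/path/to/tank
    export RADMON_SUFFIX=opr RAD_AREA=glb RADMON_NETCDF=1 CYCLE_INTERVAL=6
    ./radmon_verf_angle.sh 2022122500
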
+# +# +# Usage: radmon_verf_bcoef.sh PDATE +# +# Input script positional parameters: +# PDATE processing date +# yyyymmddcc format; required +# +# Imported Shell Variables: +# RADMON_SUFFIX data source suffix +# defauls to opr +# EXECradmon executable directory +# defaults to current directory +# FIXradmon fixed data directory +# defaults to current directory +# RAD_AREA global or regional flag +# defaults to global +# TANKverf_rad data repository +# defaults to current directory +# SATYPE list of satellite/instrument sources +# defaults to none +# LITTLE_ENDIAN flag for LE machine +# defaults to 0 (big endian) +# USE_ANL use analysis files as inputs in addition to +# the ges files. Default is 0 (ges only) +# +# Modules and files referenced: +# scripts : +# +# programs : $NCP +# $bcoef_exec +# +# fixed data : $biascr +# +# input data : $data_file +# +# output data: $bcoef_file +# $bcoef_ctl +# $pgmout +# +# Remarks: +# +# Condition codes +# 0 - no problem encountered +# >0 - some problem encountered +# +#################################################################### +# Command line arguments. +export PDATE=${1:-${PDATE:?}} + +netcdf_boolean=".false." +if [[ $RADMON_NETCDF -eq 1 ]]; then + netcdf_boolean=".true." +fi +echo " RADMON_NETCDF, netcdf_boolean = ${RADMON_NETCDF}, $netcdf_boolean" + +# Directories +FIXgdas=${FIXgdas:-$(pwd)} +EXECradmon=${EXECradmon:-$(pwd)} +TANKverf_rad=${TANKverf_rad:-$(pwd)} + +# File names +pgmout=${pgmout:-${jlogfile}} +touch $pgmout + +# Other variables +RAD_AREA=${RAD_AREA:-glb} +REGIONAL_RR=${REGIONAL_RR:-0} +rgnHH=${rgnHH:-} +rgnTM=${rgnTM:-} +SATYPE=${SATYPE:-} +LITTLE_ENDIAN=${LITTLE_ENDIAN:-0} +USE_ANL=${USE_ANL:-0} + + +err=0 +bcoef_exec=radmon_bcoef.x + +if [[ $USE_ANL -eq 1 ]]; then + gesanl="ges anl" +else + gesanl="ges" +fi + +#-------------------------------------------------------------------- +# Copy extraction program and supporting files to working directory + +$NCP $EXECradmon/${bcoef_exec} ./${bcoef_exec} +$NCP ${biascr} ./biascr.txt + +if [[ ! -s ./${bcoef_exec} || ! -s ./biascr.txt ]]; then + err=4 +else + + +#-------------------------------------------------------------------- +# Run program for given time + + export pgm=${bcoef_exec} + + iyy=$(echo $PDATE | cut -c1-4) + imm=$(echo $PDATE | cut -c5-6) + idd=$(echo $PDATE | cut -c7-8) + ihh=$(echo $PDATE | cut -c9-10) + + ctr=0 + fail=0 + + nchanl=-999 + npredr=5 + + for type in ${SATYPE}; do + + if [[ ! 
-s ${type} ]]; then + echo "ZERO SIZED: ${type}" + continue + fi + + for dtype in ${gesanl}; do + + prep_step + + ctr=$(expr $ctr + 1) + + if [[ $dtype == "anl" ]]; then + data_file=${type}_anl.${PDATE}.ieee_d + ctl_file=${type}_anl.ctl + bcoef_ctl=bcoef.${ctl_file} + else + data_file=${type}.${PDATE}.ieee_d + ctl_file=${type}.ctl + bcoef_ctl=bcoef.${ctl_file} + fi + + if [[ $REGIONAL_RR -eq 1 ]]; then + bcoef_file=${rgnHH}.bcoef.${data_file}.${rgnTM} + else + bcoef_file=bcoef.${data_file} + fi + + + if [[ -f input ]]; then rm input; fi + + +cat << EOF > input + &INPUT + satname='${type}', + npredr=${npredr}, + nchanl=${nchanl}, + iyy=${iyy}, + imm=${imm}, + idd=${idd}, + ihh=${ihh}, + idhh=-720, + incr=${CYCLE_INTERVAL}, + suffix='${RADMON_SUFFIX}', + gesanl='${dtype}', + little_endian=${LITTLE_ENDIAN}, + netcdf=${netcdf_boolean}, + / +EOF + startmsg + ./${bcoef_exec} < input >>${pgmout} 2>>errfile + export err=$?; err_chk + if [[ $err -ne 0 ]]; then + fail=$(expr $fail + 1) + fi + + +#------------------------------------------------------------------- +# move data, control, and stdout files to $TANKverf_rad and compress +# + + if [[ -s ${bcoef_file} ]]; then + ${COMPRESS} ${bcoef_file} + fi + + if [[ -s ${bcoef_ctl} ]]; then + ${COMPRESS} ${bcoef_ctl} + fi + + + done # dtype in $gesanl loop + done # type in $SATYPE loop + + + ${USHradmon}/rstprod.sh + + if compgen -G "bcoef*.ieee_d*" > /dev/null || compgen -G "bcoef*.ctl*" > /dev/null; then + tar_file=radmon_bcoef.tar + tar -cf $tar_file bcoef*.ieee_d* bcoef*.ctl* + ${COMPRESS} ${tar_file} + mv $tar_file.${Z} ${TANKverf_rad} + + if [[ $RAD_AREA = "rgn" ]]; then + cwd=$(pwd) + cd ${TANKverf_rad} + tar -xf ${tar_file}.${Z} + rm ${tar_file}.${Z} + cd ${cwd} + fi + fi + + if [[ $ctr -gt 0 && $fail -eq $ctr || $fail -gt $ctr ]]; then + err=5 + fi +fi + + +################################################################################ +# Post processing + +exit ${err} diff --git a/ush/radmon_verf_bcor.sh b/ush/radmon_verf_bcor.sh new file mode 100755 index 0000000000..3e267f018c --- /dev/null +++ b/ush/radmon_verf_bcor.sh @@ -0,0 +1,226 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: radmon_verf_bcor.sh +# Script description: Extract bias correction data from radiance diagnostic +# files. +# +# Author: Ed Safford Org: NP23 Date: 2012-02-02 +# +# Abstract: This script extracts bias correction related data from radiance +# diagnostic files (which are an output from GSI runs), storing the +# extracted data in small binary files. +# +# This script is a child script of exgdas_vrfyrad.sh.sms. The parent +# script opens and uncompresses the radiance diagnostic file and copies +# other supporting files into a temporary working directory. +# +# +# Usage: radmon_verf_bcor.sh PDATE +# +# Input script positional parameters: +# PDATE processing date +# yyyymmddcc format; required +# +# Imported Shell Variables: +# RADMON_SUFFIX data source suffix +# defauls to opr +# EXECradmon executable directory +# defaults to current directory +# RAD_AREA global or regional flag +# defaults to global +# TANKverf_rad data repository +# defaults to current directory +# SATYPE list of satellite/instrument sources +# defaults to none +# LITTLE_ENDIAN flag for little endian machine +# defaults to 0 (big endian) +# USE_ANL use analysis files as inputs in addition to +# the ges files. 
Default is 0 (ges only) +# +# Modules and files referenced: +# scripts : +# +# programs : $NCP +# $bcor_exec +# +# fixed data : none +# +# input data : $data_file +# +# output data: $bcor_file +# $bcor_ctl +# $pgmout +# +# Remarks: +# +# Condition codes +# 0 - no problem encountered +# >0 - some problem encountered +# +#################################################################### + +# Command line arguments. +export PDATE=${1:-${PDATE:?}} + +# Directories +EXECradmon=${EXECradmon:-$(pwd)} +TANKverf_rad=${TANKverf_rad:-$(pwd)} + +# File names +pgmout=${pgmout:-${jlogfile}} +touch $pgmout + +# Other variables +RAD_AREA=${RAD_AREA:-glb} +SATYPE=${SATYPE:-} +LITTLE_ENDIAN=${LITTLE_ENDIAN:-0} +USE_ANL=${USE_ANL:-0} + +bcor_exec=radmon_bcor.x +err=0 + +netcdf_boolean=".false." +if [[ $RADMON_NETCDF -eq 1 ]]; then + netcdf_boolean=".true." +fi + +if [[ $USE_ANL -eq 1 ]]; then + gesanl="ges anl" +else + gesanl="ges" +fi + + +#-------------------------------------------------------------------- +# Copy extraction program to working directory + +$NCP ${EXECradmon}/${bcor_exec} ./${bcor_exec} + +if [[ ! -s ./${bcor_exec} ]]; then + err=6 +else + + +#-------------------------------------------------------------------- +# Run program for given time + + export pgm=${bcor_exec} + + iyy=$(echo $PDATE | cut -c1-4) + imm=$(echo $PDATE | cut -c5-6) + idd=$(echo $PDATE | cut -c7-8) + ihh=$(echo $PDATE | cut -c9-10) + + ctr=0 + fail=0 + touch "./errfile" + + for type in ${SATYPE}; do + + for dtype in ${gesanl}; do + + prep_step + + ctr=$(expr $ctr + 1) + + if [[ $dtype == "anl" ]]; then + data_file=${type}_anl.${PDATE}.ieee_d + bcor_file=bcor.${data_file} + ctl_file=${type}_anl.ctl + bcor_ctl=bcor.${ctl_file} + stdout_file=stdout.${type}_anl + bcor_stdout=bcor.${stdout_file} + input_file=${type}_anl + else + data_file=${type}.${PDATE}.ieee_d + bcor_file=bcor.${data_file} + ctl_file=${type}.ctl + bcor_ctl=bcor.${ctl_file} + stdout_file=stdout.${type} + bcor_stdout=bcor.${stdout_file} + input_file=${type} + fi + + if [[ -f input ]]; then rm input; fi + + # Check for 0 length input file here and avoid running + # the executable if $input_file doesn't exist or is 0 bytes + # + if [[ -s $input_file ]]; then + nchanl=-999 + +cat << EOF > input + &INPUT + satname='${type}', + iyy=${iyy}, + imm=${imm}, + idd=${idd}, + ihh=${ihh}, + idhh=-720, + incr=6, + nchanl=${nchanl}, + suffix='${RADMON_SUFFIX}', + gesanl='${dtype}', + little_endian=${LITTLE_ENDIAN}, + rad_area='${RAD_AREA}', + netcdf=${netcdf_boolean}, + / +EOF + + startmsg + ./${bcor_exec} < input >> ${pgmout} 2>>errfile + export err=$?; err_chk + if [[ $? -ne 0 ]]; then + fail=$(expr $fail + 1) + fi + + +#------------------------------------------------------------------- +# move data, control, and stdout files to $TANKverf_rad and compress +# + + if [[ -s ${bcor_file} ]]; then + ${COMPRESS} ${bcor_file} + fi + + if [[ -s ${bcor_ctl} ]]; then + ${COMPRESS} ${bcor_ctl} + fi + + fi + done # dtype in $gesanl loop + done # type in $SATYPE loop + + + ${USHradmon}/rstprod.sh + tar_file=radmon_bcor.tar + + if compgen -G "bcor*.ieee_d*" > /dev/null || compgen -G "bcor*.ctl*" > /dev/null; then + tar -cf $tar_file bcor*.ieee_d* bcor*.ctl* + ${COMPRESS} ${tar_file} + mv $tar_file.${Z} ${TANKverf_rad}/. 
+ + if [[ $RAD_AREA = "rgn" ]]; then + cwd=$(pwd) + cd ${TANKverf_rad} + tar -xf ${tar_file}.${Z} + rm ${tar_file}.${Z} + cd ${cwd} + fi + fi + + if [[ $ctr -gt 0 && $fail -eq $ctr || $fail -gt $ctr ]]; then + err=7 + fi +fi + +################################################################################ +# Post processing + +exit ${err} + diff --git a/ush/radmon_verf_time.sh b/ush/radmon_verf_time.sh new file mode 100755 index 0000000000..51743277c9 --- /dev/null +++ b/ush/radmon_verf_time.sh @@ -0,0 +1,567 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +################################################################################ +#### UNIX Script Documentation Block +# . . +# Script name: radmon_verf_time.sh +# Script description: Extract time data from radiance diagnostic files, +# perform data integrity checks. +# +# Author: Ed Safford Org: NP23 Date: 2012-02-02 +# +# Abstract: This script extracts time related data from radiance diagnostic +# files (which are an output from GSI runs), storing the extracted +# data in small binary files. Data integrity checks are performed +# on the data and mail messages are sent if potential errors are +# detected. +# +# This script is a child script of exgdas_vrfyrad.sh.sms. The parent +# script opens and uncompresses the radiance diagnostic file and copies +# other supporting files into a temporary working directory. +# +# +# Usage: radmon_verf_time.sh PDATE +# +# Input script positional parameters: +# PDATE processing date +# yyyymmddcc format; required +# +# Imported Shell Variables: +# DO_DATA_RPT switch to build the data report +# defaults to 1 (on) +# RADMON_SUFFIX data source suffix +# defauls to opr +# EXECradmon executable directory +# defaults to current directory +# FIXgdas fixed data directory +# defaults to current directory +# RAD_AREA global or regional flag +# defaults to global +# TANKverf_rad data repository +# defaults to current directory +# SATYPE list of satellite/instrument sources +# defaults to none +# VERBOSE Verbose flag (YES or NO) +# defaults to NO +# LITTLE_ENDIAN flag for little endian machine +# defaults to 0 (big endian) +# USE_ANL use analysis files as inputs in addition to +# the ges files. Default is 0 (ges only) +# +# Modules and files referenced: +# scripts : +# +# programs : $NCP +# $time_exec +# +# fixed data : gdas_radmon_base.tar +# +# input data : $data_file +# +# output data: $time_file +# $time_ctl +# $pgmout +# $bad_pen +# $bad_chan +# $report +# $diag_report +# +# +# Remarks: +# +# Condition codes +# 0 - no problem encountered +# >0 - some problem encountered +# +#################################################################### + +# Command line arguments. +export PDATE=${1:-${PDATE:?}} + +# Directories +FIXgdas=${FIXgdas:-$(pwd)} +EXECradmon=${EXECradmon:-$(pwd)} +TANKverf_rad=${TANKverf_rad:-$(pwd)} + +# File names +#pgmout=${pgmout:-${jlogfile}} +#touch $pgmout + +radmon_err_rpt=${radmon_err_rpt:-${USHradmon}/radmon_err_rpt.sh} +base_file=${base_file:-$FIXgdas/gdas_radmon_base.tar} +report=report.txt +disclaimer=disclaimer.txt + +diag_report=diag_report.txt +diag_hdr=diag_hdr.txt +diag=diag.txt + +obs_err=obs_err.txt +obs_hdr=obs_hdr.txt +pen_err=pen_err.txt +pen_hdr=pen_hdr.txt + +chan_err=chan_err.txt +chan_hdr=chan_hdr.txt +count_hdr=count_hdr.txt +count_err=count_err.txt + +netcdf_boolean=".false." +if [[ $RADMON_NETCDF -eq 1 ]]; then + netcdf_boolean=".true." 
+fi + +DO_DATA_RPT=${DO_DATA_RPT:-1} +RADMON_SUFFIX=${RADMON_SUFFIX:-opr} +RAD_AREA=${RAD_AREA:-glb} +REGIONAL_RR=${REGIONAL_RR:-0} +rgnHH=${rgnHH:-} +rgnTM=${rgnTM:-} +SATYPE=${SATYPE:-} +VERBOSE=${VERBOSE:-NO} +LITTLE_ENDIAN=${LITTLE_ENDIAN:-0} + +time_exec=radmon_time.x +USE_ANL=${USE_ANL:-0} +err=0 + +if [[ $USE_ANL -eq 1 ]]; then + gesanl="ges anl" +else + gesanl="ges" +fi + + +#-------------------------------------------------------------------- +# Copy extraction program and base files to working directory +#------------------------------------------------------------------- +$NCP ${EXECradmon}/${time_exec} ./ +if [[ ! -s ./${time_exec} ]]; then + err=8 +fi + +iyy=$(echo $PDATE | cut -c1-4) +imm=$(echo $PDATE | cut -c5-6) +idd=$(echo $PDATE | cut -c7-8) +ihh=$(echo $PDATE | cut -c9-10) +cyc=$ihh +CYCLE=$cyc + +local_base="local_base" +if [[ $DO_DATA_RPT -eq 1 ]]; then + + if [[ -e ${base_file}.${Z} ]]; then + $NCP ${base_file}.${Z} ./${local_base}.{Z} + ${UNCOMPRESS} ${local_base}.${Z} + else + $NCP ${base_file} ./${local_base} + fi + + if [[ ! -s ./${local_base} ]]; then + echo "RED LIGHT: local_base file not found" + else + echo "Confirming local_base file is good = ${local_base}" + tar -xf ./${local_base} + echo "local_base is untarred" + fi +fi + +if [[ $err -eq 0 ]]; then + ctr=0 + fail=0 + + export pgm=${time_exec} +#-------------------------------------------------------------------- +# Loop over each entry in SATYPE +#-------------------------------------------------------------------- + for type in ${SATYPE}; do + + if [[ ! -s ${type} ]]; then + echo "ZERO SIZED: ${type}" + continue + fi + + ctr=$(expr $ctr + 1) + + for dtype in ${gesanl}; do + + if [[ -f input ]]; then rm input; fi + + if [[ $dtype == "anl" ]]; then + data_file=${type}_anl.${PDATE}.ieee_d + ctl_file=${type}_anl.ctl + time_ctl=time.${ctl_file} + else + data_file=${type}.${PDATE}.ieee_d + ctl_file=${type}.ctl + time_ctl=time.${ctl_file} + fi + + if [[ $REGIONAL_RR -eq 1 ]]; then + time_file=${rgnHH}.time.${data_file}.${rgnTM} + else + time_file=time.${data_file} + fi + +#-------------------------------------------------------------------- +# Run program for given satellite/instrument +#-------------------------------------------------------------------- + nchanl=-999 +cat << EOF > input + &INPUT + satname='${type}', + iyy=${iyy}, + imm=${imm}, + idd=${idd}, + ihh=${ihh}, + idhh=-720, + incr=${CYCLE_INTERVAL}, + nchanl=${nchanl}, + suffix='${RADMON_SUFFIX}', + gesanl='${dtype}', + little_endian=${LITTLE_ENDIAN}, + rad_area='${RAD_AREA}', + netcdf=${netcdf_boolean}, + / +EOF + + ./${time_exec} < input >> stdout.${type} 2>>errfile + + if [[ $err -ne 0 ]]; then + fail=$(expr $fail + 1) + fi + +#------------------------------------------------------------------- +# move data, control, and stdout files to $TANKverf_rad and compress +#------------------------------------------------------------------- + cat stdout.${type} >> stdout.time + + if [[ -s ${time_file} ]]; then + ${COMPRESS} ${time_file} + fi + + if [[ -s ${time_ctl} ]]; then + ${COMPRESS} ${time_ctl} + fi + + done + done + + + ${USHradmon}/rstprod.sh + + if compgen -G "time*.ieee_d*" > /dev/null || compgen -G "time*.ctl*" > /dev/null; then + tar_file=radmon_time.tar + tar -cf $tar_file time*.ieee_d* time*.ctl* + ${COMPRESS} ${tar_file} + mv $tar_file.${Z} ${TANKverf_rad}/. 
+ + if [[ $RAD_AREA = "rgn" ]]; then + cwd=$(pwd) + cd ${TANKverf_rad} + tar -xf ${tar_file}.${Z} + rm ${tar_file}.${Z} + cd ${cwd} + fi + fi + + if [[ $ctr -gt 0 && $fail -eq $ctr || $fail -gt $ctr ]]; then + echo "fail, ctr = $fail, $ctr" + err=10 + fi + +fi + + + +#################################################################### +#------------------------------------------------------------------- +# Begin error analysis and reporting +#------------------------------------------------------------------- +#################################################################### + +if [[ $DO_DATA_RPT -eq 1 ]]; then + +#--------------------------- +# build report disclaimer +# + cat << EOF > ${disclaimer} + + +*********************** WARNING *************************** +THIS IS AN AUTOMATED EMAIL. REPLIES TO SENDER WILL NOT BE +RECEIVED. PLEASE DIRECT REPLIES TO edward.safford@noaa.gov +*********************** WARNING *************************** +EOF + + +#------------------------------------------------------------------- +# Check for missing diag files +# + tmp_satype="./tmp_satype.txt" + echo ${SATYPE} > ${tmp_satype} + ${USHradmon}/radmon_diag_ck.sh --rad ${radstat} --sat ${tmp_satype} --out ${diag} + + if [[ -s ${diag} ]]; then + cat << EOF > ${diag_hdr} + + Problem Reading Diagnostic File + + + Problems were encountered reading the diagnostic file for + the following sources: + +EOF + + cat ${diag_hdr} >> ${diag_report} + cat ${diag} >> ${diag_report} + + echo >> ${diag_report} + + rm ${diag_hdr} + fi + +#------------------------------------------------------------------- +# move warning notification to TANKverf +# + if [[ -s ${diag} ]]; then + lines=$(wc -l <${diag}) + echo "lines in diag = $lines" + + if [[ $lines -gt 0 ]]; then + cat ${diag_report} + cp ${diag} ${TANKverf_rad}/bad_diag.${PDATE} + else + rm ${diag_report} + fi + fi + + + + #---------------------------------------------------------------- + # Identify bad_pen and bad_chan files for this cycle and + # previous cycle + + bad_pen=bad_pen.${PDATE} + bad_chan=bad_chan.${PDATE} + low_count=low_count.${PDATE} + + qdate=$($NDATE -${CYCLE_INTERVAL} $PDATE) + pday=$(echo $qdate | cut -c1-8) + + prev_bad_pen=bad_pen.${qdate} + prev_bad_chan=bad_chan.${qdate} + prev_low_count=low_count.${qdate} + + prev_bad_pen=${TANKverf_radM1}/${prev_bad_pen} + prev_bad_chan=${TANKverf_radM1}/${prev_bad_chan} + prev_low_count=${TANKverf_radM1}/${prev_low_count} + + if [[ -s $bad_pen ]]; then + echo "pad_pen = $bad_pen" + fi + if [[ -s $prev_bad_pen ]]; then + echo "prev_pad_pen = $prev_bad_pen" + fi + + if [[ -s $bad_chan ]]; then + echo "bad_chan = $bad_chan" + fi + if [[ -s $prev_bad_chan ]]; then + echo "prev_bad_chan = $prev_bad_chan" + fi + if [[ -s $low_count ]]; then + echo "low_count = $low_count" + fi + if [[ -s $prev_low_count ]]; then + echo "prev_low_count = $prev_low_count" + fi + + do_pen=0 + do_chan=0 + do_cnt=0 + + if [[ -s $bad_pen && -s $prev_bad_pen ]]; then + do_pen=1 + fi + + if [[ -s $low_count && -s $prev_low_count ]]; then + do_cnt=1 + fi + + #-------------------------------------------------------------------- + # avoid doing the bad_chan report for REGIONAL_RR sources -- because + # they run hourly they often have 0 count channels for off-hour runs. 
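For orientation, qdate above is simply the previous cycle: NDATE, the NCEP prod_util date helper used throughout these scripts, shifts a yyyymmddhh date by a signed number of hours. With placeholder values:

    # assuming CYCLE_INTERVAL=6 and PDATE=2022122500 (placeholders)
    qdate=$(${NDATE} -6 2022122500)      # -> 2022122418
    pday=$(echo "${qdate}" | cut -c1-8)  # -> 20221224
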
+ # + if [[ -s $bad_chan && -s $prev_bad_chan && REGIONAL_RR -eq 0 ]]; then + do_chan=1 + fi + + #-------------------------------------------------------------------- + # Remove extra spaces in new bad_pen & low_count files + # + if [[ -s ${bad_pen} ]]; then + gawk '{$1=$1}1' $bad_pen > tmp.bad_pen + mv -f tmp.bad_pen $bad_pen + fi + if [[ -s ${low_count} ]]; then + gawk '{$1=$1}1' $low_count > tmp.low_count + mv -f tmp.low_count $low_count + fi + + echo " do_pen, do_chan, do_cnt = $do_pen, $do_chan, $do_cnt" + echo " diag_report = $diag_report " + if [[ $do_pen -eq 1 || $do_chan -eq 1 || $do_cnt -eq 1 || -s ${diag_report} ]]; then + + if [[ $do_pen -eq 1 ]]; then + + echo "calling radmon_err_rpt for pen" + ${radmon_err_rpt} ${prev_bad_pen} ${bad_pen} pen ${qdate} \ + ${PDATE} ${diag_report} ${pen_err} + fi + + if [[ $do_chan -eq 1 ]]; then + + echo "calling radmon_err_rpt for chan" + ${radmon_err_rpt} ${prev_bad_chan} ${bad_chan} chan ${qdate} \ + ${PDATE} ${diag_report} ${chan_err} + fi + + if [[ $do_cnt -eq 1 ]]; then + + echo "calling radmon_err_rpt for cnt" + ${radmon_err_rpt} ${prev_low_count} ${low_count} cnt ${qdate} \ + ${PDATE} ${diag_report} ${count_err} + fi + + #------------------------------------------------------------------- + # put together the unified error report with any obs, chan, and + # penalty problems and mail it + + if [[ -s ${obs_err} || -s ${pen_err} || -s ${chan_err} || -s ${count_err} || -s ${diag_report} ]]; then + + echo DOING ERROR REPORTING + + + cat << EOF > $report +Radiance Monitor warning report + + Net: ${RADMON_SUFFIX} + Run: ${RUN} + Cycle: $PDATE + +EOF + + if [[ -s ${diag_report} ]]; then + echo OUTPUTING DIAG_REPORT + cat ${diag_report} >> $report + fi + + if [[ -s ${chan_err} ]]; then + + echo OUTPUTING CHAN_ERR + + cat << EOF > ${chan_hdr} + + The following channels report 0 observational counts over the past two cycles: + + Satellite/Instrument Channel + ==================== ======= + +EOF + + cat ${chan_hdr} >> $report + cat ${chan_err} >> $report + + fi + + if [[ -s ${count_err} ]]; then + + cat << EOF > ${count_hdr} + + + + The following channels report abnormally low observational counts in the latest 2 cycles: + +Satellite/Instrument Obs Count Avg Count +==================== ========= ========= + +EOF + + cat ${count_hdr} >> $report + cat ${count_err} >> $report + fi + + + if [[ -s ${pen_err} ]]; then + + cat << EOF > ${pen_hdr} + + + Penalty values outside of the established normal range were found + for these sensor/channel/regions in the past two cycles: + + Questionable Penalty Values + ============ ======= ====== Cycle Penalty Bound + ----- ------- ----- +EOF + cat ${pen_hdr} >> $report + cat ${pen_err} >> $report + rm -f ${pen_hdr} + rm -f ${pen_err} + fi + + echo >> $report + cat ${disclaimer} >> $report + echo >> $report + fi + + #------------------------------------------------------------------- + # dump report to log file + # + if [[ -s ${report} ]]; then + lines=$(wc -l <${report}) + if [[ $lines -gt 2 ]]; then + cat ${report} + + $NCP ${report} ${TANKverf_rad}/warning.${PDATE} + fi + fi + + + fi + + #------------------------------------------------------------------- + # copy new bad_pen, bad_chan, and low_count files to $TANKverf_rad + # + if [[ -s ${bad_chan} ]]; then + mv ${bad_chan} ${TANKverf_rad}/. + fi + + if [[ -s ${bad_pen} ]]; then + mv ${bad_pen} ${TANKverf_rad}/. + fi + + if [[ -s ${low_count} ]]; then + mv ${low_count} ${TANKverf_rad}/. 
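The gawk '{$1=$1}1' filter applied to the bad_pen and low_count files above is a compact whitespace normalizer: reassigning $1 forces awk to rebuild the record with single-space separators, and the trailing 1 prints it. For example:

    # collapses runs of blanks/tabs into single spaces and trims the ends
    echo "  amsua_n19    channel=   5   region= 1 " | gawk '{$1=$1}1'
    # -> "amsua_n19 channel= 5 region= 1"
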
+ fi + + +fi + + for type in ${SATYPE}; do + rm -f stdout.${type} + done + +################################################################################ +#------------------------------------------------------------------- +# end error reporting section +#------------------------------------------------------------------- +################################################################################ + +################################################################################ +# Post processing + +exit ${err} diff --git a/ush/rocoto/.gitignore b/ush/rocoto/.gitignore deleted file mode 100644 index 7f98f9b45c..0000000000 --- a/ush/rocoto/.gitignore +++ /dev/null @@ -1,47 +0,0 @@ -# Compiled source # -################### -*.com -*.class -*.dll -*.exe -*.o -*.mod -*.so -*.pyc - -# Temporary files # -################### -*.swp -*.swo -*~ - -# Packages # -############ -*.7z -*.dmg -*.gz -*.iso -*.jar -*.rar -*.tar -*.zip - -# Logs and databases # -###################### -*.log -*.sql -*.sqlite - -# OS generated files # -###################### -.DS_Store* -ehthumbs.db -Icon? -Thumbs.db - -*.lock -package-lock.json - -# Subversion and Git directories -.svn -.git diff --git a/ush/rocoto/fv3gfs_workflow.sh b/ush/rocoto/fv3gfs_workflow.sh deleted file mode 100755 index 7331b646f7..0000000000 --- a/ush/rocoto/fv3gfs_workflow.sh +++ /dev/null @@ -1,49 +0,0 @@ -#!/bin/sh - -# Checkout, build, setup and execute the workflow - -set -ex - -fv3gfs_tag="https://svnemc.ncep.noaa.gov/projects/fv3gfs/trunk" - -pslot="fv3test" -expdir="/path/to/expdir" -comrot="/path/to/comrot" -fv3gfs="/path/to/fv3gfs_tag/checkout" -idate="2017073118" -edate="2017080112" - -###################################### -# USER NEED NOT MODIFY BELOW THIS LINE -###################################### - -if [ -d /gpfs/hps3 ]; then - machine="cray" - icsdir="/gpfs/hps3/emc/global/noscrub/emc.glopara/ICS" -else - echo "Unknown machine $machine, ABORT!" 
- exit -1 -fi - -[[ -d $expdir/$pslot ]] && rm -rf $expdir/$pslot -[[ -d $comrot/$pslot ]] && rm -rf $comrot/$pslot -[[ -d $fv3gfs/$pslot ]] && rm -rf $fv3gfs/$pslot - -gfs_ver=v15.0.0 -mkdir -p $fv3gfs -cd $fv3gfs -git clone --recursive gerrit:fv3gfs gfs.${gfs_ver} - -cd $fv3gfs/gfs.${gfs_ver}/sorc -sh checkout.sh -sh build_all.sh $machine -sh link_fv3gfs.sh emc $machine - -cd $fv3gfs/gfs.${gfs_ver}/ush/rocoto -python setup_expt.py --pslot $pslot --comrot $comrot --expdir $expdir --idate $idate --edate $edate --icsdir $icsdir --configdir ../parm/config -python setup_workflow.py --expdir $expdir/$pslot - -cd $expdir/$pslot -crontab $pslot.crontab - -exit diff --git a/ush/rocoto/rocoto.py b/ush/rocoto/rocoto.py deleted file mode 100755 index 2a027fdc4f..0000000000 --- a/ush/rocoto/rocoto.py +++ /dev/null @@ -1,379 +0,0 @@ -#!/usr/bin/env python3 - -''' - MODULE: - rocoto.py - - ABOUT: - Helper module to create tasks, metatasks, and dependencies -''' - -def create_metatask(task_dict, metatask_dict): - ''' - create a Rocoto metatask given a dictionary containing task and metatask information - :param metatask_dict: metatask key-value parameters - :type metatask_dict: dict - :param task_dict: task key-value parameters - :type task_dict: dict - :return: Rocoto metatask - :rtype: list - ''' - - # Grab metatask info from the metatask_dict - metataskname = metatask_dict.get('metataskname', 'demometatask') - varname = metatask_dict.get('varname', 'demovar') - varval = metatask_dict.get('varval', 1) - vardict = metatask_dict.get('vardict', None) - - strings = [] - - strings.append(f'\n') - strings.append('\n') - strings.append(f'\t{str(varval)}\n') - if vardict is not None: - for key in vardict.keys(): - value = str(vardict[key]) - strings.append(f'\t{value}\n') - strings.append('\n') - tasklines = create_task(task_dict) - for tl in tasklines: - strings.append(f'{tl}') if tl == '\n' else strings.append(f'\t{tl}') - strings.append('\n') - strings.append('\n') - - return strings - - -def create_task(task_dict): - ''' - create a Rocoto task given a dictionary containing task information - :param task_dict: task key-value parameters - :type task_dict: dict - :return: Rocoto task - :rtype: list - ''' - - # Grab task info from the task_dict - taskname = task_dict.get('taskname', 'demotask') - cycledef = task_dict.get('cycledef', 'democycle') - maxtries = task_dict.get('maxtries', 3) - final = task_dict.get('final', False) - command = task_dict.get('command', 'sleep 10') - jobname = task_dict.get('jobname', 'demojob') - account = task_dict.get('account', 'batch') - queue = task_dict.get('queue', 'debug') - partition = task_dict.get('partition', None) - walltime = task_dict.get('walltime', '00:01:00') - log = task_dict.get('log', 'demo.log') - native = task_dict.get('native', None) - memory = task_dict.get('memory', None) - resources = task_dict.get('resources', None) - envar = task_dict.get('envar', None) - dependency = task_dict.get('dependency', None) - - str_maxtries = str(maxtries) - str_final = ' final="true"' if final else '' - envar = envar if isinstance(envar, list) else [envar] - - strings = [] - - strings.append(f'\n') - strings.append('\n') - strings.append(f'\t{command}\n') - strings.append('\n') - strings.append(f'\t{jobname}\n') - strings.append(f'\t{account}\n') - strings.append(f'\t{queue}\n') - if partition is not None: - strings.append(f'\t{partition}\n') - if resources is not None: - strings.append(f'\t{resources}\n') - strings.append(f'\t{walltime}\n') - if memory is not None: - 
strings.append(f'\t{memory}\n') - if native is not None: - strings.append(f'\t{native}\n') - strings.append('\n') - strings.append(f'\t{log}\n') - strings.append('\n') - - if envar[0] is not None: - for e in envar: - strings.append(f'\t{e}\n') - strings.append('\n') - - if dependency is not None: - strings.append('\t\n') - for d in dependency: - strings.append(f'\t\t{d}\n') - strings.append('\t\n') - strings.append('\n') - - strings.append('\n') - - return strings - - -def add_dependency(dep_dict): - ''' - create a simple Rocoto dependency given a dictionary with dependency information - :param dep_dict: dependency key-value parameters - :type dep_dict: dict - :return: Rocoto simple dependency - :rtype: str - ''' - - dep_condition = dep_dict.get('condition', None) - dep_type = dep_dict.get('type', None) - - if dep_type in ['task', 'metatask']: - - string = add_task_tag(dep_dict) - - elif dep_type in ['data']: - - string = add_data_tag(dep_dict) - - elif dep_type in ['cycleexist']: - - string = add_cycle_tag(dep_dict) - - elif dep_type in ['streq', 'strneq']: - - string = add_streq_tag(dep_dict) - - else: - - msg = f'Unknown dependency type {dep_dict["type"]}' - raise KeyError(msg) - - if dep_condition is not None: - string = f'<{dep_condition}>{string}' - - return string - - -def add_task_tag(dep_dict): - ''' - create a simple task or metatask tag - :param dep_dict: dependency key-value parameters - :type dep_dict: dict - :return: Rocoto simple task or metatask dependency - :rtype: str - ''' - - dep_type = dep_dict.get('type', None) - dep_name = dep_dict.get('name', None) - dep_offset = dep_dict.get('offset', None) - - if dep_name is None: - msg = f'a {dep_type} name is necessary for {dep_type} dependency' - raise KeyError(msg) - - string = '<' - string += f'{dep_type}dep {dep_type}="{dep_name}"' - if dep_offset is not None: - string += f' cycle_offset="{dep_offset}"' - string += '/>' - - return string - -def add_data_tag(dep_dict): - ''' - create a simple data tag - :param dep_dict: dependency key-value parameters - :type dep_dict: dict - :return: Rocoto simple task or metatask dependency - :rtype: str - ''' - - dep_type = dep_dict.get('type', None) - dep_data = dep_dict.get('data', None) - dep_offset = dep_dict.get('offset', None) - - if dep_data is None: - msg = f'a data value is necessary for {dep_type} dependency' - raise KeyError(msg) - - if dep_offset is None: - if '@' in dep_data: - offset_string_b = '' - offset_string_e = '' - else: - offset_string_b = '' - offset_string_e = '' - else: - offset_string_b = f'' - offset_string_e = '' - - string = '' - string += f'{offset_string_b}{dep_data}{offset_string_e}' - string += '' - - return string - -def add_cycle_tag(dep_dict): - ''' - create a simple cycle exist tag - :param dep_dict: dependency key-value parameters - :type dep_dict: dict - :return: Rocoto simple task or metatask dependency - :rtype: str - ''' - - dep_type = dep_dict.get('type', None) - dep_offset = dep_dict.get('offset', None) - - if dep_offset is None: - msg = f'an offset value is necessary for {dep_type} dependency' - raise KeyError(msg) - - string = f'' - - return string - -def add_streq_tag(dep_dict): - ''' - create a simple string comparison tag - :param dep_dict: dependency key-value parameters - :type dep_dict: dict - :return: Rocoto simple task or metatask dependency - :rtype: str - ''' - - dep_type = dep_dict.get('type', None) - dep_left = dep_dict.get('left', None) - dep_right = dep_dict.get('right', None) - - fail = False - msg = '' - if dep_left is None: - 
msg += f'a left value is necessary for {dep_type} dependency' - fail = True - if dep_right is None: - if fail: - msg += '\n' - msg += f'a right value is necessary for {dep_type} dependency' - fail = True - if fail: - raise KeyError(msg) - - string = f'<{dep_type}>{dep_left}{dep_right}' - - return string - - -def _traverse(o, tree_types=(list, tuple)): - ''' - Traverse through a list of lists or tuples and yeild the value - Objective is to flatten a list of lists or tuples - :param o: list of lists or not - :type o: list, tuple, scalar - :param tree_types: trees to travers - :type tree_types: tuple - :return: value in the list or tuple - :rtype: scalar - ''' - - if isinstance(o, tree_types): - for value in o: - for subvalue in _traverse(value, tree_types): - yield subvalue - else: - yield o - - -def create_dependency(dep_condition=None, dep=None): - ''' - create a compound dependency given a list of dependendies, and compounding condition - the list of dependencies are created using add_dependency - :param dep_condition: dependency condition - :type dep_condition: boolean e.g. and, or, true, false - :param dep: dependency - :type dep: str or list - :return: Rocoto compound dependency - :rtype: list - ''' - - dep = dep if isinstance(dep, list) else [dep] - - strings = [] - - if dep_condition is not None: - strings.append(f'<{dep_condition}>') - - if dep[0] is not None: - for d in dep: - if dep_condition is None: - strings.append(f'{d}') - else: - for e in _traverse(d): - strings.append(f'\t{e}') - - if dep_condition is not None: - strings.append(f'') - - return strings - - -def create_envar(name=None,value=None): - ''' - create an Rocoto environment variable given name and value - returns the environment variable as a string - :param name: name of the environment variable - :type name: str - :param value: value of the environment variable - :type value: str or float or int or unicode - :return: Rocoto environment variable key-value pair - :rtype: str - ''' - - string = '' - string += '' - string += f'{name}' - string += f'{str(value)}' - string += '' - - return string - - -def create_cycledef(group=None, start=None, stop=None, step=None): - ''' - create an Rocoto cycle definition - returns the environment variable as a string - :param group: cycle definition group name - :type group: str - :param start: cycle start datetime - :type start: str - :param end: cycle end datetime - :type stop: str - :param step: cycle interval (timedelta) - :type interval: str - :param value: value of the environment variable - :type value: str or float or int or unicode - :return: Rocoto cycledef variable string - :rtype: str - ''' - - string = '' - string += f'' - string += f'{start} {stop} {step}' - string += '' - - return string - - -def create_entity(name=None, value=None): - ''' - create an XML ENTITY variable given name and value - returns the variable as a string - :param name: name of the variable - :type name: str - :param value: value of the variable - :type value: str or float or int or unicode - :return: XML entity variable key-value pair - :rtype: str - ''' - - return f'' - diff --git a/ush/rocoto/setup_expt.py b/ush/rocoto/setup_expt.py deleted file mode 100755 index 8a6f161c79..0000000000 --- a/ush/rocoto/setup_expt.py +++ /dev/null @@ -1,297 +0,0 @@ -#!/usr/bin/env python3 - -''' -Entry point for setting up an experiment in the global-workflow -''' - -import os -import glob -import shutil -from datetime import datetime -from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter -import 
workflow_utils as wfu - - -def makedirs_if_missing(dirname): - ''' - Creates a directory if not already present - ''' - if not os.path.exists(dirname): - os.makedirs(dirname) - - -def fill_COMROT(host, inputs): - ''' - Method to populate the COMROT for supported modes. - INPUTS: - host: host specific object from class HostInfo in workflow_utils.py - inputs: user inputs to setup_expt.py - ''' - - fill_modes = { - 'cycled': fill_COMROT_cycled, - 'forecast-only': fill_COMROT_forecasts - } - - try: - fill_modes[inputs.mode](host, inputs) - except KeyError: - raise NotImplementedError(f'{mode} is not a supported mode.\n' + - 'Currently supported modes are:\n' + - f'{" | ".join(fill_modes.keys())}') - - return - - -def fill_COMROT_cycled(host, inputs): - ''' - Implementation of 'fill_COMROT' for cycled mode - ''' - - idatestr = inputs.idate.strftime('%Y%m%d%H') - comrot = os.path.join(inputs.comrot, inputs.pslot) - - if inputs.icsdir is not None: - # Link ensemble member initial conditions - enkfdir = f'enkf{inputs.cdump}.{idatestr[:8]}/{idatestr[8:]}' - makedirs_if_missing(os.path.join(comrot, enkfdir)) - for ii in range(1, inputs.nens + 1): - makedirs_if_missing(os.path.join(comrot, enkfdir, f'mem{ii:03d}')) - os.symlink(os.path.join(inputs.icsdir, idatestr, f'C{inputs.resens}', f'mem{ii:03d}', 'RESTART'), - os.path.join(comrot, enkfdir, f'mem{ii:03d}', 'RESTART')) - - # Link deterministic initial conditions - detdir = f'{inputs.cdump}.{idatestr[:8]}/{idatestr[8:]}' - makedirs_if_missing(os.path.join(comrot, detdir)) - os.symlink(os.path.join(inputs.icsdir, idatestr, f'C{inputs.resdet}', 'control', 'RESTART'), - os.path.join(comrot, detdir, 'RESTART')) - - # Link bias correction and radiance diagnostics files - for fname in ['abias', 'abias_pc', 'abias_air', 'radstat']: - os.symlink(os.path.join(inputs.icsdir, idatestr, f'{inputs.cdump}.t{idatestr[8:]}z.{fname}'), - os.path.join(comrot, detdir, f'{inputs.cdump}.t{idatestr[8:]}z.{fname}')) - - return - - -def fill_COMROT_forecasts(host, inputs): - ''' - Implementation of 'fill_COMROT' for forecast-only mode - ''' - return - - -def fill_EXPDIR(inputs): - ''' - Method to copy config files from workflow to experiment directory - INPUTS: - inputs: user inputs to `setup_expt.py` - ''' - configdir = inputs.configdir - expdir = os.path.join(inputs.expdir, inputs.pslot) - - configs = glob.glob(f'{configdir}/config.*') - exclude_configs = ['base', 'base.emc.dyn', 'base.nco.static', 'fv3.nco.static'] - for exclude in exclude_configs: - try: - configs.remove(f'{configdir}/config.{exclude}') - except ValueError: - pass - if len(configs) == 0: - raise IOError(f'no config files found in {configdir}') - for config in configs: - shutil.copy(config, expdir) - - return - - -def edit_baseconfig(host, inputs): - ''' - Parses and populates the templated `config.base.emc.dyn` to `config.base` - ''' - - here = os.path.dirname(__file__) - top = os.path.abspath(os.path.join( - os.path.abspath(here), '../..')) - - tmpl_dict = { - "@MACHINE@": host.machine.upper(), - "@PSLOT@": inputs.pslot, - "@SDATE@": inputs.idate.strftime('%Y%m%d%H'), - "@EDATE@": inputs.edate.strftime('%Y%m%d%H'), - "@CASECTL@": f'C{inputs.resdet}', - "@HOMEgfs@": top, - "@BASE_GIT@": host.info["base_git"], - "@DMPDIR@": host.info["dmpdir"], - "@NWPROD@": host.info["nwprod"], - "@COMROOT@": host.info["comroot"], - "@HOMEDIR@": host.info["homedir"], - "@EXPDIR@": inputs.expdir, - "@ROTDIR@": inputs.comrot, - "@ICSDIR@": inputs.icsdir, - "@STMP@": host.info["stmp"], - "@PTMP@": host.info["ptmp"], - 
"@NOSCRUB@": host.info["noscrub"], - "@ACCOUNT@": host.info["account"], - "@QUEUE@": host.info["queue"], - "@QUEUE_SERVICE@": host.info["queue_service"], - "@PARTITION_BATCH@": host.info["partition_batch"], - "@EXP_WARM_START@": inputs.warm_start, - "@MODE@": inputs.mode, - "@CHGRP_RSTPROD@": host.info["chgrp_rstprod"], - "@CHGRP_CMD@": host.info["chgrp_cmd"], - "@HPSSARCH@": host.info["hpssarch"], - "@LOCALARCH@": host.info["localarch"], - "@ATARDIR@": host.info["atardir"], - "@gfs_cyc@": inputs.gfs_cyc, - "@APP@": inputs.app, - } - - if inputs.mode in ['cycled']: - extend_dict = { - "@CASEENS@": f'C{inputs.resens}', - "@NMEM_ENKF@": inputs.nens, - } - elif inputs.mode in ['forecast-only']: - extend_dict = { - "@DO_AERO@": inputs.aerosols, - } - tmpl_dict = dict(tmpl_dict, **extend_dict) - - # Open and read the templated config.base.emc.dyn - base_tmpl = f'{inputs.configdir}/config.base.emc.dyn' - with open(base_tmpl, 'rt') as fi: - basestr = fi.read() - - for key, val in tmpl_dict.items(): - basestr = basestr.replace(key, str(val)) - - # Write and clobber the experiment config.base - base_config = f'{inputs.expdir}/{inputs.pslot}/config.base' - if os.path.exists(base_config): - os.unlink(base_config) - - with open(base_config, 'wt') as fo: - fo.write(basestr) - - print('') - print(f'EDITED: {base_config} as per user input.') - print(f'DEFAULT: {base_tmpl} is for reference only.') - print('') - - return - - -def input_args(): - ''' - Method to collect user arguments for `setup_expt.py` - ''' - - here = os.path.dirname(__file__) - top = os.path.abspath(os.path.join( - os.path.abspath(here), '../..')) - - description = """ - Setup files and directories to start a GFS parallel.\n - Create EXPDIR, copy config files.\n - Create COMROT experiment directory structure, - link initial condition files from $ICSDIR to $COMROT - """ - - parser = ArgumentParser(description=description, - formatter_class=ArgumentDefaultsHelpFormatter) - - # Set up sub-parsers for various modes of experimentation - subparser = parser.add_subparsers(dest='mode') - cycled = subparser.add_parser( - 'cycled', help='arguments for cycled mode') - forecasts = subparser.add_parser( - 'forecast-only', help='arguments for forecast-only mode') - - # Common arguments across all modes - for subp in [cycled, forecasts]: - subp.add_argument('--pslot', help='parallel experiment name', - type=str, required=False, default='test') - subp.add_argument('--resdet', help='resolution of the deterministic model forecast', - type=int, required=False, default=384) - subp.add_argument('--comrot', help='full path to COMROT', - type=str, required=False, default=os.getenv('HOME')) - subp.add_argument('--expdir', help='full path to EXPDIR', - type=str, required=False, default=os.getenv('HOME')) - subp.add_argument('--idate', help='starting date of experiment, initial conditions must exist!', required=True, type=lambda dd: datetime.strptime(dd, '%Y%m%d%H')) - subp.add_argument('--edate', help='end date experiment', required=True, type=lambda dd: datetime.strptime(dd, '%Y%m%d%H')) - subp.add_argument('--icsdir', help='full path to initial condition directory', type=str, required=False, default=None) - subp.add_argument('--configdir', help='full path to directory containing the config files', - type=str, required=False, default=os.path.join(top, 'parm/config')) - subp.add_argument('--cdump', help='CDUMP to start the experiment', - type=str, required=False, default='gdas') - subp.add_argument('--gfs_cyc', help='GFS cycles to run', type=int, - choices=[0, 
1, 2, 4], default=1, required=False) - subp.add_argument('--start', help='restart mode: warm or cold', type=str, - choices=['warm', 'cold'], required=False, default='cold') - - # cycled mode additional arguments - cycled.add_argument('--resens', help='resolution of the ensemble model forecast', - type=int, required=False, default=192) - cycled.add_argument('--nens', help='number of ensemble members', - type=int, required=False, default=20) - cycled.add_argument('--app', help='UFS application', type=str, - choices=['ATM', 'ATMW'], required=False, default='ATM') - - # forecast only mode additional arguments - forecasts.add_argument('--app', help='UFS application', type=str, choices=[ - 'ATM', 'ATMW', 'S2S', 'S2SW'], required=False, default='ATM') - forecasts.add_argument('--aerosols', help="Run with coupled aerosols", required=False, - action='store_const', const="YES", default="NO") - - args = parser.parse_args() - - if args.app in ['S2S', 'S2SW'] and args.icsdir is None: - raise SyntaxError("An IC directory must be specified with --icsdir when running the S2S or S2SW app") - - # Add an entry for warm_start = .true. or .false. - if args.start == "warm": - args.warm_start = ".true." - else: - args.warm_start = ".false." - return args - - -def query_and_clean(dirname): - ''' - Method to query if a directory exists and gather user input for further action - ''' - - create_dir = True - if os.path.exists(dirname): - print() - print(f'directory already exists in {dirname}') - print() - overwrite = input('Do you wish to over-write [y/N]: ') - create_dir = True if overwrite in [ - 'y', 'yes', 'Y', 'YES'] else False - if create_dir: - shutil.rmtree(dirname) - - return create_dir - - -if __name__ == '__main__': - - user_inputs = input_args() - host=wfu.HostInfo(wfu.detectMachine()) - - comrot = os.path.join(user_inputs.comrot, user_inputs.pslot) - expdir = os.path.join(user_inputs.expdir, user_inputs.pslot) - - create_comrot = query_and_clean(comrot) - create_expdir = query_and_clean(expdir) - - if create_comrot: - makedirs_if_missing(comrot) - fill_COMROT(host, user_inputs) - - if create_expdir: - makedirs_if_missing(expdir) - fill_EXPDIR(user_inputs) - edit_baseconfig(host, user_inputs) diff --git a/ush/rocoto/setup_workflow.py b/ush/rocoto/setup_workflow.py deleted file mode 100755 index 43ee7ab8ca..0000000000 --- a/ush/rocoto/setup_workflow.py +++ /dev/null @@ -1,1432 +0,0 @@ -#!/usr/bin/env python3 - -''' - PROGRAM: - Create the ROCOTO workflow given the configuration of the GFS parallel - - AUTHOR: - Rahul.Mahajan - rahul.mahajan@noaa.gov - - FILE DEPENDENCIES: - 1. config files for the parallel; e.g. config.base, config.fcst[.gfs], etc. - Without these dependencies, the script will fail - - OUTPUT: - 1. PSLOT.xml: XML workflow - 2. 
PSLOT.crontab: crontab for ROCOTO run command -''' - -import os -import sys -import re -import numpy as np -from datetime import datetime, timedelta -from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter -from collections import OrderedDict -import rocoto -import workflow_utils as wfu - -def main(): - parser = ArgumentParser(description='Setup XML workflow and CRONTAB for a GFS parallel.', formatter_class=ArgumentDefaultsHelpFormatter) - parser.add_argument('--expdir', help='full path to experiment directory containing config files', type=str, required=False, default=os.environ['PWD']) - args = parser.parse_args() - - configs = wfu.get_configs(args.expdir) - - _base = wfu.config_parser([wfu.find_config('config.base', configs)]) - - if not os.path.samefile(args.expdir, _base['EXPDIR']): - print('MISMATCH in experiment directories!') - print(f'config.base: EXPDIR = {repr(_base["EXPDIR"])}') - print(f'input arg: --expdir = {repr(args.expdir)}') - sys.exit(1) - - gfs_steps = ['prep', 'anal', 'analdiag', 'analcalc', 'gldas', 'fcst', 'postsnd', 'post', 'vrfy', 'arch'] - gfs_steps_gempak = ['gempak'] - gfs_steps_awips = ['awips'] - gfs_steps_wafs = ['wafs', 'wafsgrib2', 'wafsblending', 'wafsgcip', 'wafsgrib20p25', 'wafsblending0p25'] - #hyb_steps = ['eobs', 'eomg', 'eupd', 'ecen', 'efcs', 'epos', 'earc'] - metp_steps = ['metp'] - wav_steps = ['waveinit', 'waveprep', 'wavepostsbs', 'wavepostbndpnt', 'wavepostbndpntbll', 'wavepostpnt'] - #Implement additional wave jobs at later date - wav_steps_gempak = ['wavegempak'] - wav_steps_awips = ['waveawipsbulls', 'waveawipsgridded'] -# From gfsv16b latest -# gfs_steps = ['prep', 'anal', 'gldas', 'fcst', 'postsnd', 'post', 'awips', 'gempak', 'vrfy', 'metp', 'arch'] - hyb_steps = ['eobs', 'ediag', 'eomg', 'eupd', 'ecen', 'esfc', 'efcs', 'echgres', 'epos', 'earc'] - - steps = gfs_steps + hyb_steps if _base.get('DOHYBVAR', 'NO') == 'YES' else gfs_steps - steps = steps + metp_steps if _base.get('DO_METP', 'NO') == 'YES' else steps - steps = steps + gfs_steps_gempak if _base.get('DO_GEMPAK', 'NO') == 'YES' else steps - steps = steps + gfs_steps_awips if _base.get('DO_AWIPS', 'NO') == 'YES' else steps - steps = steps + gfs_steps_wafs if _base.get('WAFSF', 'NO') == 'YES' else steps - steps = steps + wav_steps if _base.get('DO_WAVE', 'NO') == 'YES' else steps - steps = steps + wav_steps_gempak if _base.get('DO_GEMPAK', 'NO') == 'YES' else steps - steps = steps + wav_steps_awips if _base.get('DO_AWIPS', 'NO') == 'YES' else steps - - dict_configs = wfu.source_configs(configs, steps) - - # Check and set gfs_cyc specific variables - if dict_configs['base']['gfs_cyc'] != 0: - dict_configs['base'] = get_gfs_cyc_dates(dict_configs['base']) - - # First create workflow XML - create_xml(dict_configs) - - # Next create the crontab - wfu.create_crontab(dict_configs['base']) - - return - - -def get_gfs_cyc_dates(base): - ''' - Generate GFS dates from experiment dates and gfs_cyc choice - ''' - - base_out = base.copy() - - gfs_cyc = base['gfs_cyc'] - sdate = base['SDATE'] - edate = base['EDATE'] - - interval_gfs = wfu.get_gfs_interval(gfs_cyc) - - # Set GFS cycling dates - hrdet = 0 - if gfs_cyc == 1: - hrinc = 24 - sdate.hour - hrdet = edate.hour - elif gfs_cyc == 2: - if sdate.hour in [0, 12]: - hrinc = 12 - elif sdate.hour in [6, 18]: - hrinc = 6 - if edate.hour in [6, 18]: - hrdet = 6 - elif gfs_cyc == 4: - hrinc = 6 - sdate_gfs = sdate + timedelta(hours=hrinc) - edate_gfs = edate - timedelta(hours=hrdet) - if sdate_gfs > edate: - print('W A R N I N G!') - 
print('Starting date for GFS cycles is after Ending date of experiment') - print(f'SDATE = {sdate.strftime("%Y%m%d%H")}, EDATE = {edate.strftime("%Y%m%d%H")}') - print(f'SDATE_GFS = {sdate_gfs.strftime("%Y%m%d%H")}, EDATE_GFS = {edate_gfs.strftime("%Y%m%d%H")}') - gfs_cyc = 0 - - base_out['gfs_cyc'] = gfs_cyc - base_out['SDATE_GFS'] = sdate_gfs - base_out['EDATE_GFS'] = edate_gfs - base_out['INTERVAL_GFS'] = interval_gfs - - fhmax_gfs = {} - for hh in ['00', '06', '12', '18']: - fhmax_gfs[hh] = base.get(f'FHMAX_GFS_{hh}', 'FHMAX_GFS_00') - base_out['FHMAX_GFS'] = fhmax_gfs - - return base_out - - -def get_preamble(): - ''' - Generate preamble for XML - ''' - - strings = [] - - strings.append('\n') - strings.append('\n') - - return ''.join(strings) - - -def get_definitions(base): - ''' - Create entities related to the experiment - ''' - - machine = base.get('machine', wfu.detectMachine()) - scheduler = wfu.get_scheduler(machine) - hpssarch = base.get('HPSSARCH', 'NO').upper() - - strings = [] - - strings.append('\n') - strings.append('\t\n') - strings.append(f'''\t\n''') - strings.append(f'''\t\n''') - strings.append(f'''\t\n''') - - if base['gfs_cyc'] != 0: - strings.append(get_gfs_dates(base)) - strings.append('\n') - - strings.append('\t\n') - strings.append(f'''\t\n''') - strings.append('\n') - strings.append('\t\n') - strings.append(f'''\t\n''') - strings.append(f'''\t\n''') - strings.append('\n') - strings.append('\t\n') - strings.append(f'''\t\n''') - strings.append(f'''\t\n''') - strings.append(f'''\t\n''') - strings.append('\n') - strings.append('\t\n') - strings.append(f'''\t\n''') - - strings.append(f'''\t\n''') - strings.append(f'''\t\n''') - if scheduler in ['slurm']: - strings.append(f'''\t\n''') - strings.append(f'''\t\n''') - strings.append(f'\t\n') - strings.append('\n') - strings.append('\t\n') - strings.append(f'''\t\n''') - strings.append('\n') - strings.append('\t\n') - strings.append('\t\n') - strings.append('\t\n') - strings.append('\t\n') - strings.append('\n') - - return ''.join(strings) - - -def get_gfs_dates(base): - ''' - Generate GFS dates entities - ''' - - strings = [] - - strings.append('\n') - strings.append('\t\n') - strings.append(f'''\t\n''') - strings.append(f'''\t\n''') - strings.append(f'''\t\n''') - - return ''.join(strings) - - -def get_gdasgfs_resources(dict_configs, cdump='gdas'): - ''' - Create GDAS or GFS resource entities - ''' - - base = dict_configs['base'] - machine = base.get('machine', wfu.detectMachine()) - scheduler = wfu.get_scheduler(machine) - do_bufrsnd = base.get('DO_BUFRSND', 'NO').upper() - do_gempak = base.get('DO_GEMPAK', 'NO').upper() - do_awips = base.get('DO_AWIPS', 'NO').upper() - do_wafs = base.get('WAFSF', 'NO').upper() - do_metp = base.get('DO_METP', 'NO').upper() - do_gldas = base.get('DO_GLDAS', 'NO').upper() - do_wave = base.get('DO_WAVE', 'NO').upper() - do_wave_cdump = base.get('WAVE_CDUMP', 'BOTH').upper() - reservation = base.get('RESERVATION', 'NONE').upper() - - #tasks = ['prep', 'anal', 'fcst', 'post', 'vrfy', 'arch'] - tasks = ['prep', 'anal', 'analcalc'] - - if cdump in ['gdas']: - tasks += ['analdiag'] - if cdump in ['gdas'] and do_gldas in ['Y', 'YES']: - tasks += ['gldas'] - if cdump in ['gdas'] and do_wave in ['Y', 'YES'] and do_wave_cdump in ['GDAS', 'BOTH']: - #tasks += ['waveinit', 'waveprep', 'wavepostsbs', 'wavepostbndpnt', 'wavepostpnt', 'wavestat'] - tasks += ['waveinit', 'waveprep', 'wavepostsbs', 'wavepostbndpnt', 'wavepostbndpntbll', 'wavepostpnt'] - - tasks += ['fcst', 'post', 'vrfy', 'arch'] - - 
if cdump in ['gfs'] and do_wave in ['Y', 'YES'] and do_wave_cdump in ['GFS', 'BOTH']: - #tasks += ['waveinit', 'waveprep', 'wavepostsbs', 'wavepostbndpnt', 'wavepostpnt', 'wavestat'] - tasks += ['waveinit', 'waveprep', 'wavepostsbs', 'wavepostbndpnt', 'wavepostbndpntbll', 'wavepostpnt'] - if cdump in ['gfs'] and do_bufrsnd in ['Y', 'YES']: - tasks += ['postsnd'] - if cdump in ['gfs'] and do_gempak in ['Y', 'YES']: - tasks += ['gempak'] - if cdump in ['gfs'] and do_wave in ['Y', 'YES'] and do_gempak in ['Y', 'YES']: - tasks += ['wavegempak'] - if cdump in ['gfs'] and do_awips in ['Y', 'YES']: - tasks += ['awips'] - if cdump in ['gfs'] and do_wafs in ['Y', 'YES']: - tasks += ['wafs', 'wafsgrib2', 'wafsblending', 'wafsgcip', 'wafsgrib20p25', 'wafsblending0p25'] - if cdump in ['gfs'] and do_metp in ['Y', 'YES']: - tasks += ['metp'] - if cdump in ['gfs'] and do_wave in ['Y', 'YES'] and do_awips in ['Y', 'YES']: - tasks += ['waveawipsbulls', 'waveawipsgridded'] - - dict_resources = OrderedDict() - - for task in tasks: - - cfg = dict_configs[task] - - wtimestr, resstr, queuestr, memstr, natstr = wfu.get_resources(machine, cfg, task, reservation, cdump=cdump) - taskstr = f'{task.upper()}_{cdump.upper()}' - - strings = [] - strings.append(f'\t\n') - if scheduler in ['slurm']: - if task in ['arch']: - strings.append(f'\t\n') - else: - strings.append(f'\t\n') - - strings.append(f'\t\n') - strings.append(f'\t\n') - if len(memstr) != 0: - strings.append(f'\t\n') - strings.append(f'\t\n') - - dict_resources[f'{cdump}{task}'] = ''.join(strings) - - return dict_resources - - -def get_hyb_resources(dict_configs): - ''' - Create hybrid resource entities - ''' - - base = dict_configs['base'] - machine = base.get('machine', wfu.detectMachine()) - scheduler = wfu.get_scheduler(machine) - lobsdiag_forenkf = base.get('lobsdiag_forenkf', '.false.').upper() - eupd_cyc= base.get('EUPD_CYC', 'gdas').upper() - reservation = base.get('RESERVATION', 'NONE').upper() - - dict_resources = OrderedDict() - - # These tasks can be run in either or both cycles - if lobsdiag_forenkf in ['.T.', '.TRUE.']: - tasks1 = ['eobs', 'ediag', 'eupd', 'echgres'] - else: - tasks1 = ['eobs', 'eomg', 'eupd', 'echgres'] - - if eupd_cyc in ['BOTH']: - cdumps = ['gfs', 'gdas'] - elif eupd_cyc in ['GFS']: - cdumps = ['gfs'] - elif eupd_cyc in ['GDAS']: - cdumps = ['gdas'] - - for cdump in cdumps: - for task in tasks1: - - cfg = dict_configs['eobs'] if task in ['eomg'] else dict_configs[task] - - wtimestr, resstr, queuestr, memstr, natstr = wfu.get_resources(machine, cfg, task, reservation, cdump=cdump) - - taskstr = f'{task.upper()}_{cdump.upper()}' - - strings = [] - - strings.append(f'\t\n') - if scheduler in ['slurm']: - strings.append(f'\t\n') - strings.append(f'\t\n') - strings.append(f'\t\n') - if len(memstr) != 0: - strings.appendf(f'\t\n') - strings.append(f'\t\n') - - dict_resources[f'{cdump}{task}'] = ''.join(strings) - - - # These tasks are always run as part of the GDAS cycle - cdump = 'gdas' - tasks2 = ['ecen', 'esfc', 'efcs', 'epos', 'earc'] - for task in tasks2: - - cfg = dict_configs[task] - - wtimestr, resstr, queuestr, memstr, natstr = wfu.get_resources(machine, cfg, task, reservation, cdump=cdump) - - taskstr = f'{task.upper()}_{cdump.upper()}' - - strings = [] - strings.append(f'\t\n') - if scheduler in ['slurm']: - if task in ['earc']: - strings.append(f'\t\n') - else: - strings.append(f'\t\n') - - strings.append(f'\t\n') - strings.append(f'\t\n') - if len(memstr) != 0: - strings.append(f'\t\n') - strings.append(f'\t\n') - - 
dict_resources[f'{cdump}{task}'] = ''.join(strings) - - return dict_resources - - -def get_gdasgfs_tasks(dict_configs, cdump='gdas'): - ''' - Create GDAS or GFS tasks - ''' - - envars = [] - if wfu.get_scheduler(wfu.detectMachine()) in ['slurm']: - envars.append(rocoto.create_envar(name='SLURM_SET', value='YES')) - envars.append(rocoto.create_envar(name='RUN_ENVIR', value='&RUN_ENVIR;')) - envars.append(rocoto.create_envar(name='HOMEgfs', value='&HOMEgfs;')) - envars.append(rocoto.create_envar(name='EXPDIR', value='&EXPDIR;')) - envars.append(rocoto.create_envar(name='CDATE', value='@Y@m@d@H')) - envars.append(rocoto.create_envar(name='CDUMP', value=f'{cdump}')) - envars.append(rocoto.create_envar(name='PDY', value='@Y@m@d')) - envars.append(rocoto.create_envar(name='cyc', value='@H')) - - base = dict_configs['base'] - gfs_cyc = base.get('gfs_cyc', 0) - gldas_cyc = base.get('gldas_cyc', 0) - dohybvar = base.get('DOHYBVAR', 'NO').upper() - eupd_cyc = base.get('EUPD_CYC', 'gdas').upper() - do_bufrsnd = base.get('DO_BUFRSND', 'NO').upper() - do_gempak = base.get('DO_GEMPAK', 'NO').upper() - do_awips = base.get('DO_AWIPS', 'NO').upper() - do_wafs = base.get('WAFSF', 'NO').upper() - do_metp = base.get('DO_METP', 'NO').upper() - do_gldas = base.get('DO_GLDAS', 'NO').upper() - do_wave = base.get('DO_WAVE', 'NO').upper() - if do_wave in ['YES']: - do_wave_bnd = dict_configs['wavepostsbs'].get('DOBNDPNT_WAVE', "YES").upper() - do_wave_cdump = base.get('WAVE_CDUMP', 'BOTH').upper() - dumpsuffix = base.get('DUMP_SUFFIX', '') - gridsuffix = base.get('SUFFIX', '') - - dict_tasks = OrderedDict() - - # prep - deps = [] - dep_dict = {'type': 'metatask', 'name': f'{"gdas"}post', 'offset': '-06:00:00'} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/gdas.@Y@m@d/@H/atmos/gdas.t@Hz.atmf009{gridsuffix}' - dep_dict = {'type': 'data', 'data': data, 'offset': '-06:00:00'} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&DMPDIR;/{cdump}{dumpsuffix}.@Y@m@d/@H/{cdump}.t@Hz.updated.status.tm00.bufr_d' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - - gfs_enkf = True if eupd_cyc in ['BOTH', 'GFS'] and dohybvar in ['Y', 'YES'] else False - - if gfs_enkf and cdump in ['gfs']: - if gfs_cyc == 4: - task = wfu.create_wf_task('prep', cdump=cdump, envar=envars, dependency=dependencies) - else: - task = wfu.create_wf_task('prep', cdump=cdump, envar=envars, dependency=dependencies, cycledef='gdas') - - else: - task = wfu.create_wf_task('prep', cdump=cdump, envar=envars, dependency=dependencies) - - dict_tasks[f'{cdump}prep'] = task - - # wave tasks in gdas or gfs or both - if do_wave_cdump in ['BOTH']: - cdumps = ['gfs', 'gdas'] - elif do_wave_cdump in ['GFS']: - cdumps = ['gfs'] - elif do_wave_cdump in ['GDAS']: - cdumps = ['gdas'] - - # waveinit - if do_wave in ['Y', 'YES'] and cdump in cdumps: - deps = [] - dep_dict = {'type': 'task', 'name': '{cdump}prep'} - deps.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'cycleexist', 'condition': 'not', 'offset': '-06:00:00'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='or', dep=deps) - task = wfu.create_wf_task('waveinit', cdump=cdump, envar=envars, dependency=dependencies) - dict_tasks['{cdump}waveinit'] = task - - # waveprep - if do_wave in ['Y', 'YES'] and cdump in cdumps: - deps = [] - dep_dict = {'type': 'task', 'name': '{cdump}waveinit'} - 
deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - task = wfu.create_wf_task('waveprep', cdump=cdump, envar=envars, dependency=dependencies) - dict_tasks['{cdump}waveprep'] = task - - # anal - deps = [] - dep_dict = {'type': 'task', 'name': f'{cdump}prep'} - deps.append(rocoto.add_dependency(dep_dict)) - if dohybvar in ['y', 'Y', 'yes', 'YES']: - dep_dict = {'type': 'metatask', 'name': f'{"gdas"}epmn', 'offset': '-06:00:00'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - else: - dependencies = rocoto.create_dependency(dep=deps) - task = wfu.create_wf_task('anal', cdump=cdump, envar=envars, dependency=dependencies) - - dict_tasks[f'{cdump}anal'] = task - - # analcalc - deps1 = [] - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.loginc.txt' - dep_dict = {'type': 'data', 'data': data} - deps1.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'task', 'name': f'{cdump}anal'} - deps.append(rocoto.add_dependency(dep_dict)) - if dohybvar in ['y', 'Y', 'yes', 'YES'] and cdump == 'gdas': - dep_dict = {'type': 'task', 'name': f'{"gdas"}echgres', 'offset': '-06:00:00'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - else: - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - task = wfu.create_wf_task('analcalc', cdump=cdump, envar=envars, dependency=dependencies) - - dict_tasks[f'{cdump}analcalc'] = task - - # analdiag - if cdump in ['gdas']: - deps1 = [] - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.loginc.txt' - dep_dict = {'type': 'data', 'data': data} - deps1.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'task', 'name': f'{cdump}anal'} - deps1.append(rocoto.add_dependency(dep_dict)) - dependencies1 = rocoto.create_dependency(dep_condition='or', dep=deps1) - - deps2 = [] - deps2 = dependencies1 - dep_dict = {'type': 'cycleexist', 'offset': '-06:00:00'} - deps2.append(rocoto.add_dependency(dep_dict)) - dependencies2 = rocoto.create_dependency(dep_condition='and', dep=deps2) - - task = wfu.create_wf_task('analdiag', cdump=cdump, envar=envars, dependency=dependencies2) - - dict_tasks[f'{cdump}analdiag'] = task - - # gldas - if cdump in ['gdas'] and do_gldas in ['Y', 'YES']: - deps1 = [] - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.loginc.txt' - dep_dict = {'type': 'data', 'data': data} - deps1.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'task', 'name': f'{cdump}anal'} - deps1.append(rocoto.add_dependency(dep_dict)) - dependencies1 = rocoto.create_dependency(dep_condition='or', dep=deps1) - - deps2 = [] - deps2 = dependencies1 - dep_dict = {'type': 'cycleexist', 'offset': '-06:00:00'} - deps2.append(rocoto.add_dependency(dep_dict)) - dependencies2 = rocoto.create_dependency(dep_condition='and', dep=deps2) - - task = wfu.create_wf_task('gldas', cdump=cdump, envar=envars, dependency=dependencies2) - dict_tasks[f'{cdump}gldas'] = task - - # fcst - deps1 = [] - #data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.loginc.txt' - #dep_dict = {'type': 'data', 'data': data} - #deps1.append(rocoto.add_dependency(dep_dict)) - if cdump in ['gdas']: - dep_dict = {'type': 'cycleexist', 'condition': 'not', 'offset': '-06:00:00'} - deps1.append(rocoto.add_dependency(dep_dict)) - if do_gldas in ['Y', 'YES']: - dep_dict = {'type': 'task', 'name': f'{cdump}gldas'} - deps1.append(rocoto.add_dependency(dep_dict)) - else: - 
dep_dict = {'type': 'task', 'name': f'{cdump}analcalc'} - deps1.append(rocoto.add_dependency(dep_dict)) - elif cdump in ['gfs']: - dep_dict = {'type': 'task', 'name': f'{cdump}anal'} - deps1.append(rocoto.add_dependency(dep_dict)) - dependencies1 = rocoto.create_dependency(dep_condition='or', dep=deps1) - - if do_wave in ['Y', 'YES'] and cdump in cdumps: - deps2 = [] - deps2 = dependencies1 - dep_dict = {'type': 'task', 'name': f'{cdump}waveprep'} - deps2.append(rocoto.add_dependency(dep_dict)) - dependencies2 = rocoto.create_dependency(dep_condition='and', dep=deps2) - task = wfu.create_wf_task('fcst', cdump=cdump, envar=envars, dependency=dependencies2) - else: - task = wfu.create_wf_task('fcst', cdump=cdump, envar=envars, dependency=dependencies1) - - dict_tasks[f'{cdump}fcst'] = task - - # post - deps = [] - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.log#dep#.txt' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'task', 'name': f'{cdump}fcst'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='or', dep=deps) - fhrgrp = rocoto.create_envar(name='FHRGRP', value='#grp#') - fhrlst = rocoto.create_envar(name='FHRLST', value='#lst#') - ROTDIR = rocoto.create_envar(name='ROTDIR', value='&ROTDIR;') - postenvars = envars + [fhrgrp] + [fhrlst] + [ROTDIR] - varname1, varname2, varname3 = 'grp', 'dep', 'lst' - varval1, varval2, varval3 = get_postgroups(dict_configs['post'], cdump=cdump) - vardict = {varname2: varval2, varname3: varval3} - task = wfu.create_wf_task('post', cdump=cdump, envar=postenvars, dependency=dependencies, - metatask='post', varname=varname1, varval=varval1, vardict=vardict) - - dict_tasks[f'{cdump}post'] = task - - # wavepostsbs - if do_wave in ['Y', 'YES'] and cdump in cdumps: - deps = [] - for wave_grid in dict_configs['wavepostsbs']['waveGRD'].split(): - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/wave/rundata/{cdump}wave.out_grd.{wave_grid}.@Y@m@d.@H0000' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - task = wfu.create_wf_task('wavepostsbs', cdump=cdump, envar=envars, dependency=dependencies) - dict_tasks[f'{cdump}wavepostsbs'] = task - - # wavepostbndpnt - if do_wave in ['Y', 'YES'] and do_wave_bnd in ['YES'] and cdump in ['gfs']: - deps = [] - dep_dict = {'type': 'task', 'name': f'{cdump}fcst'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - task = wfu.create_wf_task('wavepostbndpnt', cdump=cdump, envar=envars, dependency=dependencies) - dict_tasks[f'{cdump}wavepostbndpnt'] = task - - # wavepostbndpntbll - if do_wave in ['Y', 'YES'] and do_wave_bnd in ['YES'] and cdump in ['gfs']: - deps = [] - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.logf180.txt' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'task', 'name': f'{cdump}wavepostbndpnt'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - task = wfu.create_wf_task('wavepostbndpntbll', cdump=cdump, envar=envars, dependency=dependencies) - dict_tasks[f'{cdump}wavepostbndpntbll'] = task - - # wavepostpnt - if do_wave in ['Y', 'YES'] and cdump in ['gdas', 'gfs']: - deps = [] - dep_dict = {'type': 'task', 'name': f'{cdump}fcst'} - deps.append(rocoto.add_dependency(dep_dict)) - if do_wave_bnd 
in ['YES'] and cdump in ['gfs']: - dep_dict = {'type': 'task', 'name': f'{cdump}wavepostbndpntbll'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - task = wfu.create_wf_task('wavepostpnt', cdump=cdump, envar=envars, dependency=dependencies) - dict_tasks[f'{cdump}wavepostpnt'] = task - - # wavegempak - if do_wave in ['Y', 'YES'] and do_gempak in ['Y', 'YES'] and cdump in ['gfs']: - deps = [] - dep_dict = {'type':'task', 'name':f'{cdump}wavepostsbs'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - task = wfu.create_wf_task('wavegempak', cdump=cdump, envar=envars, dependency=dependencies) - dict_tasks[f'{cdump}wavegempak'] = task - - # waveawipsgridded - if do_wave in ['Y', 'YES'] and do_awips in ['Y', 'YES'] and cdump in ['gfs']: - deps = [] - dep_dict = {'type':'task', 'name':f'{cdump}wavepostsbs'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - task = wfu.create_wf_task('waveawipsgridded', cdump=cdump, envar=envars, dependency=dependencies) - dict_tasks[f'{cdump}waveawipsgridded'] = task - - # waveawipsbulls - if do_wave in ['Y', 'YES'] and do_awips in ['Y', 'YES'] and cdump in ['gfs']: - deps = [] - dep_dict = {'type':'task', 'name':f'{cdump}wavepostsbs'} - deps.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type':'task', 'name':f'{cdump}wavepostpnt'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - task = wfu.create_wf_task('waveawipsbulls', cdump=cdump, envar=envars, dependency=dependencies) - dict_tasks[f'{cdump}waveawipsbulls'] = task - - # wavestat - #if do_wave in ['Y', 'YES'] and cdump in cdumps: - # deps = [] - # dep_dict = {'type':'task', 'name':'%swavepost' % cdump} - # deps.append(rocoto.add_dependency(dep_dict)) - # dependencies = rocoto.create_dependency(dep=deps) - # task = wfu.create_wf_task('wavestat', cdump=cdump, envar=envars, dependency=dependencies) - # dict_tasks['%swavestat' % cdump] = task - - # vrfy - deps = [] - dep_dict = {'type': 'metatask', 'name': f'{cdump}post'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - task = wfu.create_wf_task('vrfy', cdump=cdump, envar=envars, dependency=dependencies) - - dict_tasks[f'{cdump}vrfy'] = task - - # metp - if cdump in ['gfs'] and do_metp in ['Y', 'YES']: - deps = [] - dep_dict = {'type':'metatask', 'name':f'{cdump}post'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - sdate_gfs = rocoto.create_envar(name='SDATE_GFS', value='&SDATE_GFS;') - metpcase = rocoto.create_envar(name='METPCASE', value='#metpcase#') - metpenvars = envars + [sdate_gfs] + [metpcase] - varname1 = 'metpcase' - varval1 = 'g2g1 g2o1 pcp1' - task = wfu.create_wf_task('metp', cdump=cdump, envar=metpenvars, dependency=dependencies, - metatask='metp', varname=varname1, varval=varval1) - dict_tasks[f'{cdump}metp'] = task - - #postsnd - if cdump in ['gfs'] and do_bufrsnd in ['Y', 'YES']: - deps = [] - dep_dict = {'type': 'task', 'name': f'{cdump}fcst'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - task = wfu.create_wf_task('postsnd', cdump=cdump, envar=envars, dependency=dependencies) - - dict_tasks[f'{cdump}postsnd'] = task - - # awips - if cdump in ['gfs'] and do_awips in ['Y', 'YES']: - deps = [] - dep_dict = {'type': 
'metatask', 'name': f'{cdump}post'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - fhrgrp = rocoto.create_envar(name='FHRGRP', value='#grp#') - fhrlst = rocoto.create_envar(name='FHRLST', value='#lst#') - ROTDIR = rocoto.create_envar(name='ROTDIR', value='&ROTDIR;') - awipsenvars = envars + [fhrgrp] + [fhrlst] + [ROTDIR] - varname1, varname2, varname3 = 'grp', 'dep', 'lst' - varval1, varval2, varval3 = get_awipsgroups(dict_configs['awips'], cdump=cdump) - vardict = {varname2: varval2, varname3: varval3} - task = wfu.create_wf_task('awips', cdump=cdump, envar=awipsenvars, dependency=dependencies, - metatask='awips', varname=varname1, varval=varval1, vardict=vardict) - - dict_tasks[f'{cdump}awips'] = task - - # gempak - if cdump in ['gfs'] and do_gempak in ['Y', 'YES']: - deps = [] - dep_dict = {'type': 'metatask', 'name': f'{cdump}post'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - task = wfu.create_wf_task('gempak', cdump=cdump, envar=envars, dependency=dependencies) - - dict_tasks[f'{cdump}gempak'] = task - - # wafs - if cdump in ['gfs'] and do_wafs in ['Y', 'YES']: - deps = [] - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if006' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if012' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if015' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if018' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if021' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if024' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if027' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if030' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if033' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if036' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - task = wfu.create_wf_task('wafs', cdump=cdump, envar=envars, dependency=dependencies) - - dict_tasks[f'{cdump}wafs'] = task - - # wafsgcip - if cdump in ['gfs'] and do_wafs in ['Y', 'YES']: - deps = [] - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if006' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if012' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if015' - dep_dict = {'type': 'data', 
'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if018' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if021' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if024' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if027' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if030' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if033' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if036' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - task = wfu.create_wf_task('wafsgcip', cdump=cdump, envar=envars, dependency=dependencies) - - dict_tasks[f'{cdump}wafsgcip'] = task - - # wafsgrib2 - if cdump in ['gfs'] and do_wafs in ['Y', 'YES']: - deps = [] - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if006' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if012' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if015' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if018' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if021' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if024' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if027' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if030' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if033' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if036' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - task = wfu.create_wf_task('wafsgrib2', cdump=cdump, envar=envars, dependency=dependencies) - - dict_tasks[f'{cdump}wafsgrib2'] = task - - # wafsgrib20p25 - if cdump in ['gfs'] and do_wafs in ['Y', 'YES']: - deps = [] - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if006' - dep_dict = {'type': 'data', 'data': data} - 
deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if012' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if015' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if018' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if021' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if024' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if027' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if030' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if033' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if036' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - task = wfu.create_wf_task('wafsgrib20p25', cdump=cdump, envar=envars, dependency=dependencies) - - dict_tasks[f'{cdump}wafsgrib20p25'] = task - - # wafsblending - if cdump in ['gfs'] and do_wafs in ['Y', 'YES']: - deps = [] - dep_dict = {'type': 'task', 'name': f'{cdump}wafsgrib2'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - task = wfu.create_wf_task('wafsblending', cdump=cdump, envar=envars, dependency=dependencies) - - dict_tasks[f'{cdump}wafsblending'] = task - - # wafsblending0p25 - if cdump in ['gfs'] and do_wafs in ['Y', 'YES']: - deps = [] - dep_dict = {'type': 'task', 'name': f'{cdump}wafsgrib20p25'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - task = wfu.create_wf_task('wafsblending0p25', cdump=cdump, envar=envars, dependency=dependencies) - - dict_tasks[f'{cdump}wafsblending0p25'] = task - - # arch - deps = [] - dep_dict = {'type': 'task', 'name': f'{cdump}vrfy'} - deps.append(rocoto.add_dependency(dep_dict)) - if cdump in ['gfs'] and do_metp in ['Y', 'YES']: - dep_dict = {'type':'metatask', 'name':f'{cdump}metp'} - deps.append(rocoto.add_dependency(dep_dict)) - if do_wave in ['Y', 'YES']: - dep_dict = {'type': 'task', 'name': f'{cdump}wavepostsbs'} - deps.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'task', 'name': f'{cdump}wavepostpnt'} - deps.append(rocoto.add_dependency(dep_dict)) - if do_wave_bnd in ['YES'] and cdump in ['gfs']: - dep_dict = {'type': 'task', 'name': f'{cdump}wavepostbndpnt'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - task = wfu.create_wf_task('arch', cdump=cdump, envar=envars, dependency=dependencies) - - dict_tasks[f'{cdump}arch'] = task - - return dict_tasks - - -def get_hyb_tasks(dict_configs, cycledef='enkf'): - ''' - Create Hybrid tasks - ''' - - # Determine groups based on ensemble 
size and grouping - base = dict_configs['base'] - nens = base['NMEM_ENKF'] - lobsdiag_forenkf = base.get('lobsdiag_forenkf', '.false.').upper() - eupd_cyc = base.get('EUPD_CYC', 'gdas').upper() - - eobs = dict_configs['eobs'] - nens_eomg = eobs['NMEM_EOMGGRP'] - neomg_grps = nens / nens_eomg - EOMGGROUPS = ' '.join([f'{x:02d}' for x in range(1, int(neomg_grps) + 1)]) - - efcs = dict_configs['efcs'] - nens_efcs = efcs['NMEM_EFCSGRP'] - nefcs_grps = nens / nens_efcs - EFCSGROUPS = ' '.join([f'{x:02d}' for x in range(1, int(nefcs_grps) + 1)]) - - earc = dict_configs['earc'] - nens_earc = earc['NMEM_EARCGRP'] - nearc_grps = nens / nens_earc - EARCGROUPS = ' '.join([f'{x:02d}' for x in range(0, int(nearc_grps) + 1)]) - - envars = [] - if wfu.get_scheduler(wfu.detectMachine()) in ['slurm']: - envars.append(rocoto.create_envar(name='SLURM_SET', value='YES')) - envars.append(rocoto.create_envar(name='RUN_ENVIR', value='&RUN_ENVIR;')) - envars.append(rocoto.create_envar(name='HOMEgfs', value='&HOMEgfs;')) - envars.append(rocoto.create_envar(name='EXPDIR', value='&EXPDIR;')) - envars.append(rocoto.create_envar(name='CDATE', value='@Y@m@d@H')) - #envars.append(rocoto.create_envar(name='CDUMP', value=f'{cdump}')) - envars.append(rocoto.create_envar(name='PDY', value='@Y@m@d')) - envars.append(rocoto.create_envar(name='cyc', value='@H')) - - ensgrp = rocoto.create_envar(name='ENSGRP', value='#grp#') - - dict_tasks = OrderedDict() - - if eupd_cyc in ['BOTH']: - cdumps = ['gfs', 'gdas'] - elif eupd_cyc in ['GFS']: - cdumps = ['gfs'] - elif eupd_cyc in ['GDAS']: - cdumps = ['gdas'] - - for cdump in cdumps: - - envar_cdump = rocoto.create_envar(name='CDUMP', value=f'{cdump}') - envars1 = envars + [envar_cdump] - - # eobs - deps = [] - dep_dict = {'type': 'task', 'name': f'{cdump}prep'} - deps.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'metatask', 'name': f'{"gdas"}epmn', 'offset': '-06:00:00'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - task = wfu.create_wf_task('eobs', cdump=cdump, envar=envars1, dependency=dependencies, cycledef=cycledef) - - dict_tasks[f'{cdump}eobs'] = task - - # eomn, eomg - if lobsdiag_forenkf in ['.F.', '.FALSE.']: - deps = [] - dep_dict = {'type': 'task', 'name': f'{cdump}eobs'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - eomgenvars= envars1 + [ensgrp] - task = wfu.create_wf_task('eomg', cdump=cdump, envar=eomgenvars, dependency=dependencies, - metatask='eomn', varname='grp', varval=EOMGGROUPS, cycledef=cycledef) - - dict_tasks[f'{cdump}eomn'] = task - - # ediag - else: - deps = [] - dep_dict = {'type': 'task', 'name': f'{cdump}eobs'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - task = wfu.create_wf_task('ediag', cdump=cdump, envar=envars1, dependency=dependencies, cycledef=cycledef) - - dict_tasks[f'{cdump}ediag'] = task - - # eupd - deps = [] - if lobsdiag_forenkf in ['.F.', '.FALSE.']: - dep_dict = {'type': 'metatask', 'name': f'{cdump}eomn'} - else: - dep_dict = {'type': 'task', 'name': f'{cdump}ediag'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - task = wfu.create_wf_task('eupd', cdump=cdump, envar=envars1, dependency=dependencies, cycledef=cycledef) - - dict_tasks[f'{cdump}eupd'] = task - - # All hybrid tasks beyond this point are always executed in the GDAS cycle - cdump = 'gdas' - envar_cdump = 
rocoto.create_envar(name='CDUMP', value=f'{cdump}') - envars1 = envars + [envar_cdump] - cdump_eupd = 'gfs' if eupd_cyc in ['GFS'] else 'gdas' - - # ecmn, ecen - deps1 = [] - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.loganl.txt' - dep_dict = {'type': 'data', 'data': data} - deps1.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'task', 'name': f'{cdump}analcalc'} - deps1.append(rocoto.add_dependency(dep_dict)) - dependencies1 = rocoto.create_dependency(dep_condition='or', dep=deps1) - - deps2 = [] - deps2 = dependencies1 - dep_dict = {'type': 'task', 'name': f'{cdump_eupd}eupd'} - deps2.append(rocoto.add_dependency(dep_dict)) - dependencies2 = rocoto.create_dependency(dep_condition='and', dep=deps2) - - fhrgrp = rocoto.create_envar(name='FHRGRP', value='#grp#') - fhrlst = rocoto.create_envar(name='FHRLST', value='#lst#') - ecenenvars = envars1 + [fhrgrp] + [fhrlst] - varname1, varname2, varname3 = 'grp', 'dep', 'lst' - varval1, varval2, varval3 = get_ecengroups(dict_configs, dict_configs['ecen'], cdump=cdump) - vardict = {varname2: varval2, varname3: varval3} - task = wfu.create_wf_task('ecen', cdump=cdump, envar=ecenenvars, dependency=dependencies2, - metatask='ecmn', varname=varname1, varval=varval1, vardict=vardict) - - dict_tasks[f'{cdump}ecmn'] = task - - # esfc - deps1 = [] - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.loganl.txt' - dep_dict = {'type': 'data', 'data': data} - deps1.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'task', 'name': f'{cdump}analcalc'} - deps1.append(rocoto.add_dependency(dep_dict)) - dependencies1 = rocoto.create_dependency(dep_condition='or', dep=deps1) - - deps2 = [] - deps2 = dependencies1 - dep_dict = {'type': 'task', 'name': f'{cdump_eupd}eupd'} - deps2.append(rocoto.add_dependency(dep_dict)) - dependencies2 = rocoto.create_dependency(dep_condition='and', dep=deps2) - task = wfu.create_wf_task('esfc', cdump=cdump, envar=envars1, dependency=dependencies2, cycledef=cycledef) - - dict_tasks[f'{cdump}esfc'] = task - - # efmn, efcs - deps1 = [] - dep_dict = {'type': 'metatask', 'name': f'{cdump}ecmn'} - deps1.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'task', 'name': f'{cdump}esfc'} - deps1.append(rocoto.add_dependency(dep_dict)) - dependencies1 = rocoto.create_dependency(dep_condition='and', dep=deps1) - - deps2 = [] - deps2 = dependencies1 - dep_dict = {'type': 'cycleexist', 'condition': 'not', 'offset': '-06:00:00'} - deps2.append(rocoto.add_dependency(dep_dict)) - dependencies2 = rocoto.create_dependency(dep_condition='or', dep=deps2) - - efcsenvars = envars1 + [ensgrp] - task = wfu.create_wf_task('efcs', cdump=cdump, envar=efcsenvars, dependency=dependencies2, - metatask='efmn', varname='grp', varval=EFCSGROUPS, cycledef=cycledef) - - dict_tasks[f'{cdump}efmn'] = task - - # echgres - deps1 = [] - dep_dict = {'type': 'task', 'name': f'{cdump}fcst'} - deps1.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'metatask', 'name': f'{cdump}efmn'} - deps1.append(rocoto.add_dependency(dep_dict)) - dependencies1 = rocoto.create_dependency(dep_condition='and', dep=deps1) - task = wfu.create_wf_task('echgres', cdump=cdump, envar=envars1, dependency=dependencies1, cycledef=cycledef) - - dict_tasks[f'{cdump}echgres'] = task - - # epmn, epos - deps = [] - dep_dict = {'type': 'metatask', 'name': f'{cdump}efmn'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - fhrgrp = rocoto.create_envar(name='FHRGRP', value='#grp#') - fhrlst = 
rocoto.create_envar(name='FHRLST', value='#lst#') - eposenvars = envars1 + [fhrgrp] + [fhrlst] - varname1, varname2, varname3 = 'grp', 'dep', 'lst' - varval1, varval2, varval3 = get_eposgroups(dict_configs['epos'], cdump=cdump) - vardict = {varname2: varval2, varname3: varval3} - task = wfu.create_wf_task('epos', cdump=cdump, envar=eposenvars, dependency=dependencies, - metatask='epmn', varname=varname1, varval=varval1, vardict=vardict) - - dict_tasks[f'{cdump}epmn'] = task - - # eamn, earc - deps = [] - dep_dict = {'type': 'metatask', 'name': f'{cdump}epmn'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - earcenvars = envars1 + [ensgrp] - task = wfu.create_wf_task('earc', cdump=cdump, envar=earcenvars, dependency=dependencies, - metatask='eamn', varname='grp', varval=EARCGROUPS, cycledef=cycledef) - - dict_tasks[f'{cdump}eamn'] = task - - return dict_tasks - - -def get_workflow_header(base): - ''' - Create the workflow header block - ''' - - strings = [] - - strings.append('\n') - strings.append(']>\n') - strings.append('\n') - strings.append('\n') - strings.append('\n') - strings.append('\t&EXPDIR;/logs/@Y@m@d@H.log\n') - strings.append('\n') - strings.append('\t\n') - strings.append('\t&SDATE; &SDATE; 06:00:00\n') - strings.append('\t&SDATE; &EDATE; 06:00:00\n') - strings.append('\t&SDATE; &EDATE; 06:00:00\n') - if base['gfs_cyc'] != 0: - strings.append('\t&SDATE_GFS; &EDATE_GFS; &INTERVAL_GFS;\n') - - strings.append('\n') - - return ''.join(strings) - - -def get_workflow_footer(): - ''' - Generate workflow footer - ''' - - strings = [] - strings.append('\n\n') - - return ''.join(strings) - - -def get_postgroups(post, cdump='gdas'): - - fhmin = post['FHMIN'] - fhmax = post['FHMAX'] - fhout = post['FHOUT'] - - # Get a list of all forecast hours - if cdump in ['gdas']: - fhrs = range(fhmin, fhmax+fhout, fhout) - elif cdump in ['gfs']: - fhmax = np.max([post['FHMAX_GFS_00'],post['FHMAX_GFS_06'],post['FHMAX_GFS_12'],post['FHMAX_GFS_18']]) - fhout = post['FHOUT_GFS'] - fhmax_hf = post['FHMAX_HF_GFS'] - fhout_hf = post['FHOUT_HF_GFS'] - fhrs_hf = range(fhmin, fhmax_hf+fhout_hf, fhout_hf) - fhrs = list(fhrs_hf) + list(range(fhrs_hf[-1]+fhout, fhmax+fhout, fhout)) - - npostgrp = post['NPOSTGRP'] - ngrps = npostgrp if len(fhrs) > npostgrp else len(fhrs) - - fhrs = [f'f{f:03d}' for f in fhrs] - fhrs = np.array_split(fhrs, ngrps) - fhrs = [f.tolist() for f in fhrs] - - fhrgrp = ' '.join(['anl'] + [f'_{f[0]}-{f[-1]}' for f in fhrs]) - fhrdep = ' '.join(['anl'] + [f[-1] for f in fhrs]) - fhrlst = ' '.join(['anl'] + ['_'.join(f) for f in fhrs]) - - return fhrgrp, fhrdep, fhrlst - -def get_awipsgroups(awips, cdump='gdas'): - - fhmin = awips['FHMIN'] - fhmax = awips['FHMAX'] - fhout = awips['FHOUT'] - - # Get a list of all forecast hours - if cdump in ['gdas']: - fhrs = range(fhmin, fhmax+fhout, fhout) - elif cdump in ['gfs']: - fhmax = np.max([awips['FHMAX_GFS_00'],awips['FHMAX_GFS_06'],awips['FHMAX_GFS_12'],awips['FHMAX_GFS_18']]) - fhout = awips['FHOUT_GFS'] - fhmax_hf = awips['FHMAX_HF_GFS'] - fhout_hf = awips['FHOUT_HF_GFS'] - if fhmax > 240: - fhmax = 240 - if fhmax_hf > 240: - fhmax_hf = 240 - fhrs_hf = range(fhmin, fhmax_hf+fhout_hf, fhout_hf) - fhrs = fhrs_hf + range(fhrs_hf[-1]+fhout, fhmax+fhout, fhout) - - nawipsgrp = awips['NAWIPSGRP'] - ngrps = nawipsgrp if len(fhrs) > nawipsgrp else len(fhrs) - - fhrs = [f'f{f:03d}' for f in fhrs] - fhrs = np.array_split(fhrs, ngrps) - fhrs = [f.tolist() for f in fhrs] - - fhrgrp = ' '.join([f'{x:03d}' 
for x in range(0, ngrps)]) - fhrdep = ' '.join([f[-1] for f in fhrs]) - fhrlst = ' '.join(['_'.join(f) for f in fhrs]) - - return fhrgrp, fhrdep, fhrlst - -def get_ecengroups(dict_configs, ecen, cdump='gdas'): - - base = dict_configs['base'] - - if base.get('DOIAU_ENKF', 'NO') == 'YES' : - fhrs = list(base.get('IAUFHRS','6').split(',')) - ifhrs = [f'f00{ff}' for ff in fhrs] - ifhrs0 = ifhrs[0] - nfhrs = len(fhrs) - - ifhrs = [f'f00{ff}' for ff in fhrs] - ifhrs0 = ifhrs[0] - nfhrs = len(fhrs) - - necengrp = ecen['NECENGRP'] - ngrps = necengrp if len(fhrs) > necengrp else len(fhrs) - - ifhrs = np.array_split(ifhrs, ngrps) - - fhrgrp = ' '.join([f'{x:03d}' for x in range(0, ngrps)]) - fhrdep = ' '.join([f[-1] for f in ifhrs]) - fhrlst = ' '.join(['_'.join(f) for f in ifhrs]) - - else: - fhrgrp='000' - fhrdep='f006' - fhrlst='f006' - - return fhrgrp, fhrdep, fhrlst - -def get_eposgroups(epos, cdump='gdas'): - - fhmin = epos['FHMIN_ENKF'] - fhmax = epos['FHMAX_ENKF'] - fhout = epos['FHOUT_ENKF'] - fhrs = range(fhmin, fhmax+fhout, fhout) - - neposgrp = epos['NEPOSGRP'] - ngrps = neposgrp if len(fhrs) > neposgrp else len(fhrs) - - fhrs = [f'f{f:03d}' for f in fhrs] - fhrs = np.array_split(fhrs, ngrps) - fhrs = [f.tolist() for f in fhrs] - - fhrgrp = ' '.join([f'{x:03d}' for x in range(0, ngrps)]) - fhrdep = ' '.join([f[-1] for f in fhrs]) - fhrlst = ' '.join(['_'.join(f) for f in fhrs]) - - return fhrgrp, fhrdep, fhrlst - - -def dict_to_strings(dict_in): - - strings = [] - for key in dict_in.keys(): - strings.append(dict_in[key]) - strings.append('\n') - - return ''.join(strings) - - -def create_xml(dict_configs): - ''' - Given an dictionary of sourced config files, - create the workflow XML - ''' - - from builtins import any as b_any - #from __builtin__ import any as b_any - - base = dict_configs['base'] - dohybvar = base.get('DOHYBVAR', 'NO').upper() - gfs_cyc = base.get('gfs_cyc', 0) - eupd_cyc = base.get('EUPD_CYC', 'gdas').upper() - - # Start collecting workflow pieces - preamble = get_preamble() - definitions = get_definitions(base) - workflow_header = get_workflow_header(base) - workflow_footer = get_workflow_footer() - - # Get GDAS related entities, resources, workflow - dict_gdas_resources = get_gdasgfs_resources(dict_configs) - dict_gdas_tasks = get_gdasgfs_tasks(dict_configs) - - # Get hybrid related entities, resources, workflow - if dohybvar in ['Y', 'YES']: - - dict_hyb_resources = get_hyb_resources(dict_configs) - dict_hyb_tasks = get_hyb_tasks(dict_configs) - - # Removes &MEMORY_JOB_DUMP post mortem from hyb tasks - hyp_tasks = {'gdaseobs':'gdaseobs', - 'gdasediag':'gdasediag', - 'gdaseomg':'gdaseomn', - 'gdaseupd':'gdaseupd', - 'gdasecen':'gdasecmn', - 'gdasesfc':'gdasesfc', - 'gdasefcs':'gdasefmn', - 'gdasepos':'gdasepmn', - 'gdasearc':'gdaseamn', - 'gdasechgres':'gdasechgres'} - for each_task, each_resource_string in dict_hyb_resources.items(): - #print(each_task,hyp_tasks[each_task]) - #print(dict_hyb_tasks[hyp_tasks[each_task]]) - if 'MEMORY' not in each_resource_string: - if each_task in dict_hyb_tasks: - temp_task_string = [] - for each_line in re.split(r'(\s+)', dict_hyb_tasks[each_task]): - if 'memory' not in each_line: - temp_task_string.append(each_line) - dict_hyb_tasks[each_task] = ''.join(temp_task_string) - if hyp_tasks[each_task] in dict_hyb_tasks: - temp_task_string = [] - for each_line in re.split(r'(\s+)', dict_hyb_tasks[hyp_tasks[each_task]]): - if 'memory' not in each_line: - temp_task_string.append(each_line) - dict_hyb_tasks[hyp_tasks[each_task]] = 
''.join(temp_task_string) - - # Get GFS cycle related entities, resources, workflow - dict_gfs_resources = get_gdasgfs_resources(dict_configs, cdump='gfs') - dict_gfs_tasks = get_gdasgfs_tasks(dict_configs, cdump='gfs') - - # Removes &MEMORY_JOB_DUMP post mortem from gdas tasks - for each_task, each_resource_string in dict_gdas_resources.items(): - if each_task not in dict_gdas_tasks: - continue - if 'MEMORY' not in each_resource_string: - temp_task_string = [] - for each_line in re.split(r'(\s+)', dict_gdas_tasks[each_task]): - if 'memory' not in each_line: - temp_task_string.append(each_line) - dict_gdas_tasks[each_task] = ''.join(temp_task_string) - - # Removes &MEMORY_JOB_DUMP post mortem from gfs tasks - for each_task, each_resource_string in dict_gfs_resources.items(): - if each_task not in dict_gfs_tasks: - continue - if 'MEMORY' not in each_resource_string: - temp_task_string = [] - for each_line in re.split(r'(\s+)', dict_gfs_tasks[each_task]): - if 'memory' not in each_line: - temp_task_string.append(each_line) - dict_gfs_tasks[each_task] = ''.join(temp_task_string) - - # Put together the XML file - xmlfile = [] - - xmlfile.append(preamble) - - xmlfile.append(definitions) - - xmlfile.append(dict_to_strings(dict_gdas_resources)) - - if dohybvar in ['Y', 'YES']: - xmlfile.append(dict_to_strings(dict_hyb_resources)) - - if gfs_cyc != 0: - xmlfile.append(dict_to_strings(dict_gfs_resources)) - elif gfs_cyc == 0 and dohybvar in ['Y', 'YES'] and eupd_cyc in ['BOTH', 'GFS']: - xmlfile.append(dict_gfs_resources['gfsprep']) - - xmlfile.append(workflow_header) - - xmlfile.append(dict_to_strings(dict_gdas_tasks)) - - if dohybvar in ['Y', 'YES']: - xmlfile.append(dict_to_strings(dict_hyb_tasks)) - - if gfs_cyc != 0: - xmlfile.append(dict_to_strings(dict_gfs_tasks)) - elif gfs_cyc == 0 and dohybvar in ['Y', 'YES'] and eupd_cyc in ['BOTH', 'GFS']: - xmlfile.append(dict_gfs_tasks['gfsprep']) - xmlfile.append('\n') - - xmlfile.append(workflow_footer) - - # Write the XML file - fh = open(f'{base["EXPDIR"]}/{base["PSLOT"]}.xml', 'w') - fh.write(''.join(xmlfile)) - fh.close() - - return - - -if __name__ == '__main__': - main() - sys.exit(0) diff --git a/ush/rocoto/setup_workflow_fcstonly.py b/ush/rocoto/setup_workflow_fcstonly.py deleted file mode 100755 index e0fd802123..0000000000 --- a/ush/rocoto/setup_workflow_fcstonly.py +++ /dev/null @@ -1,897 +0,0 @@ -#!/usr/bin/env python3 - -''' - PROGRAM: - Create the ROCOTO workflow for a forecast only experiment given the configuration of the GFS parallel - - AUTHOR: - Rahul.Mahajan - rahul.mahajan@noaa.gov - - FILE DEPENDENCIES: - 1. config files for the parallel; e.g. config.base, config.fcst[.gfs], etc. - Without this dependency, the script will fail - - OUTPUT: - 1. PSLOT.xml: XML workflow - 2. 
PSLOT.crontab: crontab for ROCOTO run command - -''' - -import os -import sys -import re -import numpy as np -from datetime import datetime -from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter -import rocoto -import workflow_utils as wfu - -taskplan = ['getic', 'init', 'coupled_ic', 'aerosol_init', 'waveinit', 'waveprep', 'fcst', 'post', 'wavepostsbs', 'wavepostbndpnt', 'wavepostbndpntbll', 'wavepostpnt', 'wavegempak', 'waveawipsbulls', 'waveawipsgridded', 'wafs', 'wafsgrib2', 'wafsblending', 'wafsgcip', 'wafsgrib20p25', 'wafsblending0p25', 'postsnd', 'gempak', 'awips', 'vrfy', 'metp', 'arch', 'ocnpost'] - -def main(): - parser = ArgumentParser(description='Setup XML workflow and CRONTAB for a forecast only experiment.', formatter_class=ArgumentDefaultsHelpFormatter) - parser.add_argument('--expdir',help='full path to experiment directory containing config files', type=str, required=False, default=os.environ['PWD']) - parser.add_argument('--cdump',help='cycle to run forecasts', type=str, choices=['gdas', 'gfs'], default='gfs', required=False) - - args = parser.parse_args() - - configs = wfu.get_configs(args.expdir) - - _base = wfu.config_parser([wfu.find_config('config.base', configs)]) - - if not os.path.samefile(args.expdir,_base['EXPDIR']): - print('MISMATCH in experiment directories!') - print(f'''config.base: EXPDIR = {repr(_base['EXPDIR'])}''') - print(f'input arg: --expdir = {repr(args.expdir)}') - sys.exit(1) - - dict_configs = wfu.source_configs(configs, taskplan) - - dict_configs['base']['CDUMP'] = args.cdump - - # First create workflow XML - create_xml(dict_configs) - - # Next create the crontab - wfu.create_crontab(dict_configs['base']) - - return - - -def get_preamble(): - ''' - Generate preamble for XML - ''' - - strings = [] - - strings.append('\n') - strings.append('\n') - - return ''.join(strings) - - -def get_definitions(base): - ''' - Create entities related to the experiment - ''' - - machine = base.get('machine', wfu.detectMachine()) - scheduler = wfu.get_scheduler(machine) - hpssarch = base.get('HPSSARCH', 'NO').upper() - - strings = [] - - strings.append('\n') - strings.append('\t\n') - strings.append(f'''\t\n''') - strings.append(f'''\t\n''') - strings.append(f'''\t\n''') - strings.append('\n') - strings.append('\t\n') - strings.append(f'''\t\n''') - strings.append(f'''\t\n''') - if base['INTERVAL'] is None: - print('cycle INTERVAL cannot be None') - sys.exit(1) - strings.append(f'''\t\n''') - strings.append('\n') - strings.append('\t\n') - strings.append(f'''\t\n''') - strings.append('\n') - strings.append('\t\n') - strings.append(f'''\t\n''') - strings.append(f'''\t\n''') - strings.append(f'''\t\n''') - strings.append('\n') - strings.append('\t\n') - strings.append(f'''\t\n''') - strings.append(f'''\t\n''') - strings.append('\n') - strings.append('\t\n') - strings.append(f'''\t\n''') - strings.append(f'''\t\n''') - strings.append(f'''\t\n''') - if scheduler in ['slurm']: - strings.append(f'''\t\n''') - strings.append(f'''\t\n''') - strings.append(f'\t\n') - strings.append('\n') - strings.append('\t\n') - strings.append(f'''\t\n''') - strings.append('\n') - strings.append('\t\n') - strings.append('\t\n') - strings.append('\t\n') - strings.append('\t\n') - strings.append('\n') - - return ''.join(strings) - - -def get_resources(dict_configs, cdump='gdas'): - ''' - Create resource entities - ''' - - strings = [] - - strings.append('\t\n') - strings.append('\n') - - base = dict_configs['base'] - machine = base.get('machine', wfu.detectMachine()) - 
reservation = base.get('RESERVATION', 'NONE').upper() - scheduler = wfu.get_scheduler(machine) - - do_wave = base.get('DO_WAVE', 'NO').upper() - do_bufrsnd = base.get('DO_BUFRSND', 'NO').upper() - do_gempak = base.get('DO_GEMPAK', 'NO').upper() - do_awips = base.get('DO_AWIPS', 'NO').upper() - do_metp = base.get('DO_METP', 'NO').upper() - - for task in taskplan: - - cfg = dict_configs[task] - - wtimestr, resstr, queuestr, memstr, natstr = wfu.get_resources(machine, cfg, task, reservation, cdump=cdump) - - taskstr = f'{task.upper()}_{cdump.upper()}' - - strings.append(f'\t\n') - if scheduler in ['slurm']: - if task in ['getic', 'arch']: - strings.append(f'\t\n') - else: - strings.append(f'\t\n') - - strings.append(f'\t\n') - strings.append(f'\t\n') - if len(memstr) != 0: - strings.append(f'\t\n') - strings.append(f'\t\n') - - strings.append('\n') - - strings.append('\t\n') - - return ''.join(strings) - - -def get_postgroups(post, cdump='gdas'): - - fhmin = post['FHMIN'] - fhmax = post['FHMAX'] - fhout = post['FHOUT'] - - # Get a list of all forecast hours - if cdump in ['gdas']: - fhrs = list(range(fhmin, fhmax + fhout, fhout)) - elif cdump in ['gfs']: - fhmax = np.max([post['FHMAX_GFS_00'], post['FHMAX_GFS_06'], post['FHMAX_GFS_12'], post['FHMAX_GFS_18']]) - fhout = post['FHOUT_GFS'] - fhmax_hf = post['FHMAX_HF_GFS'] - fhout_hf = post['FHOUT_HF_GFS'] - fhrs_hf = list(range(fhmin, fhmax_hf + fhout_hf, fhout_hf)) - fhrs = fhrs_hf + list(range(fhrs_hf[-1] + fhout, fhmax + fhout, fhout)) - - npostgrp = post['NPOSTGRP'] - ngrps = npostgrp if len(fhrs) > npostgrp else len(fhrs) - - fhrs = [f'f{f:03d}' for f in fhrs] - fhrs = np.array_split(fhrs, ngrps) - fhrs = [f.tolist() for f in fhrs] - - fhrgrp = ' '.join([f'_{f[0]}-{f[-1]}' for f in fhrs]) - fhrdep = ' '.join([f[-1] for f in fhrs]) - fhrlst = ' '.join(['_'.join(f) for f in fhrs]) - - return fhrgrp, fhrdep, fhrlst - - -def get_workflow(dict_configs, cdump='gdas'): - ''' - Create tasks for forecast only workflow - ''' - - envars = [] - envars.append(rocoto.create_envar(name='RUN_ENVIR', value='&RUN_ENVIR;')) - envars.append(rocoto.create_envar(name='HOMEgfs', value='&HOMEgfs;')) - envars.append(rocoto.create_envar(name='EXPDIR', value='&EXPDIR;')) - envars.append(rocoto.create_envar(name='CDATE', value='@Y@m@d@H')) - envars.append(rocoto.create_envar(name='CDUMP', value='&CDUMP;')) - envars.append(rocoto.create_envar(name='PDY', value='@Y@m@d')) - envars.append(rocoto.create_envar(name='cyc', value='@H')) - - base = dict_configs['base'] - machine = base.get('machine', wfu.detectMachine()) - hpssarch = base.get('HPSSARCH', 'NO').upper() - app = base.get('APP', "ATM").upper() - do_wave = base.get('DO_WAVE', 'NO').upper() - do_ocean = base.get('DO_OCN', 'NO').upper() - do_ice = base.get('DO_ICE', 'NO').upper() - do_aero = base.get('DO_AERO', 'NO').upper() - do_wave_cdump = base.get('WAVE_CDUMP', 'BOTH').upper() - if do_wave in ['YES']: - do_wave_bnd = dict_configs['wavepostsbs'].get('DOBNDPNT_WAVE', "YES").upper() - do_bufrsnd = base.get('DO_BUFRSND', 'NO').upper() - do_gempak = base.get('DO_GEMPAK', 'NO').upper() - do_awips = base.get('DO_AWIPS', 'NO').upper() - do_wafs = base.get('WAFSF', 'NO').upper() - do_vrfy = base.get('DO_VRFY', 'YES').upper() - do_metp = base.get('DO_METP', 'NO').upper() - n_tiles = 6 - - tasks = [] - - if app in ['S2S', 'S2SW']: - # Copy prototype ICs - deps = [] - base_cplic = dict_configs['coupled_ic']['BASE_CPLIC'] - - # ATM ICs - for file in ['gfs_ctrl.nc'] + [f'{datatype}_data.tile{tile_index}.nc' for datatype in 
['gfs', 'sfc'] for tile_index in range(1, n_tiles + 1)]: - data = f"{base_cplic}/{dict_configs['coupled_ic'][f'CPL_ATMIC']}/@Y@m@d@H/&CDUMP;/{base.get('CASE','C384')}/INPUT/{file}" - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - - # Ocean ICs - if do_ocean in ["YES"]: - ocn_res = base.get('OCNRES', '025') - for res in ['res'] + [f'res_{res_index}' for res_index in range(1, 5)]: - data = f"{base_cplic}/{dict_configs['coupled_ic'][f'CPL_OCNIC']}/@Y@m@d@H/ocn/{ocn_res:03d}/MOM.{res}.nc" - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - - # Ice ICs - if do_ice in ["YES"]: - ice_res = base.get('ICERES', '025') - ice_res_dec = f'{float(ice_res)/100:.2f}' - data = f"{base_cplic}/{dict_configs['coupled_ic'][f'CPL_ICEIC']}/@Y@m@d@H/ice/{ice_res:03d}/cice5_model_{ice_res_dec}.res_@Y@m@d@H.nc" - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - - # Wave ICs - if do_wave in ["YES"]: - for wave_grid in dict_configs['waveinit']['waveGRD'].split(): - data = f"{base_cplic}/{dict_configs['coupled_ic'][f'CPL_WAVIC']}/@Y@m@d@H/wav/{wave_grid}/@Y@m@d.@H0000.restart.{wave_grid}" - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - task = wfu.create_wf_task('coupled_ic', cdump=cdump, envar=envars, dependency=dependencies) - tasks.append(task) - tasks.append('\n') - - else: - if hpssarch in ['YES']: - deps = [] - data = '&ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/INPUT/sfc_data.tile6.nc' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = '&ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RESTART/@Y@m@d.@H0000.sfcanl_data.tile6.nc' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='nor', dep=deps) - - task = wfu.create_wf_task('getic', cdump=cdump, envar=envars, dependency=dependencies) - tasks.append(task) - tasks.append('\n') - - # init - deps = [] - data = '&ROTDIR;/&CDUMP;.@Y@m@d/@H/gfs.t@Hz.sanl' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = '&ROTDIR;/&CDUMP;.@Y@m@d/@H/gfs.t@Hz.atmanl.nemsio' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = '&ROTDIR;/&CDUMP;.@Y@m@d/@H/gfs.t@Hz.atmanl.nc' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = '&ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/gfs.t@Hz.atmanl.nc' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = '&ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RESTART/@Y@m@d.@H0000.sfcanl_data.tile6.nc' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='or', dep=deps) - - if hpssarch in ['YES']: - deps = [] - dep_dict = {'type': 'task', 'name': f'{cdump}getic'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies2 = rocoto.create_dependency(dep=deps) - - deps = [] - deps.append(dependencies) - if hpssarch in ['YES']: - deps.append(dependencies2) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - - task = wfu.create_wf_task('init', cdump=cdump, envar=envars, dependency=dependencies) - tasks.append(task) - tasks.append('\n') - - # waveinit - if do_wave in ['Y', 'YES'] and do_wave_cdump in ['GFS', 'BOTH']: - task = 
wfu.create_wf_task('waveinit', cdump=cdump, envar=envars) - tasks.append(task) - tasks.append('\n') - - # waveprep - if do_wave in ['Y', 'YES'] and do_wave_cdump in ['GFS', 'BOTH']: - deps = [] - dep_dict = {'type': 'task', 'name': f'{cdump}waveinit'} - deps.append(rocoto.add_dependency(dep_dict)) - if app not in ['S2S', 'S2SW']: - dep_dict = {'type': 'task', 'name': f'{cdump}init'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - task = wfu.create_wf_task('waveprep', cdump=cdump, envar=envars, dependency=dependencies) - tasks.append(task) - tasks.append('\n') - - # aerosol_init - if do_aero in ['Y', 'YES']: - deps = [] - if app in ['S2S', 'S2SW']: - dep_dict = {'type': 'task', 'name': 'coupled_ic'} - else: - dep_dict = {'type': 'task', 'name': f'{cdump}init'} - - deps.append(rocoto.add_dependency(dep_dict)) - - # Files from current cycle - files = ['gfs_ctrl.nc'] + [f'gfs_data.tile{tile_index}.nc' for tile_index in range(1, n_tiles + 1)] - for file in files: - data = f'&ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/INPUT/{file}' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - - # Files from previous cycle - dep_dict = {'type': 'cycleexist', 'offset': f'-{base["INTERVAL"]}'} - deps.append(rocoto.add_dependency(dep_dict)) - - files = [f'@Y@m@d.@H0000.fv_core.res.nc'] + \ - [f'@Y@m@d.@H0000.fv_core.res.tile{tile_index}.nc' for tile_index in range(1, n_tiles + 1)] + \ - [f'@Y@m@d.@H0000.fv_tracer.res.tile{tile_index}.nc' for tile_index in range(1, n_tiles + 1)] - - data = f'&ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RERUN_RESTART/' - dep_dict = {'type': 'data', 'data': data, 'offset': f'-{base["INTERVAL"]}'} - # Hack off the trailing tag because we are going to concatenate with the rest - dependency1 = rocoto.add_dependency(dep_dict)[:-10] - for file in files: - dep_dict = {'type': 'data', 'data': file} - # Hack off the leading tag to join with the earlier one - dependency2 = rocoto.add_dependency(dep_dict)[9:] - # Combine the two into a dependency with two different cyclestr tags - deps.append(dependency1 + dependency2) - - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - task = wfu.create_wf_task('aerosol_init', cdump=cdump, envar=envars, dependency=dependencies) - tasks.append(task) - tasks.append('\n') - - # fcst - deps = [] - data = '&ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/INPUT/sfc_data.tile6.nc' - dep_dict = {'type':'data', 'data':data} - deps.append(rocoto.add_dependency(dep_dict)) - data = '&ROTDIR;/&CDUMP;.@Y@m@d/@H/atmos/RESTART/@Y@m@d.@H0000.sfcanl_data.tile6.nc' - dep_dict = {'type':'data', 'data':data} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='or', dep=deps) - - if do_wave in ['Y', 'YES'] and do_wave_cdump in ['GFS', 'BOTH']: - deps = [] - dep_dict = {'type': 'task', 'name': f'{cdump}waveprep'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies2 = rocoto.create_dependency(dep_condition='and', dep=deps) - - if do_aero in ['Y', 'YES']: - deps = [] - dep_dict = {'type': 'task', 'name': f'{cdump}aerosol_init'} - deps.append(rocoto.add_dependency(dep_dict)) - deps2 = [] - dep_dict = {'type': 'cycleexist', 'offset': f'-{base["INTERVAL"]}'} - deps2.append(rocoto.add_dependency(dep_dict)) - deps.append(rocoto.create_dependency(dep_condition='not', dep=deps2)) - dependencies3 = rocoto.create_dependency(dep_condition='or', dep=deps) - - deps = [] - deps.append(dependencies) - if do_wave in ['Y', 'YES'] and 
do_wave_cdump in ['GFS', 'BOTH']: - deps.append(dependencies2) - if do_aero in ['Y', 'YES']: - deps.append(dependencies3) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - - task = wfu.create_wf_task('fcst', cdump=cdump, envar=envars, dependency=dependencies) - tasks.append(task) - tasks.append('\n') - - # post - deps = [] - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.log#dep#.txt' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - fhrgrp = rocoto.create_envar(name='FHRGRP', value='#grp#') - fhrlst = rocoto.create_envar(name='FHRLST', value='#lst#') - ROTDIR = rocoto.create_envar(name='ROTDIR', value='&ROTDIR;') - postenvars = envars + [fhrgrp] + [fhrlst] + [ROTDIR] - varname1, varname2, varname3 = 'grp', 'dep', 'lst' - varval1, varval2, varval3 = get_postgroups(dict_configs['post'], cdump=cdump) - vardict = {varname2: varval2, varname3: varval3} - task = wfu.create_wf_task('post', cdump=cdump, envar=postenvars, dependency=dependencies, - metatask='post', varname=varname1, varval=varval1, vardict=vardict) - tasks.append(task) - tasks.append('\n') - - # wavepostsbs - if do_wave in ['Y', 'YES'] and do_wave_cdump in ['GFS', 'BOTH']: - deps = [] - for wave_grid in dict_configs['wavepostsbs']['waveGRD'].split(): - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/wave/rundata/{cdump}wave.out_grd.{wave_grid}.@Y@m@d.@H0000' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - task = wfu.create_wf_task('wavepostsbs', cdump=cdump, envar=envars, dependency=dependencies) - tasks.append(task) - tasks.append('\n') - - # wavepostbndpnt - if do_wave in ['Y', 'YES'] and do_wave_bnd in ['YES']: - deps = [] - dep_dict = {'type': 'task', 'name': f'{cdump}fcst'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - task = wfu.create_wf_task('wavepostbndpnt', cdump=cdump, envar=envars, dependency=dependencies) - tasks.append(task) - tasks.append('\n') - - # wavepostbndpntbll - if do_wave in ['Y', 'YES'] and do_wave_bnd in ['YES']: - deps = [] - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.logf180.txt' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - task = wfu.create_wf_task('wavepostbndpntbll', cdump=cdump, envar=envars, dependency=dependencies) - tasks.append(task) - tasks.append('\n') - - # wavepostpnt - if do_wave in ['Y', 'YES']: - deps = [] - dep_dict = {'type': 'task', 'name': f'{cdump}fcst'} - deps.append(rocoto.add_dependency(dep_dict)) - if do_wave_bnd in ['YES']: - dep_dict = {'type': 'task', 'name': f'{cdump}wavepostbndpntbll'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - task = wfu.create_wf_task('wavepostpnt', cdump=cdump, envar=envars, dependency=dependencies) - tasks.append(task) - tasks.append('\n') - - # wavegempak - if do_wave in ['Y', 'YES'] and do_gempak in ['Y', 'YES']: - deps = [] - dep_dict = {'type': 'task', 'name': f'{cdump}wavepostsbs'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - task = wfu.create_wf_task('wavegempak', cdump=cdump, envar=envars, dependency=dependencies) - tasks.append(task) - tasks.append('\n') - - # waveawipsbulls - if do_wave in ['Y', 'YES'] and do_awips in 
['Y', 'YES']: - deps = [] - dep_dict = {'type':'task', 'name':f'{cdump}wavepostsbs'} - deps.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type':'task', 'name':f'{cdump}wavepostpnt'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - task = wfu.create_wf_task('waveawipsbulls', cdump=cdump, envar=envars, dependency=dependencies) - tasks.append(task) - tasks.append('\n') - - # waveawipsgridded - if do_wave in ['Y', 'YES'] and do_awips in ['Y', 'YES']: - deps = [] - dep_dict = {'type':'task', 'name':f'{cdump}wavepostsbs'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - task = wfu.create_wf_task('waveawipsgridded', cdump=cdump, envar=envars, dependency=dependencies) - tasks.append(task) - tasks.append('\n') - - # ocnpost - if do_ocean in ['YES']: - deps = [] - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.log#dep#.txt' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - fhrgrp = rocoto.create_envar(name='FHRGRP', value='#grp#') - fhrlst = rocoto.create_envar(name='FHRLST', value='#lst#') - ROTDIR = rocoto.create_envar(name='ROTDIR', value='&ROTDIR;') - postenvars = envars + [fhrgrp] + [fhrlst] + [ROTDIR] - varname1, varname2, varname3 = 'grp', 'dep', 'lst' - varval1, varval2, varval3 = get_postgroups(dict_configs['ocnpost'], cdump=cdump) - vardict = {varname2: varval2, varname3: varval3} - task = wfu.create_wf_task('ocnpost', cdump=cdump, envar=postenvars, dependency=dependencies, - metatask='ocnpost', varname=varname1, varval=varval1, vardict=vardict) - tasks.append(task) - tasks.append('\n') - - # wafs - if do_wafs in ['Y', 'YES']: - deps = [] - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if006' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if012' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if015' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if018' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if021' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if024' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if027' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if030' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if033' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if036' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - task = wfu.create_wf_task('wafs', 
cdump=cdump, envar=envars, dependency=dependencies) - tasks.append(task) - tasks.append('\n') - - # wafsgcip - if do_wafs in ['Y', 'YES']: - deps = [] - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if006' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if012' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if015' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if018' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if021' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if024' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if027' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if030' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if033' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if036' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - task = wfu.create_wf_task('wafsgcip', cdump=cdump, envar=envars, dependency=dependencies) - tasks.append(task) - tasks.append('\n') - - # wafsgrib2 - if do_wafs in ['Y', 'YES']: - deps = [] - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if006' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if012' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if015' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if018' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if021' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if024' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if027' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if030' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if033' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = 
f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if036' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - task = wfu.create_wf_task('wafsgrib2', cdump=cdump, envar=envars, dependency=dependencies) - tasks.append(task) - tasks.append('\n') - - # wafsgrib20p25 - if do_wafs in ['Y', 'YES']: - deps = [] - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if006' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if012' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if015' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if018' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if021' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if024' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if027' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if030' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if033' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - data = f'&ROTDIR;/{cdump}.@Y@m@d/@H/atmos/{cdump}.t@Hz.wafs.grb2if036' - dep_dict = {'type': 'data', 'data': data} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - task = wfu.create_wf_task('wafsgrib20p25', cdump=cdump, envar=envars, dependency=dependencies) - tasks.append(task) - tasks.append('\n') - - # wafsblending - if do_wafs in ['Y', 'YES']: - deps = [] - dep_dict = {'type': 'task', 'name': f'{cdump}wafsgrib2'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - task = wfu.create_wf_task('wafsblending', cdump=cdump, envar=envars, dependency=dependencies) - tasks.append(task) - tasks.append('\n') - - # wafsblending0p25 - if do_wafs in ['Y', 'YES']: - deps = [] - dep_dict = {'type': 'task', 'name': f'{cdump}wafsgrib20p25'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - task = wfu.create_wf_task('wafsblending0p25', cdump=cdump, envar=envars, dependency=dependencies) - tasks.append(task) - tasks.append('\n') - - #postsnd - if do_bufrsnd in ['Y', 'YES']: - deps = [] - dep_dict = {'type': 'task', 'name': f'{cdump}fcst'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - task = wfu.create_wf_task('postsnd', cdump=cdump, envar=envars, dependency=dependencies) - tasks.append(task) - tasks.append('\n') - - # awips - if do_awips in ['Y', 'YES']: - deps = [] - dep_dict = {'type': 'metatask', 'name': f'{cdump}post'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = 
rocoto.create_dependency(dep=deps) - fhrgrp = rocoto.create_envar(name='FHRGRP', value='#grp#') - fhrlst = rocoto.create_envar(name='FHRLST', value='#lst#') - ROTDIR = rocoto.create_envar(name='ROTDIR', value='&ROTDIR;') - awipsenvars = envars + [fhrgrp] + [fhrlst] + [ROTDIR] - varname1, varname2, varname3 = 'grp', 'dep', 'lst' - varval1, varval2, varval3 = get_awipsgroups(dict_configs['awips'], cdump=cdump) - vardict = {varname2: varval2, varname3: varval3} - task = wfu.create_wf_task('awips', cdump=cdump, envar=awipsenvars, dependency=dependencies, - metatask='awips', varname=varname1, varval=varval1, vardict=vardict) - tasks.append(task) - tasks.append('\n') - - # gempak - if do_gempak in ['Y', 'YES']: - deps = [] - dep_dict = {'type': 'metatask', 'name': f'{cdump}post'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - task = wfu.create_wf_task('gempak', cdump=cdump, envar=envars, dependency=dependencies) - tasks.append(task) - tasks.append('\n') - - # vrfy - if do_vrfy in ['Y', 'YES']: - deps = [] - dep_dict = {'type':'metatask', 'name':f'{cdump}post'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep=deps) - task = wfu.create_wf_task('vrfy', cdump=cdump, envar=envars, dependency=dependencies) - tasks.append(task) - tasks.append('\n') - - # metp - if do_metp in ['Y', 'YES']: - deps = [] - dep_dict = {'type':'metatask', 'name':f'{cdump}post'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - sdate_gfs = rocoto.create_envar(name='SDATE_GFS', value='&SDATE;') - metpcase = rocoto.create_envar(name='METPCASE', value='#metpcase#') - metpenvars = envars + [sdate_gfs] + [metpcase] - varname1 = 'metpcase' - varval1 = 'g2g1 g2o1 pcp1' - task = wfu.create_wf_task('metp', cdump=cdump, envar=metpenvars, dependency=dependencies, - metatask='metp', varname=varname1, varval=varval1) - tasks.append(task) - tasks.append('\n') - - # arch - deps = [] - dep_dict = {'type':'metatask', 'name':f'{cdump}post'} - deps.append(rocoto.add_dependency(dep_dict)) - if do_vrfy in ['Y', 'YES']: - dep_dict = {'type':'task', 'name':f'{cdump}vrfy'} - deps.append(rocoto.add_dependency(dep_dict)) - if cdump in ['gfs'] and do_metp in ['Y', 'YES']: - dep_dict = {'type':'metatask', 'name':f'{cdump}metp'} - deps.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type':'streq', 'left':'&ARCHIVE_TO_HPSS;', 'right':f'{hpssarch}'} - deps.append(rocoto.add_dependency(dep_dict)) - if do_wave in ['Y', 'YES']: - dep_dict = {'type': 'task', 'name': f'{cdump}wavepostsbs'} - deps.append(rocoto.add_dependency(dep_dict)) - dep_dict = {'type': 'task', 'name': f'{cdump}wavepostpnt'} - deps.append(rocoto.add_dependency(dep_dict)) - if do_wave_bnd in ['YES']: - dep_dict = {'type': 'task', 'name': f'{cdump}wavepostbndpnt'} - deps.append(rocoto.add_dependency(dep_dict)) - if do_ocean in ['Y', 'YES']: - dep_dict = {'type': 'metatask', 'name': f'{cdump}ocnpost'} - deps.append(rocoto.add_dependency(dep_dict)) - dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) - task = wfu.create_wf_task('arch', cdump=cdump, envar=envars, dependency=dependencies, final=True) - tasks.append(task) - tasks.append('\n') - - return ''.join(tasks) - - -def get_workflow_body(dict_configs, cdump='gdas'): - ''' - Create the workflow body - ''' - - strings = [] - - strings.append('\n') - strings.append(']>\n') - strings.append('\n') - strings.append('\n') - strings.append('\n') - 
strings.append('\t&EXPDIR;/logs/@Y@m@d@H.log\n') - strings.append('\n') - strings.append('\t\n') - strings.append(f'\t&SDATE; &EDATE; &INTERVAL;\n') - strings.append('\n') - strings.append(get_workflow(dict_configs, cdump=cdump)) - strings.append('\n') - strings.append('\n') - - return ''.join(strings) - - -def create_xml(dict_configs): - ''' - Given an experiment directory containing config files and - XML directory containing XML templates, create the workflow XML - ''' - - - dict_configs['base']['INTERVAL'] = wfu.get_gfs_interval(dict_configs['base']['gfs_cyc']) - base = dict_configs['base'] - - preamble = get_preamble() - definitions = get_definitions(base) - resources = get_resources(dict_configs, cdump=base['CDUMP']) - workflow = get_workflow_body(dict_configs, cdump=base['CDUMP']) - - # Removes &MEMORY_JOB_DUMP post mortem from gdas tasks - temp_workflow = '' - memory_dict = [] - for each_resource_string in re.split(r'(\s+)', resources): - if 'MEMORY' in each_resource_string: - memory_dict.append(each_resource_string) - for each_line in re.split(r'(\s+)', workflow): - if 'MEMORY' not in each_line: - temp_workflow += each_line - else: - if any( substring in each_line for substring in memory_dict): - temp_workflow += each_line - workflow = temp_workflow - - # Start writing the XML file - fh = open(f'{base["EXPDIR"]}/{base["PSLOT"]}.xml', 'w') - - fh.write(preamble) - fh.write(definitions) - fh.write(resources) - fh.write(workflow) - - fh.close() - - return - -if __name__ == '__main__': - main() - sys.exit(0) diff --git a/ush/rocoto/workflow_utils.py b/ush/rocoto/workflow_utils.py deleted file mode 120000 index b90b3bf9bf..0000000000 --- a/ush/rocoto/workflow_utils.py +++ /dev/null @@ -1 +0,0 @@ -workflow_utils.py_gsl \ No newline at end of file diff --git a/ush/rstprod.sh b/ush/rstprod.sh new file mode 100755 index 0000000000..acac0340bb --- /dev/null +++ b/ush/rstprod.sh @@ -0,0 +1,19 @@ +#! /usr/bin/env bash + +source "$HOMEgfs/ush/preamble.sh" + +#--------------------------------------------------------- +# rstprod.sh +# +# Restrict data from select sensors and satellites +#--------------------------------------------------------- + +# Restrict select sensors and satellites + +export CHGRP_CMD=${CHGRP_CMD:-"chgrp ${group_name:-rstprod}"} +rlist="saphir abi_g16" +for rtype in $rlist; do + if compgen -G "*${rtype}*" > /dev/null; then + ${CHGRP_CMD} *${rtype}* + fi +done diff --git a/ush/scale_dec.sh b/ush/scale_dec.sh index 8fba2f703b..77136d7f70 100755 --- a/ush/scale_dec.sh +++ b/ush/scale_dec.sh @@ -1,4 +1,5 @@ -#!/bin/ksh +#! /usr/bin/env bash + # # This script uses WGRIB2 to change binary scale factor # and Decimal scale factor in GRIB2 file @@ -7,11 +8,12 @@ # D = decimal scaling or the text 'same' with no quotes # B = binary scaling or the text 'same' with no quotes # -set -x + +source "$HOMEgfs/ush/preamble.sh" f=$1 -export WGRIB2=${WGRIB2:-${NWROOT}/grib_util.v1.1.0/exec/wgrib2} +export WGRIB2=${WGRIB2:-${wgrib2_ROOT}/bin/wgrib2} # export WGRIB2=/gpfs/dell1/nco/ops/nwprod/grib_util.v1.1.0/exec/wgrib2 @@ -22,4 +24,5 @@ $WGRIB2 $f -not_if ':(TMP|PWAT|WEASD):' -grib $f.new \ -set_scaling 0 0 -grib_out $f.new export err=$?; err_chk mv $f.new $f + exit 0 diff --git a/ush/syndat_getjtbul.sh b/ush/syndat_getjtbul.sh index dc3c0f6482..c17067ff72 100755 --- a/ush/syndat_getjtbul.sh +++ b/ush/syndat_getjtbul.sh @@ -1,3 +1,4 @@ +#! 
/usr/bin/env bash # Script to recover JTWC Bulletins from Tank # $TANK_TROPCY/$ymddir/wtxtbul/tropcyc @@ -25,25 +26,15 @@ # jlogfile - path to job log file (skipped over by this script if not # passed in) - -set -xua +source "$HOMEgfs/ush/preamble.sh" EXECSYND=${EXECSYND:-${HOMESYND}/exec} cd $DATA if [ "$#" -ne '1' ]; then - msg="**NON-FATAL ERROR PROGRAM SYNDAT_GETJTBUL run date not in \ + echo "**NON-FATAL ERROR PROGRAM SYNDAT_GETJTBUL run date not in \ positional parameter 1" - set +x - echo - echo $msg - echo - set -x - echo $msg >> $pgmout - set +u - [ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" - set -u echo "Leaving sub-shell syndat_getjtbul.sh to recover JTWC Bulletins" \ >> $pgmout @@ -94,7 +85,7 @@ echo " pdym1 is $pdym1" echo echo " ymddir is $ymddir" echo -set -x +set_trace find=$ymd" "$hour echo "looking for string $find in $jtwcdir/tropcyc" >> $pgmout @@ -124,18 +115,15 @@ fi perl -wpi.ORIG -e 's/(^.... ... )(\S{9,9})(\S{1,})/$1$2/' jtwcbul diff jtwcbul.ORIG jtwcbul > jtwcbul_changes.txt if [ -s jtwcbul_changes.txt ]; then - msg="***WARNING: SOME JTWC VITALS SEGMENTS REQUIRED PRELIMINARY MODIFICATION!" - [ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" - echo -e "\n${msg}. Changes follow:" >> $pgmout - cat jtwcbul_changes.txt >> $pgmout - echo -e "\n" >> $pgmout + echo "***WARNING: SOME JTWC VITALS SEGMENTS REQUIRED PRELIMINARY MODIFICATION!" + cat jtwcbul_changes.txt fi # Execute bulletin processing [ -s jtwcbul ] && echo "Processing JTWC bulletin halfs into tcvitals records" >> $pgmout -pgm=$(basename $EXECSYND/syndat_getjtbul) +pgm=$(basename $EXECSYND/syndat_getjtbul.x) export pgm if [ -s prep_step ]; then set +u @@ -150,7 +138,7 @@ rm -f fnoc export FORT11=jtwcbul export FORT51=fnoc -time -p $EXECSYND/syndat_getjtbul >> $pgmout 2> errfile +time -p ${EXECSYND}/${pgm} >> $pgmout 2> errfile errget=$? ###cat errfile cat errfile >> $pgmout @@ -159,7 +147,7 @@ set +x echo echo 'The foreground exit status for SYNDAT_GETJTBUL is ' $errget echo -set -x +set_trace if [ "$errget" -gt '0' ];then if [ "$errget" -eq '1' ];then msg="No JTWC bulletins in $jtwcdir/tropcyc, no JTWC tcvitals \ @@ -175,30 +163,12 @@ available for qctropcy for $CDATE10" fi fi else - msg="**NON-FATAL ERROR PROGRAM SYNDAT_GETJTBUL FOR $CDATE10 \ + echo "**NON-FATAL ERROR PROGRAM SYNDAT_GETJTBUL FOR $CDATE10 \ RETURN CODE $errget" fi - set +x - echo - echo $msg - echo - set -x - echo $msg >> $pgmout - set +u - [ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" - set -u else - msg="program SYNDAT_GETJTBUL completed normally for $CDATE10, JTWC \ + echo "program SYNDAT_GETJTBUL completed normally for $CDATE10, JTWC \ rec. passed to qctropcy" - set +x - echo - echo $msg - echo - set -x - echo $msg >> $pgmout - set +u - [ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" - set -u fi set +x echo @@ -206,7 +176,7 @@ echo "----------------------------------------------------------" echo "*********** COMPLETED PROGRAM syndat_getjtbul **********" echo "----------------------------------------------------------" echo -set -x +set_trace if [ "$errget" -eq '0' ];then echo "Completed JTWC tcvitals records are:" >> $pgmout @@ -215,6 +185,6 @@ fi echo "Leaving sub-shell syndat_getjtbul.sh to recover JTWC Bulletins" \ >> $pgmout -echo " " >> $pgmout +echo " " >> "${pgmout}" exit diff --git a/ush/syndat_qctropcy.sh b/ush/syndat_qctropcy.sh index 1f1f64b548..5b5b4ba34b 100755 --- a/ush/syndat_qctropcy.sh +++ b/ush/syndat_qctropcy.sh @@ -1,5 +1,4 @@ - -set +x +#! 
/usr/bin/env bash # SCRIPT NAME : syndat_qctropcy.sh # AUTHOR : Steven Lord/Hua-Lu pan/Dennis Keyser/Diane Stokes @@ -12,19 +11,19 @@ set +x # prediction centers by the executable syndat_qctropcy # # -echo "History: JUN 1997 - First implementation of this utility script" -echo " JUL 1997 - Added tcvitals made manually by SDM; Added " -echo " jtwc/fnoc tcvitals " -echo " MAR 2000 Converted to IBM-SP " -echo " MAR 2013 Converted to WCOSS " -echo " Added option files_override which can set " -echo " namelist var used for logical variable " -echo " FILES in syndat_qctropcy to control final " -echo " copying of records and file manipulation. " -echo " (typically F for testing, otherwise not set)" -echo " Added dateck fallback if archive file misg." -echo " OCT 2013 Remove defaults for parm, exec, fix and ush " -echo " directories. These must now be passed in. " +# echo "History: JUN 1997 - First implementation of this utility script" +# echo " JUL 1997 - Added tcvitals made manually by SDM; Added " +# echo " jtwc/fnoc tcvitals " +# echo " MAR 2000 Converted to IBM-SP " +# echo " MAR 2013 Converted to WCOSS " +# echo " Added option files_override which can set " +# echo " namelist var used for logical variable " +# echo " FILES in syndat_qctropcy to control final " +# echo " copying of records and file manipulation. " +# echo " (typically F for testing, otherwise not set)" +# echo " Added dateck fallback if archive file misg." +# echo " OCT 2013 Remove defaults for parm, exec, fix and ush " +# echo " directories. These must now be passed in. " # # # Positional parameters passed in: @@ -64,22 +63,20 @@ echo " directories. These must now be passed in. " # copy_back - switch to copy updated files back to archive directory and # to tcvitals directory # (Default: YES) -# jlogfile - path to job log file (skipped over by this script if not -# passed in) # SENDCOM switch copy output files to $COMSP # (Default: YES) # files_override - switch to override default "files" setting for given run # (Default: not set) # TIMEIT - optional time and resource reporting (Default: not set) -set -xua +source "$HOMEgfs/ush/preamble.sh" ARCHSYND=${ARCHSYND:-$COMROOTp3/gfs/prod/syndat} HOMENHCp1=${HOMENHCp1:-/gpfs/?p1/nhc/save/guidance/storm-data/ncep} HOMENHC=${HOMENHC:-/gpfs/dell2/nhc/save/guidance/storm-data/ncep} TANK_TROPCY=${TANK_TROPCY:-${DCOMROOT}/us007003} -FIXSYND=${FIXSYND:-$HOMEgfs/fix/fix_am} +FIXSYND=${FIXSYND:-$HOMEgfs/fix/am} USHSYND=${USHSYND:-$HOMEgfs/ush} EXECSYND=${EXECSYND:-$HOMEgfs/exec} PARMSYND=${PARMSYND:-$HOMEgfs/parm/relo} @@ -96,11 +93,8 @@ set +x echo echo $msg echo -set -x +set_trace echo $msg >> $pgmout -set +u -[ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" -set -u if [ "$#" -ne '1' ]; then msg="**NON-FATAL ERROR PROGRAM SYNDAT_QCTROPCY run date not in \ @@ -109,33 +103,29 @@ positional parameter 1" echo echo $msg echo - set -x + set_trace echo $msg >> $pgmout - set +u - [ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" - set -u msg="**NO TROPICAL CYCLONE tcvitals processed --> non-fatal" set +x echo echo $msg echo - set -x + set_trace echo $msg >> $pgmout - set +u - [ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" - set -u -# Copy null files into "${COMSP}syndata.tcvitals.$tmmark" and -# "${COMSP}jtwc-fnoc.tcvitals.$tmmark" so later ftp attempts will find and +# Copy null files into "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.$tmmark" and +# "${COM_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.$tmmark" so later ftp attempts will find and # copy the zero-length file and avoid wasting time with 
multiple attempts # to remote machine(s) # (Note: Only do so if files don't already exist) if [ $SENDCOM = YES ]; then - [ ! -s ${COMSP}syndata.tcvitals.$tmmark ] && \ - cp /dev/null ${COMSP}syndata.tcvitals.$tmmark - [ ! -s ${COMSP}jtwc-fnoc.tcvitals.$tmmark ] && \ - cp /dev/null ${COMSP}jtwc-fnoc.tcvitals.$tmmark + if [[ ! -s "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" ]]; then + cp "/dev/null" "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" + fi + if [[ ! -s "${COM_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark}" ]]; then + cp "/dev/null" "${COM_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark}" + fi fi exit @@ -147,7 +137,7 @@ set +x echo echo "Run date is $CDATE10" echo -set -x +set_trace year=$(echo $CDATE10 | cut -c1-4) @@ -169,11 +159,8 @@ if [ $dateck_size -lt 10 ]; then echo 1900010100 > dateck set +x echo -e "\n${msg}\n" - set -x + set_trace echo $msg >> $pgmout - set +u - [ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" - set -u fi @@ -201,11 +188,8 @@ if [ -n "$files_override" ]; then # for testing, typically want FILES=F fi set +x echo -e "\n${msg}\n" - set -x + set_trace echo $msg >> $pgmout - set +u - [ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" - set -u fi echo " &INPUT RUNID = '${net}_${tmmark}_${cyc}', FILES = $files " > vitchk.inp @@ -266,7 +250,7 @@ cp $slmask slmask.126 # Execute program syndat_qctropcy -pgm=$(basename $EXECSYND/syndat_qctropcy) +pgm=$(basename $EXECSYND/syndat_qctropcy.x) export pgm if [ -s prep_step ]; then set +u @@ -280,7 +264,7 @@ fi echo "$CDATE10" > cdate10.dat export FORT11=slmask.126 export FORT12=cdate10.dat -$EXECSYND/syndat_qctropcy >> $pgmout 2> errfile +${EXECSYND}/${pgm} >> $pgmout 2> errfile errqct=$? ###cat errfile cat errfile >> $pgmout @@ -289,40 +273,36 @@ set +x echo echo "The foreground exit status for SYNDAT_QCTROPCY is " $errqct echo -set -x +set_trace if [ "$errqct" -gt '0' ];then msg="**NON-FATAL ERROR PROGRAM SYNDAT_QCTROPCY RETURN CODE $errqct" set +x echo echo $msg echo - set -x + set_trace echo $msg >> $pgmout - set +u - [ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" - set -u msg="**NO TROPICAL CYCLONE tcvitals processed --> non-fatal" set +x echo echo $msg echo - set -x + set_trace echo $msg >> $pgmout - set +u - [ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" - set -u # In the event of a ERROR in PROGRAM SYNDAT_QCTROPCY, copy null files into -# "${COMSP}syndata.tcvitals.$tmmark" and "${COMSP}jtwc-fnoc.tcvitals.$tmmark" +# "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.$tmmark" and "${COM_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.$tmmark" # so later ftp attempts will find and copy the zero-length file and avoid # wasting time with multiple attempts to remote machine(s) # (Note: Only do so if files don't already exist) if [ $SENDCOM = YES ]; then - [ ! -s ${COMSP}syndata.tcvitals.$tmmark ] && \ - cp /dev/null ${COMSP}syndata.tcvitals.$tmmark - [ ! -s ${COMSP}jtwc-fnoc.tcvitals.$tmmark ] && \ - cp /dev/null ${COMSP}jtwc-fnoc.tcvitals.$tmmark + if [[ ! -s "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" ]]; then + cp "/dev/null" "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" + fi + if [[ ! 
-s ${COM_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark} ]]; then + cp "/dev/null" "${COM_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark}" + fi fi exit @@ -333,19 +313,7 @@ echo "----------------------------------------------------------" echo "********** COMPLETED PROGRAM syndat_qctropcy **********" echo "----------------------------------------------------------" echo -set -x - -if [ -s current ]; then - msg="program SYNDAT_QCTROPCY completed normally - tcvitals records \ -processed" -else -msg="no records available for program SYNDAT_QCTROPCY - null tcvitals file \ -produced" -fi -set +u -[ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" -set -u - +set_trace if [ "$copy_back" = 'YES' ]; then cat lthistry>>$ARCHSYND/syndat_lthistry.$year @@ -390,11 +358,8 @@ $HOMENHC/tcvitals successfully updated by syndat_qctropcy" echo echo $msg echo - set -x + set_trace echo $msg >> $pgmout - set +u - [ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" - set -u fi else @@ -405,11 +370,8 @@ not changed by syndat_qctropcy" echo echo $msg echo - set -x + set_trace echo $msg >> $pgmout - set +u - [ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" - set -u fi @@ -417,27 +379,15 @@ fi # This is the file that connects to the later RELOCATE and/or PREP scripts -[ $SENDCOM = YES ] && cp current ${COMSP}syndata.tcvitals.$tmmark +[ $SENDCOM = YES ] && cp current "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" # Create the DBNet alert if [ $SENDDBN = "YES" ] then - $DBNROOT/bin/dbn_alert MODEL GDAS_TCVITALS $job ${COMSP}syndata.tcvitals.$tmmark + "${DBNROOT}/bin/dbn_alert" "MODEL" "GDAS_TCVITALS" "${job}" "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" fi # Write JTWC/FNOC Tcvitals to /com path since not saved anywhere else -[ $SENDCOM = YES ] && cp fnoc ${COMSP}jtwc-fnoc.tcvitals.$tmmark - -msg="TROPICAL CYCLONE TCVITALS QC PROCESSING HAS COMPLETED FOR $CDATE10" -set +x -echo -echo $msg -echo -set -x -echo $msg >> $pgmout -echo " " >> $pgmout -set +u -[ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" -set -u +[ $SENDCOM = YES ] && cp fnoc "${COM_OBS}/${RUN}.${cycle}.jtwc-fnoc.tcvitals.${tmmark}" exit diff --git a/ush/trim_rh.sh b/ush/trim_rh.sh index 9140e97124..5a8903cae6 100755 --- a/ush/trim_rh.sh +++ b/ush/trim_rh.sh @@ -1,14 +1,15 @@ -#!/bin/ksh -set -x +#! /usr/bin/env bash #This is scripts is used to trim RH vaule larger than 100. # Wen Meng 12/2017: First Version +source "$HOMEgfs/ush/preamble.sh" + f=$1 -export WGRIB2=${WGRIB2:-${NWPROD:-/nwprod}/util/exec/wgrib2} +export WGRIB2=${WGRIB2:-${wgrib2_ROOT}/bin/wgrib2} -$WGRIB2 $optncpu $f -not_if ':RH:' -grib $f.new \ +$WGRIB2 ${optncpu:-} $f -not_if ':RH:' -grib $f.new \ -if ':RH:' -rpn "10:*:0.5:+:floor:1000:min:10:/" -set_grib_type same \ -set_scaling -1 0 -grib_out $f.new export err=$?; err_chk diff --git a/ush/tropcy_relocate.sh b/ush/tropcy_relocate.sh index 44205b1846..9b170ddfd0 100755 --- a/ush/tropcy_relocate.sh +++ b/ush/tropcy_relocate.sh @@ -1,4 +1,5 @@ -#!/bin/ksh +#! 
/usr/bin/env bash + #### UNIX Script Documentation Block # # Script name: tropcy_relocate.sh @@ -123,7 +124,6 @@ # -stdoutmode ordered" # USHGETGES String indicating directory path for GETGES utility ush # file -# Default is "/nwprod/util/ush" # USHRELO String indicating directory path for RELOCATE ush files # Default is "${HOMERELO}/ush" # EXECRELO String indicating directory path for RELOCATE executables @@ -141,7 +141,7 @@ # Default is "$EXECRELO/relocate_mv_nvortex" # SUPVX String indicating executable path for SUPVIT utility # program -# Default is "$EXECUTIL/supvit" +# Default is "$EXECUTIL/supvit.x" # GETTX String indicating executable path for GETTRK utility # program # Default is "$EXECUTIL/gettrk" @@ -157,8 +157,6 @@ # be used by the script. If they are not, they will be skipped # over by the script. # -# jlogfile String indicating path to joblog file -# # Exported Shell Variables: # CDATE10 String indicating the center date/time for the relocation # processing @@ -182,9 +180,7 @@ # $USHRELO/tropcy_relocate_extrkr.sh) # $DATA/err_chk (here and in child script # $USHRELO/tropcy_relocate_extrkr.sh) -# NOTE 1: postmsg above is required ONLY if "$jlogfile" is -# present. -# NOTE 2: The last three scripts above are NOT REQUIRED utilities. +# NOTE: The last three scripts above are NOT REQUIRED utilities. # If $DATA/prep_step not found, a scaled down version of it is # executed in-line. If $DATA/err_exit or $DATA/err_chk are not # found and a fatal error has occurred, then the script calling @@ -210,12 +206,12 @@ # #### -set -aux +source "$HOMEgfs/ush/preamble.sh" MACHINE=${MACHINE:-$(hostname -s | cut -c 1-3)} SENDCOM=${SENDCOM:-YES} -export NWROOT=${NWROOT:-/nwprod2} +export OPSROOT=${OPSROOT:-/lfs/h1/ops/prod} GRIBVERSION=${GRIBVERSION:-"grib2"} if [ ! -d $DATA ] ; then mkdir -p $DATA ;fi @@ -255,7 +251,7 @@ then echo "problem with obtaining date record;" echo "ABNORMAL EXIT!!!!!!!!!!!" echo - set -x + set_trace if [ -s $DATA/err_exit ]; then $DATA/err_exit else @@ -273,7 +269,7 @@ set +x echo echo "CENTER DATE/TIME FOR RELOCATION PROCESSING IS $CDATE10" echo -set -x +set_trace #---------------------------------------------------------------------------- @@ -283,13 +279,12 @@ set -x envir=${envir:-prod} if [ $MACHINE != sgi ]; then - HOMEALL=${HOMEALL:-$NWROOT} + HOMEALL=${HOMEALL:-$OPSROOT} else HOMEALL=${HOMEALL:-/disk1/users/snake/prepobs} fi HOMERELO=${HOMERELO:-${shared_global_home}} -#HOMERELO=${HOMERELO:-$NWROOT/tropcy_qc_reloc.${tropcy_qc_reloc_ver}} envir_getges=${envir_getges:-$envir} if [ $modhr -eq 0 ]; then @@ -316,7 +311,7 @@ RELOX=${RELOX:-$EXECRELO/relocate_mv_nvortex} export BKGFREQ=${BKGFREQ:-1} -SUPVX=${SUPVX:-$EXECRELO/supvit} +SUPVX=${SUPVX:-$EXECRELO/supvit.x} GETTX=${GETTX:-$EXECRELO/gettrk} ################################################ @@ -326,11 +321,7 @@ GETTX=${GETTX:-$EXECRELO/gettrk} # attempt to perform tropical cyclone relocation # ---------------------------------------------- -msg="Attempt to perform tropical cyclone relocation for $CDATE10" -set +u -##[ -n "$jlogfile" ] && $DATA/postmsg "$jlogfile" "$msg" -[ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" -set -u +echo "Attempt to perform tropical cyclone relocation for $CDATE10" if [ $modhr -ne 0 ]; then @@ -343,7 +334,7 @@ if [ $modhr -ne 0 ]; then not a multiple of 3-hrs;" echo "ABNORMAL EXIT!!!!!!!!!!!" 
echo - set -x + set_trace if [ -s $DATA/err_exit ]; then $DATA/err_exit else @@ -366,14 +357,14 @@ echo " Get TCVITALS file valid for -$fhr hrs relative to center" echo " relocation processing date/time" echo "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" echo - set -x + set_trace $USHGETGES/getges.sh -e $envir_getges -n $network_getges \ -v $CDATE10 -f $fhr -t tcvges tcvitals.m${fhr} set +x echo echo "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" echo - set -x + set_trace fi done @@ -416,7 +407,7 @@ echo " Get global sigma GUESS valid for $fhr hrs relative to center" echo " relocation processing date/time" echo "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" echo - set -x + set_trace $USHGETGES/getges.sh -e $envir_getges -n $network_getges \ -v $CDATE10 -t $stype $sges errges=$? @@ -428,7 +419,7 @@ echo "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" to center relocation date/time;" echo "ABNORMAL EXIT!!!!!!!!!!!" echo - set -x + set_trace if [ -s $DATA/err_exit ]; then $DATA/err_exit else @@ -439,28 +430,28 @@ to center relocation date/time;" fi # For center time sigma guess file obtained via getges, store pathname from -# getges into ${COMSP}sgesprep_pre-relocate_pathname.$tmmark and, for now, -# also in ${COMSP}sgesprep_pathname.$tmmark - if relocation processing stops +# getges into ${COM_OBS}/${RUN}.${cycle}.sgesprep_pre-relocate_pathname.$tmmark and, for now, +# also in ${COM_OBS}/${RUN}.${cycle}.sgesprep_pathname.$tmmark - if relocation processing stops # due to an error or due to no input tcvitals records found, then the center # time sigma guess will not be modified and this getges file will be read in # subsequent PREP processing; if relocation processing continues and the -# center sigma guess is modified, then ${COMSP}sgesprep_pathname.$tmmark will +# center sigma guess is modified, then ${COM_OBS}/${RUN}.${cycle}.sgesprep_pathname.$tmmark will # be removed later in this script {the subsequent PREP step will correctly -# update ${COMSP}sgesprep_pathname.$tmmark to point to the sgesprep file +# update ${COM_OBS}/${RUN}.${cycle}.sgesprep_pathname.$tmmark to point to the sgesprep file # updated here by the relocation} # ---------------------------------------------------------------------------- if [ $fhr = "0" ]; then - $USHGETGES/getges.sh -e $envir_getges -n $network_getges -v $CDATE10 \ - -t $stype > ${COMSP}sgesprep_pre-relocate_pathname.$tmmark - cp ${COMSP}sgesprep_pre-relocate_pathname.$tmmark \ - ${COMSP}sgesprep_pathname.$tmmark + "${USHGETGES}/getges.sh" -e "${envir_getges}" -n "${network_getges}" -v "${CDATE10}" \ + -t "${stype}" > "${COM_OBS}/${RUN}.${cycle}.sgesprep_pre-relocate_pathname.${tmmark}" + cp "${COM_OBS}/${RUN}.${cycle}.sgesprep_pre-relocate_pathname.${tmmark}" \ + "${COM_OBS}/${RUN}.${cycle}.sgesprep_pathname.${tmmark}" fi set +x echo echo "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" echo - set -x + set_trace fi if [ ! -s $pges ]; then set +x @@ -470,7 +461,7 @@ echo " Get global pressure grib GUESS valid for $fhr hrs relative to center" echo " relocation processing date/time" echo "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" echo - set -x + set_trace $USHGETGES/getges.sh -e $envir_getges -n $network_getges \ -v $CDATE10 -t $ptype $pges errges=$? 
@@ -482,7 +473,7 @@ echo "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" relative to center relocation date/time;" echo "ABNORMAL EXIT!!!!!!!!!!!" echo - set -x + set_trace if [ -s $DATA/err_exit ]; then $DATA/err_exit else @@ -495,14 +486,14 @@ relative to center relocation date/time;" echo echo "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" echo - set -x + set_trace fi done if [ -f ${tstsp}syndata.tcvitals.$tmmark ]; then cp ${tstsp}syndata.tcvitals.$tmmark tcvitals.now else - cp ${COMSP}syndata.tcvitals.$tmmark tcvitals.now + cp "${COM_OBS}/${RUN}.${cycle}.syndata.tcvitals.${tmmark}" "tcvitals.now" fi @@ -523,13 +514,10 @@ grep "$pdy $cyc" VITL errgrep=$? > tcvitals if [ $errgrep -ne 0 ] ; then - msg="NO TCVITAL RECORDS FOUND FOR $CDATE10 - EXIT TROPICAL CYCLONE \ + echo "NO TCVITAL RECORDS FOUND FOR $CDATE10 - EXIT TROPICAL CYCLONE \ RELOCATION PROCESSING" - set +u - [ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" - set -u -# The existence of ${COMSP}tropcy_relocation_status.$tmmark file will tell the +# The existence of ${COM_OBS}/${RUN}.${cycle}.tropcy_relocation_status.$tmmark file will tell the # subsequent PREP processing that RELOCATION processing occurred, echo # "NO RECORDS to process" into it to further tell PREP processing that records # were not processed by relocation and the global sigma guess was NOT @@ -537,14 +525,15 @@ RELOCATION PROCESSING" # found) # Note: When tropical cyclone relocation does run to completion and the # global sigma guess is modified, the parent script to this will echo -# "RECORDS PROCESSED" into ${COMSP}tropcy_relocation_status.$tmmark +# "RECORDS PROCESSED" into ${COM_OBS}/${RUN}.${cycle}.tropcy_relocation_status.$tmmark # assuming it doesn't already exist (meaning "NO RECORDS to process" # was NOT echoed into it here) # ---------------------------------------------------------------------------- - echo "NO RECORDS to process" > ${COMSP}tropcy_relocation_status.$tmmark - [ ! -s ${COMSP}tcvitals.relocate.$tmmark ] && \ - cp /dev/null ${COMSP}tcvitals.relocate.$tmmark + echo "NO RECORDS to process" > "${COM_OBS}/${RUN}.${cycle}.tropcy_relocation_status.${tmmark}" + if [[ ! -s "${COM_OBS}/${RUN}.${cycle}.tcvitals.relocate.${tmmark}" ]]; then + cp "/dev/null" "${COM_OBS}/${RUN}.${cycle}.tcvitals.relocate.${tmmark}" + fi else cat VITL >>tcvitals @@ -567,7 +556,7 @@ else echo "$USHRELO/tropcy_relocate_extrkr.sh failed" echo "ABNORMAL EXIT!!!!!!!!!!!" 
echo - set -x + set_trace if [ -s $DATA/err_exit ]; then $DATA/err_exit "Script $USHRELO/tropcy_relocate_extrkr.sh failed" else @@ -650,7 +639,7 @@ else # check for success # ----------------- - echo; set -x + echo; set_trace if [ "$errSTATUS" -gt '0' ]; then if [ -s $DATA/err_exit ]; then $DATA/err_exit "Script RELOCATE_GES failed" @@ -699,43 +688,41 @@ else rm -f RELOCATE_GES cmd if [ "$SENDCOM" = "YES" ]; then - cp rel_inform1 ${COMSP}inform.relocate.$tmmark - cp tcvitals ${COMSP}tcvitals.relocate.$tmmark + cp "rel_inform1" "${COM_OBS}/${RUN}.${cycle}.inform.relocate.${tmmark}" + cp "tcvitals" "${COM_OBS}/${RUN}.${cycle}.tcvitals.relocate.${tmmark}" if [ "$SENDDBN" = "YES" ]; then if test "$RUN" = "gdas1" then - $DBNROOT/bin/dbn_alert MODEL GDAS1_TCI $job ${COMSP}inform.relocate.$tmmark - $DBNROOT/bin/dbn_alert MODEL GDAS1_TCI $job ${COMSP}tcvitals.relocate.$tmmark + "${DBNROOT}/bin/dbn_alert" "MODEL" "GDAS1_TCI" "${job}" "${COM_OBS}/${RUN}.${cycle}.inform.relocate.${tmmark}" + "${DBNROOT}/bin/dbn_alert" "MODEL" "GDAS1_TCI" "${job}" "${COM_OBS}/${RUN}.${cycle}.tcvitals.relocate.${tmmark}" fi if test "$RUN" = "gfs" then - $DBNROOT/bin/dbn_alert MODEL GFS_TCI $job ${COMSP}inform.relocate.$tmmark - $DBNROOT/bin/dbn_alert MODEL GFS_TCI $job ${COMSP}tcvitals.relocate.$tmmark + "${DBNROOT}/bin/dbn_alert" "MODEL" "GFS_TCI" "${job}" "${COM_OBS}/${RUN}.${cycle}.inform.relocate.${tmmark}" + "${DBNROOT}/bin/dbn_alert" "MODEL" "GFS_TCI" "${job}" "${COM_OBS}/${RUN}.${cycle}.tcvitals.relocate.${tmmark}" fi fi fi # -------------------------------------------------------------------------- # Since relocation processing has ended sucessfully (and the center sigma -# guess has been modified), remove ${COMSP}sgesprep_pathname.$tmmark (which +# guess has been modified), remove ${COM_OBS}/${RUN}.${cycle}.sgesprep_pathname.$tmmark (which # had earlier had getges center sigma guess pathname written into it - in # case of error or no input tcvitals records found) - the subsequent PREP -# step will correctly update ${COMSP}sgesprep_pathname.$tmmark to point to +# step will correctly update ${COM_OBS}/${RUN}.${cycle}.sgesprep_pathname.$tmmark to point to # the sgesprep file updated here by the relocation # -------------------------------------------------------------------------- - rm ${COMSP}sgesprep_pathname.$tmmark + rm "${COM_OBS}/${RUN}.${cycle}.sgesprep_pathname.${tmmark}" - msg="TROPICAL CYCLONE RELOCATION PROCESSING SUCCESSFULLY COMPLETED FOR \ + echo "TROPICAL CYCLONE RELOCATION PROCESSING SUCCESSFULLY COMPLETED FOR \ $CDATE10" - set +u - [ -n "$jlogfile" ] && postmsg "$jlogfile" "$msg" - set -u # end GFDL ges manipulation # ------------------------- fi + exit 0 diff --git a/ush/tropcy_relocate_extrkr.sh b/ush/tropcy_relocate_extrkr.sh index a245dca98e..ede2318c4a 100755 --- a/ush/tropcy_relocate_extrkr.sh +++ b/ush/tropcy_relocate_extrkr.sh @@ -1,8 +1,9 @@ -#!/bin/ksh +#! /usr/bin/env bash + # This script is executed by the script tropcy_relocate.sh # -------------------------------------------------------- -set -aeux +source "$HOMEgfs/ush/preamble.sh" export machine=${machine:-ZEUS} export machine=$(echo $machine|tr '[a-z]' '[A-Z]') @@ -238,7 +239,7 @@ cmodel=$(echo ${cmodel} | tr "[A-Z]" "[a-z]") case ${cmodel} in - gdas) set +x; echo " "; echo " ++ operational GDAS chosen"; set -x; + gdas) set +x; echo " "; echo " ++ operational GDAS chosen"; set_trace; fcstlen=9 ; fcsthrs="" for fhr in $( seq 0 $BKGFREQ 9); do @@ -271,48 +272,48 @@ case ${cmodel} in # jpdtn=0 for deterministic data. 
g2_jpdtn=0 model=8;; - gfs) set +x; echo " "; echo " ++ operational GFS chosen"; set -x; + gfs) set +x; echo " "; echo " ++ operational GFS chosen"; set_trace; fcsthrsgfs=' 00 06 12 18 24 30 36 42 48 54 60 66 72 78'; gfsdir=$COMIN; gfsgfile=gfs.t${dishh}z.pgrbf; model=1;; - mrf) set +x; echo " "; echo " ++ operational MRF chosen"; set -x; + mrf) set +x; echo " "; echo " ++ operational MRF chosen"; set_trace; fcsthrsmrf=' 00 12 24 36 48 60 72'; mrfdir=$COMIN; mrfgfile=drfmr.t${dishh}z.pgrbf; model=2;; - ukmet) set +x; echo " "; echo " ++ operational UKMET chosen"; set -x; + ukmet) set +x; echo " "; echo " ++ operational UKMET chosen"; set_trace; fcsthrsukmet=' 00 12 24 36 48 60 72'; ukmetdir=$COMIN; ukmetgfile=ukmet.t${dishh}z.ukmet; model=3;; - ecmwf) set +x; echo " "; echo " ++ operational ECMWF chosen"; set -x; + ecmwf) set +x; echo " "; echo " ++ operational ECMWF chosen"; set_trace; fcsthrsecmwf=' 00 24 48 72'; ecmwfdir=$COMIN; ecmwfgfile=ecmgrb25.t12z; model=4;; - ngm) set +x; echo " "; echo " ++ operational NGM chosen"; set -x; + ngm) set +x; echo " "; echo " ++ operational NGM chosen"; set_trace; fcsthrsngm=' 00 06 12 18 24 30 36 42 48'; ngmdir=$COMIN; ngmgfile=ngm.t${dishh}z.pgrb.f; model=5;; - nam) set +x; echo " "; echo " ++ operational Early NAM chosen"; set -x; + nam) set +x; echo " "; echo " ++ operational Early NAM chosen"; set_trace; fcsthrsnam=' 00 06 12 18 24 30 36 42 48'; namdir=$COMIN; namgfile=nam.t${dishh}z.awip32; model=6;; - ngps) set +x; echo " "; echo " ++ operational NAVGEM chosen"; set -x; + ngps) set +x; echo " "; echo " ++ operational NAVGEM chosen"; set_trace; fcsthrsngps=' 00 12 24 36 48 60 72'; #ngpsdir=/com/hourly/prod/hourly.${CENT}${symd}; ngpsdir=$OMIN; ngpsgfile=fnoc.t${dishh}z; model=7;; other) set +x; echo " "; echo " Model selected by user is ${cmodel}, which is a "; - echo "user-defined model, NOT operational...."; echo " "; set -x; + echo "user-defined model, NOT operational...."; echo " "; set_trace; model=9;; *) set +x; echo " "; echo " !!! Model selected is not recognized."; echo " Model= ---> ${cmodel} <--- ..... Please submit the script again...."; - echo " "; set -x; exit 8;; + echo " "; set_trace; exit 8;; esac @@ -376,7 +377,7 @@ if [ ${cmodel} = 'other' ]; then echo " replace the forecast hour characters 00 with XX. Please check the" echo " name in the kickoff script and qsub it again. Exiting....." echo " " - set -x + set_trace exit 8 fi @@ -399,7 +400,7 @@ if [ ${cmodel} = 'other' ]; then echo " " echo " !!! Exiting loop, only processing 14 forecast files ...." echo " " - set -x + set_trace break fi @@ -414,7 +415,7 @@ if [ ${cmodel} = 'other' ]; then echo " " echo " +++ Found file ${fnamebeg}${fhour}${fnameend}" echo " " - set -x + set_trace let fhrct=fhrct+1 else fflag='n' @@ -434,7 +435,7 @@ if [ ${cmodel} = 'other' ]; then echo " !!! Please check the directory to make sure the file" echo " !!! is there and then submit this job again." echo " " - set -x + set_trace exit 8 fi @@ -443,7 +444,7 @@ if [ ${cmodel} = 'other' ]; then echo " Max forecast hour is $maxhour" echo " List of forecast hours: $fcsthrsother" echo " " - set -x + set_trace # -------------------------------------------------- # In order for the fortran program to know how many @@ -525,7 +526,7 @@ if [ ${numvitrecs} -eq 0 ]; then echo "!!! It could just be that there are no storms for the current" echo "!!! time. Please check the dates and submit this job again...." 
echo " " - set -x + set_trace exit 8 fi @@ -573,19 +574,17 @@ pgm=$(basename $SUPVX) if [ -s $DATA/prep_step ]; then set +e . $DATA/prep_step - set -e + set_strict else [ -f errfile ] && rm errfile export XLFUNITS=0 unset $(env | grep XLFUNIT | awk -F= '{print $1}') - set +u - if [ -z "$XLFRTEOPTS" ]; then + if [ -z "${XLFRTEOPTS:-}" ]; then export XLFRTEOPTS="unit_vars=yes" else export XLFRTEOPTS="${XLFRTEOPTS}:unit_vars=yes" fi - set -u fi @@ -614,14 +613,14 @@ set +x echo echo 'The foreground exit status for SUPVIT is ' $err echo -set -x +set_trace if [ $err -eq 0 ]; then set +x echo " " echo " Normal end for program supvitql (which updates TC vitals file)." echo " " - set -x + set_trace else set +x echo " " @@ -631,7 +630,7 @@ else echo "!!! model= ${cmodel}, forecast initial time = ${symd}${dishh}" echo "!!! Exiting...." echo " " - set -x + set_trace fi if [ -s $DATA/err_chk ]; then $DATA/err_chk @@ -661,7 +660,7 @@ if [ ${numvitrecs} -eq 0 ]; then echo "!!! File ${vdir}/vitals.upd.${cmodel}.${symd}${dishh} is empty." echo "!!! Please check the dates and submit this job again...." echo " " - set -x + set_trace exit 8 fi @@ -677,7 +676,7 @@ echo " Below is a list of the storms to be processed: " | tee -a storm_list echo " " | tee -a storm_list cat ${vdir}/vitals.upd.${cmodel}.${symd}${dishh} | tee -a storm_list echo " " | tee -a storm_list -set -x +set_trace set +u [ -n "../$pgmout" ] && cat storm_list >> ../$pgmout @@ -730,7 +729,7 @@ echo " NOW CUTTING APART INPUT GRIB FILES TO " echo " CREATE 1 BIG GRIB INPUT FILE " echo " -----------------------------------------" echo " " -set -x +set_trace #grid='255 0 151 71 70000 190000 128 0000 340000 1000 1000 64' #grid='255 0 360 181 90000 0000 128 -90000 -1000 1000 1000 64' @@ -757,7 +756,7 @@ if [ ${model} -eq 5 ]; then echo " !!! in the analysis data." echo " *******************************************************************" echo " " - set -x + set_trace fi if [ -s ${vdir}/ngmlatlon.pgrb.${symd}${dishh} ]; then @@ -773,7 +772,7 @@ if [ ${model} -eq 5 ]; then echo " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" echo " !!! NGM File missing: ${ngmdir}/${ngmgfile}${fhour}" echo " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" - set -x + set_trace continue fi if [ -s $TMPDIR/tmpixfile ]; then rm $TMPDIR/tmpixfile; fi @@ -784,7 +783,7 @@ if [ ${model} -eq 5 ]; then echo " " echo " Extracting NGM GRIB data for forecast hour = $fhour" echo " " - set -x + set_trace g1=${ngmdir}/${ngmgfile}${fhour} @@ -808,7 +807,7 @@ if [ ${model} -eq 5 ]; then echo "!!! sure you've allocated enough memory for this job (error 134 using $COPYGB is " echo "!!! typically due to using more memory than you've allocated). Exiting....." echo " " - set -x + set_trace exit 8 fi @@ -847,7 +846,7 @@ if [ ${model} -eq 6 ]; then echo " !!! in the analysis data." echo " *******************************************************************" echo " " - set -x + set_trace fi if [ -s ${vdir}/namlatlon.pgrb.${symd}${dishh} ]; then @@ -863,7 +862,7 @@ if [ ${model} -eq 6 ]; then echo " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" echo " !!! Early NAM File missing: ${namdir}/${namgfile}${fhour}.tm00" echo " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" 
- set -x + set_trace continue fi if [ -s $TMPDIR/tmpixfile ]; then rm $TMPDIR/tmpixfile; fi @@ -874,7 +873,7 @@ if [ ${model} -eq 6 ]; then echo " " echo " Extracting Early NAM GRIB data for forecast hour = $fhour" echo " " - set -x + set_trace g1=${namdir}/${namgfile}${fhour}.tm00 @@ -899,7 +898,7 @@ if [ ${model} -eq 6 ]; then echo "!!! sure you've allocated enough memory for this job (error 134 using $COPYGB is " echo "!!! typically due to using more memory than you've allocated). Exiting....." echo " " - set -x + set_trace exit 8 fi @@ -947,7 +946,7 @@ if [ ${model} -eq 4 ]; then echo " " echo " !!! Due to missing ECMWF file, execution is ending...." echo " " - set -x + set_trace exit 8 fi @@ -990,7 +989,7 @@ if [ ${model} -eq 1 ]; then echo " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" echo " !!! GFS File missing: ${gfsdir}/${gfsgfile}${fhour}" echo " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" - set -x + set_trace continue fi @@ -1061,7 +1060,7 @@ if [ ${model} -eq 8 ]; then echo " !!! gdas File missing: $gfile" echo " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" echo " " - set -x + set_trace continue fi @@ -1110,7 +1109,7 @@ if [ ${model} -eq 8 ]; then echo " !!! gdas File missing: $gfile" echo " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" echo " " - set -x + set_trace continue fi @@ -1165,7 +1164,7 @@ if [ ${model} -eq 2 ]; then echo " !!! MRF File missing: ${mrfdir}/${mrfgfile}${fhour}" echo " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" echo " " - set -x + set_trace continue fi @@ -1220,7 +1219,7 @@ if [ ${model} -eq 3 ]; then echo " !!! UKMET File missing: ${ukmetdir}/${ukmetgfile}${fhour}" echo " !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" echo " " - set -x + set_trace continue fi @@ -1261,7 +1260,7 @@ if [ ${model} -eq 7 ]; then echo " " echo " !!! Due to missing NAVGEM file, execution is ending...." echo " " - set -x + set_trace exit 8 fi @@ -1336,7 +1335,7 @@ if [ ${model} -eq 9 ]; then echo "!!! Forecast File missing: ${otherdir}/${fnamebeg}00${fnameend}" echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!" echo " " - set -x + set_trace continue fi @@ -1410,7 +1409,7 @@ if [ ${model} -eq 9 ]; then echo "!!! sure you've allocated enough memory for this job (error 134 using $COPYGB is " echo "!!! typically due to using more memory than you've allocated). Exiting....." echo " " - set -x + set_trace exit 8 fi @@ -1441,9 +1440,9 @@ while [ $ist -le 15 ] do if [ ${stormflag[${ist}]} -ne 1 ] then - set +x; echo "Storm number $ist NOT selected for processing"; set -x + set +x; echo "Storm number $ist NOT selected for processing"; set_trace else - set +x; echo "Storm number $ist IS selected for processing...."; set -x + set +x; echo "Storm number $ist IS selected for processing...."; set_trace fi let ist=ist+1 done @@ -1562,7 +1561,7 @@ set +x echo echo 'The foreground exit status for GETTRK is ' $err echo -set -x +set_trace if [ -s $DATA/err_chk ]; then $DATA/err_chk @@ -1581,5 +1580,6 @@ fi cp ${vdir}/trak.${cmodel}.all.${symdh} ${DATA}/model_track.all + exit 0 diff --git a/ush/wave_grib2_sbs.sh b/ush/wave_grib2_sbs.sh index 262d258012..8511515abb 100755 --- a/ush/wave_grib2_sbs.sh +++ b/ush/wave_grib2_sbs.sh @@ -1,5 +1,5 @@ -#!/bin/bash -# +#! 
/usr/bin/env bash + ################################################################################ # # UNIX Script Documentation Block @@ -17,71 +17,62 @@ # # Attributes: # Language: Bourne-again (BASH) shell -# Machine: WCOSS-DELL-P3 # -# Requirements: -# - wgrib2 with IPOLATES library -# +# Requirements: +# - wgrib2 with IPOLATES library +# ################################################################################ # --------------------------------------------------------------------------- # # 0. Preparations -# 0.a Basic modes of operation - # set execution trace prompt. ${0##*/} adds the script's basename - PS4=" \${SECONDS} ${0##*/} L\${LINENO} + " - set -x +source "${HOMEgfs}/ush/preamble.sh" - # Use LOUD variable to turn on/off trace. Defaults to YES (on). - export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES - [[ "$LOUD" != YES ]] && set +x +# 0.a Basic modes of operation - cd $GRIBDATA -# postmsg "$jlogfile" "Making GRIB2 Files." # commented to reduce unnecessary output to jlogfile +cd "${GRIBDATA}" || exit 2 - alertName=$(echo $RUN|tr [a-z] [A-Z]) +alertName=${RUN^^} - grdID=$1 - gribDIR=${grdID}_grib - rm -rfd ${gribDIR} - mkdir ${gribDIR} - err=$? - if [ $err != 0 ] - then - set +x - echo ' ' - echo '******************************************************************************* ' - echo '*** FATAL ERROR : ERROR IN ww3_grib2 (COULD NOT CREATE TEMP DIRECTORY) *** ' - echo '******************************************************************************* ' - echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "FATAL ERROR : ERROR IN ww3_grib2 (Could not create temp directory)" - exit 1 - fi +grdID=$1 +gribDIR="${grdID}_grib" +rm -rfd "${gribDIR}" +mkdir "${gribDIR}" +err=$? +if [[ ${err} != 0 ]]; then + set +x + echo ' ' + echo '******************************************************************************* ' + echo '*** FATAL ERROR : ERROR IN ww3_grib2 (COULD NOT CREATE TEMP DIRECTORY) *** ' + echo '******************************************************************************* ' + echo ' ' + set_trace + exit 1 +fi - cd ${gribDIR} +cd "${gribDIR}" || exit 2 # 0.b Define directories and the search path. # The tested variables should be exported by the postprocessor script. - GRIDNR=$2 - MODNR=$3 - ymdh=$4 - fhr=$5 - grdnam=$6 - grdres=$7 - gribflags=$8 - ngrib=1 # only one time slice - dtgrib=3600 # only one time slice +GRIDNR=$2 +MODNR=$3 +ymdh=$4 +fhr=$5 +grdnam=$6 +grdres=$7 +gribflags=$8 +ngrib=1 # only one time slice +dtgrib=3600 # only one time slice # SBS one time slice per file - FH3=$(printf %03i $fhr) +FH3=$(printf %03i "${fhr}") # Verify if grib2 file exists from interrupted run - ENSTAG="" - if [ ${waveMEMB} ]; then ENSTAG=".${membTAG}${waveMEMB}" ; fi - outfile=${WAV_MOD_TAG}.${cycle}${ENSTAG}.${grdnam}.${grdres}.f${FH3}.grib2 +ENSTAG="" +if [[ -n ${waveMEMB} ]]; then ENSTAG=".${membTAG}${waveMEMB}" ; fi +outfile="${WAV_MOD_TAG}.${cycle}${ENSTAG}.${grdnam}.${grdres}.f${FH3}.grib2" # Only create file if not present in COM - if [ ! -s ${COMOUT}/gridded/${outfile}.idx ]; then +if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then set +x echo ' ' @@ -89,188 +80,175 @@ echo '! 
Make GRIB files |' echo '+--------------------------------+' echo " Model ID : $WAV_MOD_TAG" - [[ "$LOUD" = YES ]] && set -x + set_trace - if [ -z "$CDATE" ] || [ -z "$cycle" ] || [ -z "$EXECwave" ] || \ - [ -z "$COMOUT" ] || [ -z "$WAV_MOD_TAG" ] || [ -z "$SENDCOM" ] || \ - [ -z "$gribflags" ] || \ - [ -z "$GRIDNR" ] || [ -z "$MODNR" ] || [ -z "$SENDDBN" ] - then + if [[ -z "${PDY}" ]] || [[ -z ${cyc} ]] || [[ -z "${cycle}" ]] || [[ -z "${EXECwave}" ]] || \ + [[ -z "${COM_WAVE_GRID}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${SENDCOM}" ]] || \ + [[ -z "${gribflags}" ]] || [[ -z "${GRIDNR}" ]] || [[ -z "${MODNR}" ]] || \ + [[ -z "${SENDDBN}" ]]; then set +x echo ' ' echo '***************************************************' echo '*** EXPORTED VARIABLES IN postprocessor NOT SET ***' echo '***************************************************' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "EXPORTED VARIABLES IN postprocessor NOT SET" + set_trace exit 1 fi -# 0.c Starting time for output + # 0.c Starting time for output - tstart="$(echo $ymdh | cut -c1-8) $(echo $ymdh | cut -c9-10)0000" + tstart="${ymdh:0:8} ${ymdh:8:2}0000" set +x - echo " Starting time : $tstart" - echo " Time step : Single SBS - echo " Number of times : Single SBS - echo " GRIB field flags : $gribflags" + echo " Starting time : ${tstart}" + echo " Time step : Single SBS" + echo " Number of times : Single SBS" + echo " GRIB field flags : ${gribflags}" echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace -# 0.e Links to working directory + # 0.e Links to working directory - ln -s ${DATA}/mod_def.$grdID mod_def.ww3 - ln -s ${DATA}/output_${ymdh}0000/out_grd.$grdID out_grd.ww3 + ln -s "${DATA}/mod_def.${grdID}" "mod_def.ww3" + ln -s "${DATA}/output_${ymdh}0000/out_grd.${grdID}" "out_grd.ww3" -# --------------------------------------------------------------------------- # -# 1. Generate GRIB file with all data -# 1.a Generate input file for ww3_grib2 -# Template copied in mother script ... + # --------------------------------------------------------------------------- # + # 1. Generate GRIB file with all data + # 1.a Generate input file for ww3_grib2 + # Template copied in mother script ... set +x echo " Generate input file for ww3_grib2" - [[ "$LOUD" = YES ]] && set -x + set_trace - sed -e "s/TIME/$tstart/g" \ - -e "s/DT/$dtgrib/g" \ - -e "s/NT/$ngrib/g" \ - -e "s/GRIDNR/$GRIDNR/g" \ - -e "s/MODNR/$MODNR/g" \ - -e "s/FLAGS/$gribflags/g" \ - ${DATA}/ww3_grib2.${grdID}.inp.tmpl > ww3_grib.inp + sed -e "s/TIME/${tstart}/g" \ + -e "s/DT/${dtgrib}/g" \ + -e "s/NT/${ngrib}/g" \ + -e "s/GRIDNR/${GRIDNR}/g" \ + -e "s/MODNR/${MODNR}/g" \ + -e "s/FLAGS/${gribflags}/g" \ + "${DATA}/ww3_grib2.${grdID}.inp.tmpl" > ww3_grib.inp - echo "ww3_grib.inp" + echo "ww3_grib.inp" cat ww3_grib.inp -# 1.b Run GRIB packing program + + # 1.b Run GRIB packing program set +x echo " Run ww3_grib2" - echo " Executing $EXECwave/ww3_grib" - [[ "$LOUD" = YES ]] && set -x + echo " Executing ${EXECwave}/ww3_grib" + set_trace export pgm=ww3_grib;. prep_step - $EXECwave/ww3_grib > grib2_${grdnam}_${FH3}.out 2>&1 + "${EXECwave}/ww3_grib" > "grib2_${grdnam}_${FH3}.out" 2>&1 export err=$?;err_chk - if [ ! 
-s gribfile ]; then - set +x - echo ' ' - echo '************************************************ ' - echo '*** FATAL ERROR : ERROR IN ww3_grib encoding *** ' - echo '************************************************ ' - echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "FATAL ERROR : ERROR IN ww3_grib2" - exit 3 - fi - - if [ $fht -gt 0 ]; then - $WGRIB2 gribfile -set_date $CDATE -set_ftime "$fhr hour fcst" -grib ${COMOUT}/gridded/${outfile} + if [ ! -s gribfile ]; then + set +x + echo ' ' + echo '************************************************ ' + echo '*** FATAL ERROR : ERROR IN ww3_grib encoding *** ' + echo '************************************************ ' + echo ' ' + set_trace + exit 3 + fi + + if (( fhr > 0 )); then + ${WGRIB2} gribfile -set_date "${PDY}${cyc}" -set_ftime "${fhr} hour fcst" -grib "${COM_WAVE_GRID}/${outfile}" err=$? - else - $WGRIB2 gribfile -set_date $CDATE -set_ftime "$fhr hour fcst" -set table_1.4 1 -set table_1.2 1 -grib ${COMOUT}/gridded/${outfile} + else + ${WGRIB2} gribfile -set_date "${PDY}${cyc}" -set_ftime "${fhr} hour fcst" \ + -set table_1.4 1 -set table_1.2 1 -grib "${COM_WAVE_GRID}/${outfile}" err=$? - fi + fi - if [ $err != 0 ] - then + if [[ ${err} != 0 ]]; then set +x echo ' ' echo '********************************************* ' echo '*** FATAL ERROR : ERROR IN ww3_grib2 *** ' echo '********************************************* ' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "FATAL ERROR : ERROR IN ww3_grib2" + set_trace exit 3 fi -# Create index - $WGRIB2 -s $COMOUT/gridded/${outfile} > $COMOUT/gridded/${outfile}.idx + # Create index + ${WGRIB2} -s "${COM_WAVE_GRID}/${outfile}" > "${COM_WAVE_GRID}/${outfile}.idx" -# Create grib2 subgrid is this is the source grid - if [ "${grdID}" = "${WAV_SUBGRBSRC}" ]; then + # Create grib2 subgrid is this is the source grid + if [[ "${grdID}" = "${WAV_SUBGRBSRC}" ]]; then for subgrb in ${WAV_SUBGRB}; do subgrbref=$(echo ${!subgrb} | cut -d " " -f 1-20) subgrbnam=$(echo ${!subgrb} | cut -d " " -f 21) subgrbres=$(echo ${!subgrb} | cut -d " " -f 22) subfnam="${WAV_MOD_TAG}.${cycle}${ENSTAG}.${subgrbnam}.${subgrbres}.f${FH3}.grib2" - $COPYGB2 -g "${subgrbref}" -i0 -x ${COMOUT}/gridded/${outfile} ${COMOUT}/gridded/${subfnam} - $WGRIB2 -s $COMOUT/gridded/${subfnam} > $COMOUT/gridded/${subfnam}.idx + ${COPYGB2} -g "${subgrbref}" -i0 -x "${COM_WAVE_GRID}/${outfile}" "${COM_WAVE_GRID}/${subfnam}" + ${WGRIB2} -s "${COM_WAVE_GRID}/${subfnam}" > "${COM_WAVE_GRID}/${subfnam}.idx" done fi -# 1.e Save in /com - - if [ ! -s $COMOUT/gridded/${outfile} ] - then - set +x - echo ' ' - echo '********************************************* ' - echo '*** FATAL ERROR : ERROR IN ww3_grib2 *** ' - echo '********************************************* ' - echo ' ' - echo " Error in moving grib file ${outfile} to com" - echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "FATAL ERROR : ERROR IN ww3_grib2" - exit 4 - fi - if [ ! 
-s $COMOUT/gridded/${outfile} ] - then - set +x - echo ' ' - echo '*************************************************** ' - echo '*** FATAL ERROR : ERROR IN ww3_grib2 INDEX FILE *** ' - echo '*************************************************** ' - echo ' ' - echo " Error in moving grib file ${outfile}.idx to com" - echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "FATAL ERROR : ERROR IN creating ww3_grib2 index" - exit 4 - fi - - if [[ "$SENDDBN" = 'YES' ]] && [[ ${outfile} != *global.0p50* ]] - then - set +x - echo " Alerting GRIB file as $COMOUT/gridded/${outfile}" - echo " Alerting GRIB index file as $COMOUT/gridded/${outfile}.idx" - [[ "$LOUD" = YES ]] && set -x - $DBNROOT/bin/dbn_alert MODEL ${alertName}_WAVE_GB2 $job $COMOUT/gridded/${outfile} - $DBNROOT/bin/dbn_alert MODEL ${alertName}_WAVE_GB2_WIDX $job $COMOUT/gridded/${outfile}.idx - else - echo "${outfile} is global.0p50, not alert out" - fi - - -# --------------------------------------------------------------------------- # -# 3. Clean up the directory + # 1.e Save in /com + + if [[ ! -s "${COM_WAVE_GRID}/${outfile}" ]]; then + set +x + echo ' ' + echo '********************************************* ' + echo '*** FATAL ERROR : ERROR IN ww3_grib2 *** ' + echo '********************************************* ' + echo ' ' + echo " Error in moving grib file ${outfile} to com" + echo ' ' + set_trace + exit 4 + fi + if [[ ! -s "${COM_WAVE_GRID}/${outfile}.idx" ]]; then + set +x + echo ' ' + echo '*************************************************** ' + echo '*** FATAL ERROR : ERROR IN ww3_grib2 INDEX FILE *** ' + echo '*************************************************** ' + echo ' ' + echo " Error in moving grib file ${outfile}.idx to com" + echo ' ' + set_trace + exit 4 + fi + + if [[ "${SENDDBN}" = 'YES' ]] && [[ ${outfile} != *global.0p50* ]]; then + set +x + echo " Alerting GRIB file as ${COM_WAVE_GRID}/${outfile}" + echo " Alerting GRIB index file as ${COM_WAVE_GRID}/${outfile}.idx" + set_trace + "${DBNROOT}/bin/dbn_alert" MODEL "${alertName}_WAVE_GB2" "${job}" "${COM_WAVE_GRID}/${outfile}" + "${DBNROOT}/bin/dbn_alert" MODEL "${alertName}_WAVE_GB2_WIDX" "${job}" "${COM_WAVE_GRID}/${outfile}.idx" + else + echo "${outfile} is global.0p50 or SENDDBN is NO, no alert sent" + fi + + + # --------------------------------------------------------------------------- # + # 3. Clean up the directory rm -f gribfile set +x echo " Removing work directory after success." - [[ "$LOUD" = YES ]] && set -x + set_trace cd ../ - mv -f ${gribDIR} done.${gribDIR} - - else - set +x - echo ' ' - echo " File ${COMOUT}/gridded/${outfile} found, skipping generation process" - echo ' ' - [[ "$LOUD" = YES ]] && set -x - fi + mv -f "${gribDIR}" "done.${gribDIR}" +else set +x echo ' ' - echo "End of ww3_grib2.sh at" - date - [[ "$LOUD" = YES ]] && set -x + echo " File ${COM_WAVE_GRID}/${outfile} found, skipping generation process" + echo ' ' + set_trace +fi + # End of ww3_grib2.sh -------------------------------------------------- # diff --git a/ush/wave_grid_interp_sbs.sh b/ush/wave_grid_interp_sbs.sh index 8d696c1d74..7fa8d9d7f3 100755 --- a/ush/wave_grid_interp_sbs.sh +++ b/ush/wave_grid_interp_sbs.sh @@ -1,5 +1,5 @@ -#!/bin/bash -# +#! 
/usr/bin/env bash + ################################################################################ # # UNIX Script Documentation Block @@ -17,31 +17,25 @@ # # Attributes: # Language: Bourne-again (BASH) shell -# Machine: WCOSS-DELL-P3 # -# Requirements: -# - wgrib2 with IPOLATES library -# +# Requirements: +# - wgrib2 with IPOLATES library +# ################################################################################ # --------------------------------------------------------------------------- # # 0. Preparations -# 0.a Basic modes of operation - # set execution trace prompt. ${0##*/} adds the script's basename - PS4=" \${SECONDS} ${0##*/} L\${LINENO} + " - set -x +source "$HOMEgfs/ush/preamble.sh" - # Use LOUD variable to turn on/off trace. Defaults to YES (on). - export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES - [[ "$LOUD" != YES ]] && set +x +# 0.a Basic modes of operation cd $GRDIDATA - grdID=$1 + grdID=$1 ymdh=$2 dt=$3 nst=$4 - postmsg "$jlogfile" "Making GRID Interpolation Files for $grdID." + echo "Making GRID Interpolation Files for $grdID." rm -rf grint_${grdID}_${ymdh} mkdir grint_${grdID}_${ymdh} err=$? @@ -54,8 +48,7 @@ echo '*** FATAL ERROR : ERROR IN ww3_grid_interp (COULD NOT CREATE TEMP DIRECTORY) *** ' echo '************************************************************************************* ' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "FATAL ERROR : ERROR IN ww3_grid_interp (Could not create temp directory)" + set_trace exit 1 fi @@ -70,11 +63,11 @@ echo '! Make GRID files |' echo '+--------------------------------+' echo " Model ID : $WAV_MOD_TAG" - [[ "$LOUD" = YES ]] && set -x + set_trace - if [ -z "$CDATE" ] || [ -z "$cycle" ] || [ -z "$EXECwave" ] || \ - [ -z "$COMOUT" ] || [ -z "$WAV_MOD_TAG" ] || [ -z "$SENDCOM" ] || \ - [ -z "$SENDDBN" ] || [ -z "$waveGRD" ] + if [[ -z "${PDY}" ]] || [[ -z "${cyc}" ]] || [[ -z "${cycle}" ]] || [[ -z "${EXECwave}" ]] || \ + [[ -z "${COM_WAVE_PREP}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${SENDCOM}" ]] || \ + [[ -z "${SENDDBN}" ]] || [ -z "${waveGRD}" ] then set +x echo ' ' @@ -82,27 +75,26 @@ echo '*** EXPORTED VARIABLES IN postprocessor NOT SET ***' echo '***************************************************' echo ' ' - echo "$CDATE $cycle $EXECwave $COMOUT $WAV_MOD_TAG $SENDCOM $SENDDBN $waveGRD" - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "EXPORTED VARIABLES IN postprocessor NOT SET" + echo "${PDY}${cyc} ${cycle} ${EXECwave} ${COM_WAVE_PREP} ${WAV_MOD_TAG} ${SENDCOM} ${SENDDBN} ${waveGRD}" + set_trace exit 1 fi # 0.c Links to files rm -f ${DATA}/output_${ymdh}0000/out_grd.$grdID - + if [ ! -f ${DATA}/${grdID}_interp.inp.tmpl ]; then cp $PARMwave/${grdID}_interp.inp.tmpl ${DATA} fi - ln -sf ${DATA}/${grdID}_interp.inp.tmpl . + ln -sf ${DATA}/${grdID}_interp.inp.tmpl . for ID in $waveGRD do ln -sf ${DATA}/output_${ymdh}0000/out_grd.$ID . done - for ID in $waveGRD $grdID + for ID in $waveGRD $grdID do ln -sf ${DATA}/mod_def.$ID . done @@ -111,7 +103,7 @@ # 1. Generate GRID file with all data # 1.a Generate Input file - time="$(echo $ymdh | cut -c1-8) $(echo $ymdh | cut -c9-10)0000" + time="${ymdh:0:8} ${ymdh:8:2}0000" sed -e "s/TIME/$time/g" \ -e "s/DT/$dt/g" \ @@ -120,25 +112,25 @@ # Check if there is an interpolation weights file available wht_OK='no' - if [ ! -f ${DATA}/WHTGRIDINT.bin.${grdID} ]; then - if [ -f $FIXwave/WHTGRIDINT.bin.${grdID} ] + if [ ! 
-f ${DATA}/ww3_gint.WHTGRIDINT.bin.${grdID} ]; then + if [ -f $FIXwave/ww3_gint.WHTGRIDINT.bin.${grdID} ] then set +x echo ' ' - echo " Copying $FIXwave/WHTGRIDINT.bin.${grdID} " - [[ "$LOUD" = YES ]] && set -x - cp $FIXwave/WHTGRIDINT.bin.${grdID} ${DATA} + echo " Copying $FIXwave/ww3_gint.WHTGRIDINT.bin.${grdID} " + set_trace + cp $FIXwave/ww3_gint.WHTGRIDINT.bin.${grdID} ${DATA} wht_OK='yes' else set +x echo ' ' - echo " Not found: $FIXwave/WHTGRIDINT.bin.${grdID} " + echo " Not found: $FIXwave/ww3_gint.WHTGRIDINT.bin.${grdID} " fi fi # Check and link weights file - if [ -f ${DATA}/WHTGRIDINT.bin.${grdID} ] + if [ -f ${DATA}/ww3_gint.WHTGRIDINT.bin.${grdID} ] then - ln -s ${DATA}/WHTGRIDINT.bin.${grdID} ./WHTGRIDINT.bin + ln -s ${DATA}/ww3_gint.WHTGRIDINT.bin.${grdID} ./WHTGRIDINT.bin fi # 1.b Run interpolation code @@ -146,7 +138,7 @@ set +x echo " Run ww3_gint echo " Executing $EXECwave/ww3_gint - [[ "$LOUD" = YES ]] && set -x + set_trace export pgm=ww3_gint;. prep_step $EXECwave/ww3_gint 1> gint.${grdID}.out 2>&1 @@ -155,10 +147,10 @@ # Write interpolation file to main TEMP dir area if not there yet if [ "wht_OK" = 'no' ] then - cp -f ./WHTGRIDINT.bin ${DATA}/WHTGRIDINT.bin.${grdID} - cp -f ./WHTGRIDINT.bin ${FIXwave}/WHTGRIDINT.bin.${grdID} + cp -f ./WHTGRIDINT.bin ${DATA}/ww3_gint.WHTGRIDINT.bin.${grdID} + cp -f ./WHTGRIDINT.bin ${FIXwave}/ww3_gint.WHTGRIDINT.bin.${grdID} fi - + if [ "$err" != '0' ] then @@ -168,8 +160,7 @@ echo '*** FATAL ERROR : ERROR IN ww3_gint interpolation * ' echo '*************************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "FATAL ERROR : ERROR IN ww3_gint interpolation" + set_trace exit 3 fi @@ -184,36 +175,27 @@ if [ "$SENDCOM" = 'YES' ] then set +x - echo " Saving GRID file as $COMOUT/rundata/$WAV_MOD_TAG.out_grd.$grdID.${CDATE}" - [[ "$LOUD" = YES ]] && set -x - cp ${DATA}/output_${ymdh}0000/out_grd.$grdID $COMOUT/rundata/$WAV_MOD_TAG.out_grd.$grdID.${CDATE} + echo " Saving GRID file as ${COM_WAVE_PREP}/${WAV_MOD_TAG}.out_grd.${grdID}.${PDY}${cyc}" + set_trace + cp "${DATA}/output_${ymdh}0000/out_grd.${grdID}" "${COM_WAVE_PREP}/${WAV_MOD_TAG}.out_grd.${grdID}.${PDY}${cyc}" # if [ "$SENDDBN" = 'YES' ] # then # set +x -# echo " Alerting GRID file as $COMOUT/rundata/$WAV_MOD_TAG.out_grd.$grdID.${CDATE} -# [[ "$LOUD" = YES ]] && set -x +# echo " Alerting GRID file as $COMOUT/rundata/$WAV_MOD_TAG.out_grd.$grdID.${PDY}${cyc} +# set_trace # # PUT DBNET ALERT HERE .... # # fi - fi + fi # --------------------------------------------------------------------------- # # 2. Clean up the directory - set +x - echo " Removing work directory after success." - [[ "$LOUD" = YES ]] && set -x - cd ../ mv -f grint_${grdID}_${ymdh} done.grint_${grdID}_${ymdh} - set +x - echo ' ' - echo "End of ww3_interp.sh at" - date - # End of ww3_grid_interp.sh -------------------------------------------- # diff --git a/ush/wave_grid_moddef.sh b/ush/wave_grid_moddef.sh index 640769f991..5b1b212a16 100755 --- a/ush/wave_grid_moddef.sh +++ b/ush/wave_grid_moddef.sh @@ -1,5 +1,5 @@ -#!/bin/bash -# +#! /usr/bin/env bash + ################################################################################ # # UNIX Script Documentation Block @@ -19,17 +19,12 @@ # # --------------------------------------------------------------------------- # # 0. Preparations -# 0.a Basic modes of operation - # set execution trace prompt. 
${0##*/} adds the script's basename - PS4=" \${SECONDS} ${0##*/} L\${LINENO} + " - set -x +source "$HOMEgfs/ush/preamble.sh" - # Use LOUD variable to turn on/off trace. Defaults to YES (on). - export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES - [[ "$LOUD" != YES ]] && set +x +# 0.a Basic modes of operation - postmsg "$jlogfile" "Generating mod_def file" + echo "Generating mod_def file" mkdir -p moddef_${1} cd moddef_${1} @@ -43,7 +38,7 @@ echo '+--------------------------------+' echo " Grid : $1" echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace # 0.b Check if grid set @@ -55,8 +50,7 @@ echo '*** Grid not identifife in ww3_mod_def.sh ***' echo '**************************************************' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "GRID IN ww3_mod_def.sh NOT SET" + set_trace exit 1 else grdID=$1 @@ -73,8 +67,7 @@ echo '*** EXPORTED VARIABLES IN ww3_mod_def.sh NOT SET ***' echo '*********************************************************' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "EXPORTED VARIABLES IN ww3_mod_def.sh NOT SET" + set_trace exit 2 fi @@ -86,7 +79,7 @@ echo ' Creating mod_def file ...' echo " Executing $EXECwave/ww3_grid" echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace rm -f ww3_grid.inp ln -sf ../ww3_grid.inp.$grdID ww3_grid.inp @@ -102,14 +95,13 @@ echo '*** FATAL ERROR : ERROR IN ww3_grid *** ' echo '******************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "FATAL ERROR : ERROR IN ww3_grid" + set_trace exit 3 fi if [ -f mod_def.ww3 ] then - cp mod_def.ww3 $COMOUT/rundata/${CDUMP}wave.mod_def.${grdID} + cp mod_def.ww3 "${COM_WAVE_PREP}/${RUN}wave.mod_def.${grdID}" mv mod_def.ww3 ../mod_def.$grdID else set +x @@ -118,20 +110,14 @@ echo '*** FATAL ERROR : MOD DEF FILE NOT FOUND *** ' echo '******************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "FATAL ERROR : Mod def File creation FAILED" + set_trace exit 4 fi # --------------------------------------------------------------------------- # # 3. Clean up - cd .. - rm -rf moddef_$grdID - - set +x - echo ' ' - echo 'End of ww3_mod_def.sh at' - date +cd .. +rm -rf moddef_$grdID # End of ww3_mod_def.sh ------------------------------------------------- # diff --git a/ush/wave_outp_cat.sh b/ush/wave_outp_cat.sh index 358ff9055c..f4bf6b2294 100755 --- a/ush/wave_outp_cat.sh +++ b/ush/wave_outp_cat.sh @@ -1,36 +1,29 @@ -#!/bin/bash -# +#! /usr/bin/env bash + ################################################################################ # # UNIX Script Documentation Block -# Script name: wave_outp_cat.sh -# Script description: Gathers ASCII data files for all fhr for each buoy +# Script name: wave_outp_cat.sh +# Script description: Gathers ASCII data files for all fhr for each buoy # # Author: Jessica Meixner Org: NCEP/EMC Date: 2020-08-27 -# Abstract: Cats spec files from each fhr into one for each buoy +# Abstract: Cats spec files from each fhr into one for each buoy # # Script history log: -# 2020-08-27 Jessica Meixner creation of script +# 2020-08-27 Jessica Meixner creation of script # # $Id$ # # Attributes: # Language: Bourne-again (BASH) shell -# Machine: WCOSS-DELL-P3 # ################################################################################ # --------------------------------------------------------------------------- # # 0. Preparations -# 0.a Basic modes of operation - # set execution trace prompt. 
${0##*/} adds the script's basename - PS4=" \${SECONDS} ${0##*/} L\${LINENO} + " - set -x +source "$HOMEgfs/ush/preamble.sh" - # Use LOUD variable to turn on/off trace. Defaults to YES (on). - export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES - [[ "$LOUD" != YES ]] && set +x - +# 0.a Basic modes of operation bloc=$1 MAXHOUR=$2 specdir=$3 @@ -45,8 +38,7 @@ echo '*** LOCATION ID IN ww3_outp_spec.sh NOT SET ***' echo '***********************************************' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "LOCATION ID IN ww3_outp_cat.sh NOT SET" + set_trace exit 1 else buoy=$bloc @@ -64,27 +56,30 @@ echo '*** EXPORTED VARIABLES IN ww3_outp_cat.sh NOT SET ***' echo '******************************************************' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "EXPORTED VARIABLES IN ww3_outp_cat.sh NOT SET" + set_trace exit 3 fi # --------------------------------------------------------------------------- # -# 1. Cat for a buoy all fhr into one file +# 1. Cat for a buoy all fhr into one file set +x echo " Generate input file for ww3_outp." - [[ "$LOUD" = YES ]] && set -x + set_trace if [ "$specdir" = "bull" ] then outfile=${STA_DIR}/${specdir}/$WAV_MOD_TAG.$buoy.bull coutfile=${STA_DIR}/c${specdir}/$WAV_MOD_TAG.$buoy.cbull - rm outfile coutfile + for f in "${outfile}" "${coutfile}"; do + if [[ -f ${f} ]]; then rm ${f}; fi + done else outfile=${STA_DIR}/${specdir}/$WAV_MOD_TAG.$buoy.spec - rm outfile + if [[ -f ${outfile} ]]; then + rm ${outfile} + fi fi fhr=$FHMIN_WAV @@ -96,9 +91,9 @@ then outfilefhr=${STA_DIR}/${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.bull coutfilefhr=${STA_DIR}/c${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.cbull - else + else outfilefhr=${STA_DIR}/${specdir}fhr/$WAV_MOD_TAG.${ymdh}.$buoy.spec - fi + fi if [ -f $outfilefhr ] then @@ -107,7 +102,7 @@ cat $outfilefhr >> ${STA_DIR}/${specdir}/$WAV_MOD_TAG.$buoy.bull cat $coutfilefhr >> ${STA_DIR}/c${specdir}/$WAV_MOD_TAG.$buoy.cbull rm $outfilefhr $coutfilefhr - else + else cat $outfilefhr >> ${STA_DIR}/${specdir}/$WAV_MOD_TAG.$buoy.spec #rm $outfilefhr fi @@ -118,7 +113,7 @@ echo "*** FATAL ERROR : OUTPUT DATA FILE FOR BOUY $bouy at ${ymdh} NOT FOUND *** " echo '************************************************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace err=2; export err;${errchk} exit $err fi @@ -142,14 +137,9 @@ echo " FATAL ERROR : OUTPUTFILE ${outfile} not created " echo '*************************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace err=2; export err;${errchk} exit $err fi - set +x - echo ' ' - echo 'End of ww3_outp_cat.sh at' - date - # End of ww3_outp_cat.sh ---------------------------------------------------- # diff --git a/ush/wave_outp_spec.sh b/ush/wave_outp_spec.sh index e7691e2908..5acc0f95ab 100755 --- a/ush/wave_outp_spec.sh +++ b/ush/wave_outp_spec.sh @@ -1,5 +1,5 @@ -#!/bin/bash -# +#! /usr/bin/env bash + ################################################################################ # # UNIX Script Documentation Block @@ -17,21 +17,14 @@ # # Attributes: # Language: Bourne-again (BASH) shell -# Machine: WCOSS-DELL-P3 # ################################################################################ # --------------------------------------------------------------------------- # # 0. Preparations -# 0.a Basic modes of operation - # set execution trace prompt.
${0##*/} adds the script's basename - PS4=" \${SECONDS} ${0##*/} L\${LINENO} + " - set -x +source "$HOMEgfs/ush/preamble.sh" - # Use LOUD variable to turn on/off trace. Defaults to YES (on). - export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES - [[ "$LOUD" != YES ]] && set +x - +# 0.a Basic modes of operation bloc=$1 ymdh=$2 specdir=$3 @@ -52,8 +45,7 @@ echo '*** FATAL ERROR : ERROR IN ww3_outp_spec (COULD NOT CREATE TEMP DIRECTORY) *** ' echo '****************************************************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "FATAL ERROR : ERROR IN ww3_outp_spec (Could not create temp directory)" + set_trace exit 1 fi @@ -65,7 +57,7 @@ echo '! Make spectral file |' echo '+--------------------------------+' echo " Model ID : $WAV_MOD_TAG" - [[ "$LOUD" = YES ]] && set -x + set_trace # 0.b Check if buoy location set @@ -77,8 +69,7 @@ echo '*** LOCATION ID IN ww3_outp_spec.sh NOT SET ***' echo '***********************************************' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "LOCATION ID IN ww3_outp_spec.sh NOT SET" + set_trace exit 1 else buoy=$bloc @@ -93,7 +84,7 @@ echo " Location ID/# : $buoy (${point})" echo " Spectral output start time : $ymdh " echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace break fi done < tmp_list.loc @@ -104,8 +95,7 @@ echo '*** LOCATION ID IN ww3_outp_spec.sh NOT RECOGNIZED ***' echo '******************************************************' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "LOCATION ID IN ww3_outp_spec.sh NOT RECOGNIZED" + set_trace exit 2 fi fi @@ -123,8 +113,7 @@ echo '*** EXPORTED VARIABLES IN ww3_outp_spec.sh NOT SET ***' echo '******************************************************' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "EXPORTED VARIABLES IN ww3_outp_spec.sh NOT SET" + set_trace exit 3 fi @@ -136,7 +125,7 @@ set +x echo " Output starts at $tstart." echo ' ' - [[ "$LOUD" = YES ]] && set -x + set_trace # 0.e sync important files @@ -155,7 +144,7 @@ set +x echo " Generate input file for ww3_outp." - [[ "$LOUD" = YES ]] && set -x + set_trace if [ "$specdir" = "bull" ] then @@ -182,7 +171,7 @@ set +x echo " Executing $EXECwave/ww3_outp" - [[ "$LOUD" = YES ]] && set -x + set_trace export pgm=ww3_outp;. prep_step $EXECwave/ww3_outp 1> outp_${specdir}_${buoy}.out 2>&1 @@ -197,8 +186,7 @@ echo '*** FATAL ERROR : ERROR IN ww3_outp *** ' echo '******************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "FATAL ERROR : ERROR IN ww3_outp" + set_trace exit 4 fi @@ -242,19 +230,13 @@ echo '*** FATAL ERROR : OUTPUT DATA FILE FOR BOUY $bouy NOT FOUND *** ' echo '***************************************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "FATAL ERROR : OUTPUT DATA FILE FOR BOUY $bouy NOT FOUND" + set_trace exit 5 fi # 3.b Clean up the rest - cd .. - rm -rf ${specdir}_${bloc} - - set +x - echo ' ' - echo 'End of ww3_outp_spec.sh at' - date +cd .. +rm -rf ${specdir}_${bloc} # End of ww3_outp_spec.sh ---------------------------------------------------- # diff --git a/ush/wave_prnc_cur.sh b/ush/wave_prnc_cur.sh index fe1855064e..6b1ab19db2 100755 --- a/ush/wave_prnc_cur.sh +++ b/ush/wave_prnc_cur.sh @@ -1,5 +1,5 @@ -#!/bin/sh -# +#! 
/usr/bin/env bash + ################################################################################ # # UNIX Script Documentation Block # @@ -12,57 +12,58 @@ # Script history log: # 2019-10-02 J-Henrique Alves: origination, first version # 2019-11-02 J-Henrique Alves Ported to global-workflow. -# 2020-06-10 J-Henrique Alves Ported R&D machine Hera +# 2020-06-10 J-Henrique Alves Ported R&D machine Hera # # $Id$ # # Attributes: # Language: Bourne-again (BASH) shell -# Machine: WCOSS-DELL-P3 # ################################################################################ # -set -x + +source "$HOMEgfs/ush/preamble.sh" ymdh_rtofs=$1 curfile=$2 fhr=$3 flagfirst=$4 fh3=$(printf "%03d" "${fhr#0}") +fext='f' # Timing has to be made relative to the single 00z RTOFS cycle for that PDY mkdir -p rtofs_${ymdh_rtofs} cd rtofs_${ymdh_rtofs} -ncks -x -v sst,sss,layer_density $curfile cur_uv_${PDY}_${fext}${fh3}.nc -ncks -O -a -h -x -v Layer cur_uv_${PDY}_${fext}${fh3}.nc cur_temp1.nc +ncks -x -v sst,sss,layer_density "${curfile}" "cur_uv_${PDY}_${fext}${fh3}.nc" +ncks -O -a -h -x -v Layer "cur_uv_${PDY}_${fext}${fh3}.nc" "cur_temp1.nc" ncwa -h -O -a Layer cur_temp1.nc cur_temp2.nc ncrename -h -O -v MT,time -d MT,time cur_temp2.nc ncks -v u_velocity,v_velocity cur_temp2.nc cur_temp3.nc -mv -f cur_temp3.nc cur_uv_${PDY}_${fext}${fh3}_flat.nc +mv -f "cur_temp3.nc" "cur_uv_${PDY}_${fext}${fh3}_flat.nc" # Convert to regular lat lon file # If weights need to be regenerated due to CDO ver change, use: # $CDO genbil,r4320x2160 rtofs_glo_2ds_f000_3hrly_prog.nc weights.nc cp ${FIXwave}/weights_rtofs_to_r4320x2160.nc ./weights.nc - + # Interpolate to regular 5 min grid -$CDO remap,r4320x2160,weights.nc cur_uv_${PDY}_${fext}${fh3}_flat.nc cur_5min_01.nc +${CDO} remap,r4320x2160,weights.nc "cur_uv_${PDY}_${fext}${fh3}_flat.nc" "cur_5min_01.nc" # Perform 9-point smoothing twice to make RTOFS data less noisy when -# interpolating from 1/12 deg RTOFS grid to 1/6 deg wave grid +# interpolating from 1/12 deg RTOFS grid to 1/6 deg wave grid if [ "WAV_CUR_CDO_SMOOTH" = "YES" ]; then - $CDO -f nc -smooth9 cur_5min_01.nc cur_5min_02.nc - $CDO -f nc -smooth9 cur_5min_02.nc cur_glo_uv_${PDY}_${fext}${fh3}_5min.nc + ${CDO} -f nc -smooth9 "cur_5min_01.nc" "cur_5min_02.nc" + ${CDO} -f nc -smooth9 "cur_5min_02.nc" "cur_glo_uv_${PDY}_${fext}${fh3}_5min.nc" else - mv cur_5min_01.nc cur_glo_uv_${PDY}_${fext}${fh3}_5min.nc + mv "cur_5min_01.nc" "cur_glo_uv_${PDY}_${fext}${fh3}_5min.nc" fi # Cleanup -rm -f cur_temp[123].nc cur_5min_??.nc cur_glo_uv_${PDY}_${fext}${fh3}.nc weights.nc +rm -f cur_temp[123].nc cur_5min_??.nc "cur_glo_uv_${PDY}_${fext}${fh3}.nc" weights.nc -if [ ${flagfirst} = "T" ] +if [ ${flagfirst} = "T" ] then sed -e "s/HDRFL/T/g" ${PARMwave}/ww3_prnc.cur.${WAVECUR_FID}.inp.tmpl > ww3_prnc.inp else @@ -70,8 +71,8 @@ else fi rm -f cur.nc -ln -s cur_glo_uv_${PDY}_${fext}${fh3}_5min.nc cur.nc -ln -s ${DATA}/mod_def.${WAVECUR_FID} ./mod_def.ww3 +ln -s "cur_glo_uv_${PDY}_${fext}${fh3}_5min.nc" "cur.nc" +ln -s "${DATA}/mod_def.${WAVECUR_FID}" ./mod_def.ww3 export pgm=ww3_prnc;. prep_step $EXECwave/ww3_prnc 1> prnc_${WAVECUR_FID}_${ymdh_rtofs}.out 2>&1 @@ -88,11 +89,10 @@ then echo '******************************************** ' echo ' ' set $seton - postmsg "$jlogfile" "WARNING: NON-FATAL ERROR IN ww3_prnc." + echo "WARNING: NON-FATAL ERROR IN ww3_prnc." 
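The recurring change in these wave scripts is the replacement of the per-script PS4/LOUD tracing boilerplate with a single source of ush/preamble.sh. As a minimal sketch only (assuming, as the new code implies, that preamble.sh enables xtrace and defines the set_trace helper), the convention the converted scripts rely on looks like this:

    #! /usr/bin/env bash
    source "$HOMEgfs/ush/preamble.sh"   # assumed to enable xtrace and to define set_trace

    set +x                              # quiet block: keep log messages readable
    echo "Status or error message here"
    set_trace                           # assumed helper from preamble.sh: restores tracing if it was on

This removes the LOUD toggle and the postmsg/jlogfile calls while keeping the readable echo blocks intact.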
exit 4 fi mv -f current.ww3 ${DATA}/${WAVECUR_DID}.${ymdh_rtofs} cd ${DATA} - diff --git a/ush/wave_prnc_ice.sh b/ush/wave_prnc_ice.sh index bd46bbcb69..a32a2b7e43 100755 --- a/ush/wave_prnc_ice.sh +++ b/ush/wave_prnc_ice.sh @@ -1,4 +1,5 @@ -#!/bin/sh +#! /usr/bin/env bash + ############################################################################### # # # This script preprocesses ice fields for the ocean wave models. # @@ -25,13 +26,13 @@ # # --------------------------------------------------------------------------- # # 0. Preparations + +source "$HOMEgfs/ush/preamble.sh" + # 0.a Basic modes of operation cd $DATA - seton='-xa' - setoff='+xa' - set $seton - + rm -rf ice mkdir ice cd ice @@ -40,33 +41,33 @@ # 0.b Define directories and the search path. # The tested variables should be exported by the postprocessor script. - set $setoff + set +x echo ' ' echo '+--------------------------------+' echo '! Make ice fields |' echo '+--------------------------------+' echo " Model TAG : $WAV_MOD_TAG" - echo " Model ID : ${CDUMP}wave" + echo " Model ID : ${RUN}wave" echo " Ice grid ID : $WAVEICE_FID" echo " Ice file : $WAVICEFILE" echo ' ' - set $seton - postmsg "$jlogfile" "Making ice fields." - - if [ -z "$YMDH" ] || [ -z "$cycle" ] || \ - [ -z "$COMOUT" ] || [ -z "$FIXwave" ] || [ -z "$EXECwave" ] || \ - [ -z "$WAV_MOD_TAG" ] || [ -z "$WAVEICE_FID" ] || [ -z "$SENDCOM" ] || \ - [ -z "$COMIN_WAV_ICE" ] - then - set $setoff + set_trace + echo "Making ice fields." + + if [[ -z "${YMDH}" ]] || [[ -z "${cycle}" ]] || \ + [[ -z "${COM_WAVE_PREP}" ]] || [[ -z "${FIXwave}" ]] || [[ -z "${EXECwave}" ]] || \ + [[ -z "${WAV_MOD_TAG}" ]] || [[ -z "${WAVEICE_FID}" ]] || [[ -z "${SENDCOM}" ]] || \ + [[ -z "${COM_OBS}" ]]; then + + set +x echo ' ' echo '**************************************************' echo '*** EXPORTED VARIABLES IN preprocessor NOT SET ***' echo '**************************************************' echo ' ' exit 1 - set $seton - postmsg "$jlogfile" "NON-FATAL ERROR - EXPORTED VARIABLES IN preprocessor NOT SET" + set_trace + echo "NON-FATAL ERROR - EXPORTED VARIABLES IN preprocessor NOT SET" fi # 0.c Links to working directory @@ -77,7 +78,7 @@ # 1. Get the necessary files # 1.a Copy the ice data file - file=${COMIN_WAV_ICE}/${WAVICEFILE} + file=${COM_OBS}/${WAVICEFILE} if [ -f $file ] then @@ -86,18 +87,18 @@ if [ -f ice.grib ] then - set $setoff + set +x echo " ice.grib copied ($file)." - set $seton + set_trace else - set $setoff + set +x echo ' ' echo '************************************** ' echo "*** FATAL ERROR: NO ICE FILE $file *** " echo '************************************** ' echo ' ' - set $seton - postmsg "$jlogfile" "FATAL ERROR - NO ICE FILE (GFS GRIB)" + set_trace + echo "FATAL ERROR - NO ICE FILE (GFS GRIB)" exit 2 fi @@ -105,9 +106,9 @@ # 2. Process the GRIB packed ice file # 2.a Unpack data - set $setoff + set +x echo ' Extracting data from ice.grib ...' - set $seton + set_trace $WGRIB2 ice.grib -netcdf icean_5m.nc 2>&1 > wgrib.out @@ -117,14 +118,14 @@ if [ "$err" != '0' ] then cat wgrib.out - set $setoff + set +x echo ' ' echo '**************************************** ' echo '*** ERROR IN UNPACKING GRIB ICE FILE *** ' echo '**************************************** ' echo ' ' - set $seton - postmsg "$jlogfile" "ERROR IN UNPACKING GRIB ICE FILE." + set_trace + echo "ERROR IN UNPACKING GRIB ICE FILE." exit 3 fi @@ -135,10 +136,10 @@ # 2.d Run through preprocessor wave_prep - set $setoff + set +x echo ' Run through preprocessor ...' 
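The environment checks in wave_prnc_ice.sh above (and again in wave_tar.sh below) are rewritten as chained [[ -z ... ]] tests against the new COM_* variables. Purely as an illustration, not the form used in the scripts, the same guard can be written compactly with a loop over the required names using bash indirect expansion:

    # Hypothetical helper: fail early if any required variable is unset or empty.
    required_vars=(YMDH cycle COM_WAVE_PREP FIXwave EXECwave WAV_MOD_TAG WAVEICE_FID COM_OBS)
    for name in "${required_vars[@]}"; do
      if [[ -z "${!name:-}" ]]; then
        echo "FATAL ERROR: required variable ${name} is not set"
        exit 1
      fi
    done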
echo ' ' - set $seton + set_trace cp -f ${DATA}/ww3_prnc.ice.$WAVEICE_FID.inp.tmpl ww3_prnc.inp @@ -150,14 +151,14 @@ if [ "$err" != '0' ] then cat prnc_${WAVEICE_FID}_${cycle}.out - set $setoff + set +x echo ' ' echo '******************************************** ' echo '*** WARNING: NON-FATAL ERROR IN ww3_prnc *** ' echo '******************************************** ' echo ' ' - set $seton - postmsg "$jlogfile" "WARNING: NON-FATAL ERROR IN ww3_prnc." + set_trace + echo "WARNING: NON-FATAL ERROR IN ww3_prnc." exit 4 fi @@ -174,28 +175,20 @@ icefile=${WAV_MOD_TAG}.${WAVEICE_FID}.$cycle.ice elif [ "${WW3ATMIENS}" = "F" ] then - icefile=${CDUMP}wave.${WAVEICE_FID}.$cycle.ice + icefile=${RUN}wave.${WAVEICE_FID}.$cycle.ice fi - set $setoff - echo " Saving ice.ww3 as $COMOUT/rundata/${icefile}" - set $seton - cp ice.ww3 $COMOUT/rundata/${icefile} + set +x + echo " Saving ice.ww3 as ${COM_WAVE_PREP}/${icefile}" + set_trace + cp ice.ww3 "${COM_WAVE_PREP}/${icefile}" rm -f ice.ww3 # --------------------------------------------------------------------------- # # 4. Clean up the directory - set $setoff - echo " Removing work directory after success." - set $seton - - cd .. - rm -rf ice +cd .. - set $setoff - echo ' ' - echo 'End of waveice.sh at' - date +rm -rf ice # End of waveice.sh --------------------------------------------------------- # diff --git a/ush/wave_tar.sh b/ush/wave_tar.sh index 4b37ae9048..9264aac5f3 100755 --- a/ush/wave_tar.sh +++ b/ush/wave_tar.sh @@ -1,4 +1,5 @@ -#!/bin/bash +#! /usr/bin/env bash + ############################################################################### # # # This script tars the sectral or bulletin files into a single file and # @@ -23,18 +24,13 @@ # # --------------------------------------------------------------------------- # # 0. Preparations -# 0.a Basic modes of operation - # set execution trace prompt. ${0##*/} adds the script's basename - PS4=" \${SECONDS} ${0##*/} L\${LINENO} + " - set -x +source "$HOMEgfs/ush/preamble.sh" - # Use LOUD variable to turn on/off trace. Defaults to YES (on). - export LOUD=${LOUD:-YES}; [[ $LOUD = yes ]] && export LOUD=YES - [[ "$LOUD" != YES ]] && set -x +# 0.a Basic modes of operation cd $DATA - postmsg "$jlogfile" "Making TAR FILE" + echo "Making TAR FILE" alertName=$(echo $RUN|tr [a-z] [A-Z]) @@ -46,7 +42,7 @@ echo " ID : $1" echo " Type : $2" echo " Number of files : $3" - [[ "$LOUD" = YES ]] && set -x + set_trace # 0.b Check if type set @@ -59,8 +55,7 @@ echo '*** VARIABLES IN ww3_tar.sh NOT SET ***' echo '********************************************' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "TYPE IN ww3_tar.sh NOT SET" + set_trace exit 1 else ID=$1 @@ -81,17 +76,15 @@ # 0.c Define directories and the search path. # The tested variables should be exported by the postprocessor script. - if [ -z "$cycle" ] || [ -z "$COMOUT" ] || [ -z "$WAV_MOD_TAG" ] || \ - [ -z "$SENDCOM" ] || [ -z "$SENDDBN" ] || [ -z "${STA_DIR}" ] - then + if [[ -z "${cycle}" ]] || [[ -z "${COM_WAVE_STATION}" ]] || [[ -z "${WAV_MOD_TAG}" ]] || \ + [[ -z "${SENDCOM}" ]] || [[ -z "${SENDDBN}" ]] || [[ -z "${STA_DIR}" ]]; then set +x echo ' ' echo '*****************************************************' echo '*** EXPORTED VARIABLES IN ww3_tar.sh NOT SET ***' echo '*****************************************************' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "EXPORTED VARIABLES IN ww3_tar.sh NOT SET" + set_trace exit 2 fi @@ -103,7 +96,7 @@ set +x echo ' ' echo ' Making tar file ...' 
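Alongside the set_trace conversion, these hunks add double quotes around variable expansions in copy, link, and alert commands. The detail that matters is that each path must be quoted as its own word; quoting two expansions inside one pair of quotes produces a single argument (usually a nonexistent filename). A small sketch of the distinction, using names from the surrounding scripts:

    # Each expansion quoted as its own word (two arguments, the intended behavior):
    cp "${file_name}" "${COM_WAVE_STATION}/."

    # Both expansions inside one pair of quotes: a single argument containing a space,
    # which names a file that almost certainly does not exist.
    # cp "${file_name} ${COM_WAVE_STATION}/."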
- set -x + set_trace count=0 countMAX=5 @@ -127,8 +120,7 @@ echo '*** FATAL ERROR : TAR CREATION FAILED *** ' echo '***************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "FATAL ERROR : TAR CREATION FAILED" + set_trace exit 3 fi @@ -139,7 +131,7 @@ else set +x echo ' All files not found for tar. Sleeping 10 seconds and trying again ..' - [[ "$LOUD" = YES ]] && set -x + set_trace sleep 10 count=$(expr $count + 1) fi @@ -154,8 +146,7 @@ echo '*** FATAL ERROR : TAR CREATION FAILED *** ' echo '***************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "FATAL ERROR : TAR CREATION FAILED" + set_trace exit 3 fi @@ -175,8 +166,7 @@ echo '*** FATAL ERROR : SPECTRAL TAR COMPRESSION FAILED *** ' echo '***************************************************** ' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "FATAL ERROR : SPECTRAL TAR COMPRESSION FAILED" + set_trace exit 4 fi fi @@ -189,10 +179,10 @@ set +x echo ' ' - echo " Moving tar file ${file_name} to $COMOUT ..." - [[ "$LOUD" = YES ]] && set -x + echo " Moving tar file ${file_name} to ${COM_WAVE_STATION} ..." + set_trace - cp ${file_name} $COMOUT/station/. + cp "${file_name}" "${COM_WAVE_STATION}/." exit=$? @@ -204,8 +194,7 @@ echo '*** FATAL ERROR : TAR COPY FAILED *** ' echo '************************************* ' echo ' ' - [[ "$LOUD" = YES ]] && set -x - postmsg "$jlogfile" "FATAL ERROR : TAR COPY FAILED" + set_trace exit 4 fi @@ -213,23 +202,22 @@ then set +x echo ' ' - echo " Alerting TAR file as $COMOUT/station/${file_name}" + echo " Alerting TAR file as ${COM_WAVE_STATION}/${file_name}" echo ' ' - [[ "$LOUD" = YES ]] && set -x - $DBNROOT/bin/dbn_alert MODEL ${alertName}_WAVE_TAR $job $COMOUT/station/${file_name} + set_trace + "${DBNROOT}/bin/dbn_alert" MODEL "${alertName}_WAVE_TAR" "${job}" \ + "${COM_WAVE_STATION}/${file_name}" fi # --------------------------------------------------------------------------- # # 4. 
Final clean up - cd $DATA +cd $DATA - set +x; [[ "$LOUD" = YES ]] && set -v +if [[ ${KEEPDATA:-NO} == "NO" ]]; then + set -v rm -rf ${STA_DIR}/${type} set +v - - echo ' ' - echo 'End of ww3_tar.sh at' - date +fi # End of ww3_tar.sh ----------------------------------------------------- # diff --git a/util/modulefiles/gfs_util.hera b/util/modulefiles/gfs_util.hera deleted file mode 100644 index ac8a7d941c..0000000000 --- a/util/modulefiles/gfs_util.hera +++ /dev/null @@ -1,28 +0,0 @@ -#%Module##################################################### -## Module file for GFS util -############################################################# -# -# Loading required system modules -# - -module use /scratch2/NCEPDEV/nwprod/hpc-stack/libs/hpc-stack/modulefiles/stack -module load hpc/1.1.0 -module load hpc-intel/18.0.5.274 -module load hpc-impi/2018.0.4 - -module load bacio/2.4.1 -module load w3emc/2.7.3 -module load w3nco/2.4.1 -module load ip/3.3.3 -module load sp/2.3.3 -module load bufr/11.4.0 - -module load jasper/2.0.22 -module load png/1.6.35 -module load zlib/1.2.11 - -module load ncl/6.5.0 -module load gempak/7.4.2 - -export GEMINC=/apps/gempak/7.4.2/gempak/include -export GEMOLB=/apps/gempak/7.4.2/os/linux64/lib diff --git a/util/modulefiles/gfs_util.wcoss_dell_p3 b/util/modulefiles/gfs_util.wcoss_dell_p3 deleted file mode 100755 index bde874a371..0000000000 --- a/util/modulefiles/gfs_util.wcoss_dell_p3 +++ /dev/null @@ -1,22 +0,0 @@ -#%Module##################################################### -## Module file for GFS util -############################################################# -# -# Loading required system modules -# - module load ips/18.0.1.163 - module load impi/18.0.1 - module load NCL/6.4.0 - -# Loading GEMPAK module - module use -a /gpfs/dell1/nco/ops/nwprod/modulefiles/ - module load gempak/7.3.3 - -# Loading Intel-Compiled NCEP Libraries - module load bacio/2.0.3 - module load w3emc/2.4.0 - module load w3nco/2.2.0 - module load ip/3.0.2 - module load sp/2.0.3 - module load g2/3.2.0 - module load bufr/11.3.0 diff --git a/util/sorc/compile_gfs_util_wcoss.sh b/util/sorc/compile_gfs_util_wcoss.sh deleted file mode 100755 index 2fe4e2c7cf..0000000000 --- a/util/sorc/compile_gfs_util_wcoss.sh +++ /dev/null @@ -1,57 +0,0 @@ -#!/bin/sh - -###################################################################### -# -# Build executable GFS utility for GFS V16.0.0 -# -###################################################################### - -LMOD_EXACT_MATCH=no -source ../../sorc/machine-setup.sh > /dev/null 2>&1 -cwd=$(pwd) - -if [ "$target" = "wcoss_dell_p3" ] || [ "$target" = "wcoss_cray" ] || [ "$target" = "hera" ] ; then - echo " " - echo " You are on WCOSS: $target " - echo " " -elif [ "$target" = "wcoss" ] ; then - echo " " - echo " " - echo " You are on WCOSS: $target " - echo " You do not need to build GFS utilities for GFS V16.0.0 " - echo " " - echo " " - exit -else - echo " " - echo " Your machine is $target is not recognized as a WCOSS machine." - echo " The script $0 can not continue. Aborting!" - echo " " - exit -fi -echo " " - -# Load required modules -source ../modulefiles/gfs_util.${target} -module list - -dirlist="overgridid rdbfmsua webtitle mkgfsawps" -set -x - -for dir in $dirlist -do - cd ${dir}.fd - echo "PWD: $PWD" - set +x - echo " " - echo " ### ${dir} ### " - echo " " - set -x - ./compile_${dir}_wcoss.sh - set +x - echo " " - echo " ######################################### " - echo " " - cd .. 
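The wave_tar.sh clean-up shown above now removes the station work area only when retaining intermediate data has not been requested. A minimal sketch of that convention, assuming KEEPDATA defaults to NO when unset:

    # Keep the intermediate station files only when the experiment requests it.
    if [[ "${KEEPDATA:-NO}" == "NO" ]]; then
      rm -rf "${STA_DIR}/${type}"
    fi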
- echo "BACK TO: $PWD" -done diff --git a/util/sorc/mkgfsawps.fd/compile_mkgfsawps_wcoss.sh b/util/sorc/mkgfsawps.fd/compile_mkgfsawps_wcoss.sh deleted file mode 100755 index 7c7a5022e3..0000000000 --- a/util/sorc/mkgfsawps.fd/compile_mkgfsawps_wcoss.sh +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/sh -LMOD_EXACT_MATCH=no -source ../../../sorc/machine-setup.sh > /dev/null 2>&1 -cwd=$(pwd) - -if [ "$target" = "wcoss_dell_p3" ] || [ "$target" = "wcoss_cray" ] || [ "$target" = "hera" ] ; then - echo " " - echo " You are on WCOSS: $target " - echo " " -elif [ "$target" = "wcoss" ] ; then - echo " " - echo " " - echo " You are on WCOSS: $target " - echo " You do not need to build GFS utilities for GFS V15.0.0 " - echo " " - echo " " - exit -else - echo " " - echo " Your machine is $target is not recognized as a WCOSS machine." - echo " The script $0 can not continue. Aborting!" - echo " " - exit -fi -echo " " - -# Load required modules -source ../../modulefiles/gfs_util.${target} -module list - -set -x - -mkdir -p ../../exec -make -f makefile.$target -make -f makefile.$target clean -mv mkgfsawps ../../exec diff --git a/util/sorc/mkgfsawps.fd/makefile b/util/sorc/mkgfsawps.fd/makefile deleted file mode 100755 index 86f3c417b1..0000000000 --- a/util/sorc/mkgfsawps.fd/makefile +++ /dev/null @@ -1,53 +0,0 @@ -SHELL=/bin/sh -# -SRCS= mkgfsawps.f - -OBJS= mkgfsawps.o - -# Tunable parameters -# -# FC Name of the fortran compiling system to use -# LDFLAGS Flags to the loader -# LIBS List of libraries -# CMD Name of the executable -# PROFLIB Library needed for profiling -# -FC = ifort - -LDFLAGS = -IOMP5_LIB=/usrx/local/prod/intel/2018UP01/lib/intel64/libiomp5.a - -LIBS = -Xlinker --start-group ${W3NCO_LIBd} ${W3NCO_LIBd} ${IP_LIBd} ${SP_LIBd} ${BACIO_LIB4} ${IOMP5_LIB} - -CMD = mkgfsawps -PROFLIB = -lprof - -# To perform the default compilation, use the first line -# To compile with flowtracing turned on, use the second line -# To compile giving profile additonal information, use the third line -# WARNING: SIMULTANEOUSLY PROFILING AND FLOWTRACING IS NOT RECOMMENDED -FFLAGS = -O3 -g -convert big_endian -r8 -i4 -assume noold_ldout_format - -# Lines from here on down should not need to be changed. They are the -# actual rules which make uses to build a.out. 
-# -all: $(CMD) - -$(CMD): $(OBJS) - $(FC) -o $(LDFLAGS) $(@) $(OBJS) $(LIBS) - rm -f $(OBJS) - -# Make the profiled version of the command and call it a.out.prof -# -$(CMD).prof: $(OBJS) - $(FC) -o $(LDFLAGS) $(@) $(OBJS) $(LIBS) - -rm -f $(OBJS) - -clean: - -rm -f $(OBJS) - -clobber: clean - -rm -f $(CMD) $(CMD).prof - -void: clobber - -rm -f $(SRCS) makefile diff --git a/util/sorc/mkgfsawps.fd/makefile.hera b/util/sorc/mkgfsawps.fd/makefile.hera deleted file mode 100755 index 99052691e7..0000000000 --- a/util/sorc/mkgfsawps.fd/makefile.hera +++ /dev/null @@ -1,53 +0,0 @@ -SHELL=/bin/sh -# -SRCS= mkgfsawps.f - -OBJS= mkgfsawps.o - -# Tunable parameters -# -# FC Name of the fortran compiling system to use -# LDFLAGS Flags to the loader -# LIBS List of libraries -# CMD Name of the executable -# PROFLIB Library needed for profiling -# -FC = ifort - -LDFLAGS = -# IOMP5_LIB=/usrx/local/prod/intel/2018UP01/lib/intel64/libiomp5.a - -LIBS = -qopenmp -Xlinker --start-group ${W3NCO_LIBd} ${W3NCO_LIBd} ${IP_LIBd} ${SP_LIBd} ${BACIO_LIB4} ${IOMP5_LIB} - -CMD = mkgfsawps -PROFLIB = -lprof - -# To perform the default compilation, use the first line -# To compile with flowtracing turned on, use the second line -# To compile giving profile additonal information, use the third line -# WARNING: SIMULTANEOUSLY PROFILING AND FLOWTRACING IS NOT RECOMMENDED -FFLAGS = -O3 -g -convert big_endian -r8 -i4 -assume noold_ldout_format - -# Lines from here on down should not need to be changed. They are the -# actual rules which make uses to build a.out. -# -all: $(CMD) - -$(CMD): $(OBJS) - $(FC) -o $(LDFLAGS) $(@) $(OBJS) $(LIBS) - rm -f $(OBJS) - -# Make the profiled version of the command and call it a.out.prof -# -$(CMD).prof: $(OBJS) - $(FC) -o $(LDFLAGS) $(@) $(OBJS) $(LIBS) - -rm -f $(OBJS) - -clean: - -rm -f $(OBJS) - -clobber: clean - -rm -f $(CMD) $(CMD).prof - -void: clobber - -rm -f $(SRCS) makefile diff --git a/util/sorc/mkgfsawps.fd/makefile.wcoss_cray b/util/sorc/mkgfsawps.fd/makefile.wcoss_cray deleted file mode 100755 index b1bd05f7e9..0000000000 --- a/util/sorc/mkgfsawps.fd/makefile.wcoss_cray +++ /dev/null @@ -1,56 +0,0 @@ -SHELL=/bin/sh -# -SRCS= mkgfsawps.f - -OBJS= mkgfsawps.o - -# Tunable parameters -# -# FC Name of the fortran compiling system to use -# LDFLAGS Flags to the loader -# LIBS List of libraries -# CMD Name of the executable -# PROFLIB Library needed for profiling -# -FC = ifort - -LDFLAGS = -IOMP5_LIB=/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libiomp5.a - -LIBS = -Xlinker --start-group ${W3NCO_LIBd} ${W3NCO_LIBd} ${IP_LIBd} ${SP_LIBd} ${BACIO_LIB4} ${IOMP5_LIB} - -CMD = mkgfsawps -PROFLIB = -lprof - -# To perform the default compilation, use the first line -# To compile with flowtracing turned on, use the second line -# To compile giving profile additonal information, use the third line -# WARNING: SIMULTANEOUSLY PROFILING AND FLOWTRACING IS NOT RECOMMENDED -FFLAGS = -O3 -g -convert big_endian -r8 -i4 -assume noold_ldout_format - -#FFLAGS = -F -#FFLAGS = -Wf"-ez" - -# Lines from here on down should not need to be changed. They are the -# actual rules which make uses to build a.out. 
-# -all: $(CMD) - -$(CMD): $(OBJS) - $(FC) -o $(LDFLAGS) $(@) $(OBJS) $(LIBS) - rm -f $(OBJS) - -# Make the profiled version of the command and call it a.out.prof -# -$(CMD).prof: $(OBJS) - $(FC) -o $(LDFLAGS) $(@) $(OBJS) $(LIBS) - -rm -f $(OBJS) - -clean: - -rm -f $(OBJS) - -clobber: clean - -rm -f $(CMD) $(CMD).prof - -void: clobber - -rm -f $(SRCS) makefile diff --git a/util/sorc/mkgfsawps.fd/makefile.wcoss_dell_p3 b/util/sorc/mkgfsawps.fd/makefile.wcoss_dell_p3 deleted file mode 100755 index 86f3c417b1..0000000000 --- a/util/sorc/mkgfsawps.fd/makefile.wcoss_dell_p3 +++ /dev/null @@ -1,53 +0,0 @@ -SHELL=/bin/sh -# -SRCS= mkgfsawps.f - -OBJS= mkgfsawps.o - -# Tunable parameters -# -# FC Name of the fortran compiling system to use -# LDFLAGS Flags to the loader -# LIBS List of libraries -# CMD Name of the executable -# PROFLIB Library needed for profiling -# -FC = ifort - -LDFLAGS = -IOMP5_LIB=/usrx/local/prod/intel/2018UP01/lib/intel64/libiomp5.a - -LIBS = -Xlinker --start-group ${W3NCO_LIBd} ${W3NCO_LIBd} ${IP_LIBd} ${SP_LIBd} ${BACIO_LIB4} ${IOMP5_LIB} - -CMD = mkgfsawps -PROFLIB = -lprof - -# To perform the default compilation, use the first line -# To compile with flowtracing turned on, use the second line -# To compile giving profile additonal information, use the third line -# WARNING: SIMULTANEOUSLY PROFILING AND FLOWTRACING IS NOT RECOMMENDED -FFLAGS = -O3 -g -convert big_endian -r8 -i4 -assume noold_ldout_format - -# Lines from here on down should not need to be changed. They are the -# actual rules which make uses to build a.out. -# -all: $(CMD) - -$(CMD): $(OBJS) - $(FC) -o $(LDFLAGS) $(@) $(OBJS) $(LIBS) - rm -f $(OBJS) - -# Make the profiled version of the command and call it a.out.prof -# -$(CMD).prof: $(OBJS) - $(FC) -o $(LDFLAGS) $(@) $(OBJS) $(LIBS) - -rm -f $(OBJS) - -clean: - -rm -f $(OBJS) - -clobber: clean - -rm -f $(CMD) $(CMD).prof - -void: clobber - -rm -f $(SRCS) makefile diff --git a/util/sorc/mkgfsawps.fd/mkgfsawps.f b/util/sorc/mkgfsawps.fd/mkgfsawps.f deleted file mode 100755 index 4e4e57db3c..0000000000 --- a/util/sorc/mkgfsawps.fd/mkgfsawps.f +++ /dev/null @@ -1,511 +0,0 @@ - PROGRAM MKGFSAWPS -C$$$ MAIN PROGRAM DOCUMENTATION BLOCK -C . . . . -C MAIN PROGRAM: MKGFSAWPS -C PRGMMR: VUONG ORG: NP11 DATE: 2004-04-21 -C -C ABSTRACT: PROGRAM READS GRIB FILE FROM SPECTRAL MODEL WITH 0.5 DEGREE -C (GRID 4) OR 1 DEGREE (GRID 3) OR 2.5 DEGREE (GRID 2) RECORDS. -C UNPACKS THEM, AND CAN MAKE AWIPS GRIB GRIDS 201,202, 203, -C 204, 211, 213 and 225. THEN, ADD A TOC FLAG FIELD SEPARATOR -C BLOCK AND WMO HEADER IN FRONT OF EACH GRIB FIELD, AND WRITES -C THEM OUT TO A NEW FILE. THE OUTPUT FILE IS IN THE FORMAT -C REQUIRED FOR TOC'S FTP INPUT SERVICE, WHICH CAN BE USED TO -C DISSEMINATE THE GRIB BULLETINS. -C -C PROGRAM HISTORY LOG: -C 2004-04-21 VUONG -C 2010-05-27 VUONG INCREASED SIZE OF ARRAYS -C -C USAGE: -C INPUT FILES: -C 5 - STANDARD FORTRAN INPUT FILE. -C 11 - GRIB FILE FROM SPECTRAL MODEL WITH GRID 2 OR 3. -C 31 - CRAY GRIB INDEX FILE FOR FILE 11 -C PARM - PASS IN 4 CHARACTERS 'KWBX' WITH PARM FIELD -C -C OUTPUT FILES: (INCLUDING SCRATCH FILES) -C 6 - STANDARD FORTRAN PRINT FILE -C 51 - AWIPS GRIB GRID TYPE 201,202,203,211,213 and 225 RECORDS -C MADE FROM GRIB GRID 2, 3 OR 4 RECORDS. 
-C -C SUBPROGRAMS CALLED: (LIST ALL CALLED FROM ANYWHERE IN CODES) -C UNIQUE: - MAKWMO -C LIBRARY: -C W3LIB - W3AS00 IW3PDS W3FP11 W3UTCDAT -C W3FI63 W3FI72 W3FI83 W3TAGB GETGB GETGBP -C BACIO - BAREAD BAOPENR BAOPENW BACLOSE -C -C EXIT STATES: -C COND = 0 - SUCCESSFUL RUN -C 10 - ERROR OPENING INPUT GRIB DATA FILE -C 18 - ERROR READING CONTROL CARD FILE -C 19 - ERROR READING CONTROL CARD FILE -C 20 - ERROR OPENING OUTPUT GRIB FILE -C 30 - BULLETINS ARE MISSING -C -C REMARKS: LIST CAVEATS, OTHER HELPFUL HINTS OR INFORMATION -C -C ATTRIBUTES: -C LANGUAGE: FORTRAN 90 -C - PARAMETER (MXSIZE=2000000,MXSIZ3=MXSIZE*3) - PARAMETER (LUGI=31,LUGB=11,LUGO=51) - PARAMETER (LENHEAD=21) -C - REAL FLDI(MXSIZE) - REAL FLDV(MXSIZE) - REAL FLDO(MXSIZE),FLDVO(MXSIZE) - REAL RLAT(MXSIZE),RLON(MXSIZE) - REAL CROT(MXSIZE),SROT(MXSIZE) -C - INTEGER D(20) - INTEGER IFLD(MXSIZE) - INTEGER IBDSFL(12) - INTEGER IBMAP(MXSIZE) - INTEGER IDAWIP(200) - INTEGER JGDS(100) - INTEGER MPDS(25) - INTEGER,DIMENSION(8):: ITIME=(/0,0,0,-500,0,0,0,0/) - INTEGER KGDS(200),KGDSO(200) - INTEGER KPDS(25) - INTEGER MAPNUM(20) - INTEGER NBITS(20) - INTEGER NPARM - INTEGER NBUL - INTEGER PUNUM - INTEGER IPOPT(20) - INTEGER,DIMENSION(28):: HEXPDS -C - CHARACTER * 6 BULHED(20) - CHARACTER * 100 CPARM - CHARACTER * 17 DESC - CHARACTER * 3 EOML - CHARACTER * 1 GRIB(MXSIZ3) - CHARACTER * 1 KBUF(MXSIZ3) - CHARACTER * 4 KWBX - CHARACTER * 2 NGBFLG - CHARACTER * 1 PDS(28),GDS(400) - CHARACTER * 1 PDSL(28) - CHARACTER * 1 PDSAWIP(28) - CHARACTER * 132 TITLE - CHARACTER * 1 WMOHDR(21) - CHARACTER * 1 WFLAG - CHARACTER * 6 ENVVAR - CHARACTER * 80 FIlEB,FILEI,FILEO - CHARACTER * 1 CSEP(80) -C - LOGICAL IW3PDS - LOGICAL*1 KBMS(MXSIZE),KBMSO(MXSIZE) -C - SAVE -C - DATA IBDSFL/ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0/ - DATA IP/0/,IPOPT/1,19*0/ - DATA HEXPDS /28*0/ - DATA KM/1/ -C - CALL W3TAGB('MKGFSAWIPS',2004,0112,0112,'NP11') -C -C READ GRIB DATA AND INDEX FILE NAMES FROM THE FORT -C ENVIRONMENT VARIABLES, AND OPEN THE FILES. -C - ENVVAR='FORT ' - WRITE(ENVVAR(5:6),FMT='(I2)') LUGB - CALL GETENV(ENVVAR,FILEB) - WRITE(ENVVAR(5:6),FMT='(I2)') LUGI - CALL GETENV(ENVVAR,FILEI) - - CALL BAOPENR(LUGB,FILEB,IRET1) - IF ( IRET1 .NE. 0 ) THEN - WRITE(6,FMT='(" ERROR OPENING GRIB FILE: ",A80)') FILEB - WRITE(6,FMT='(" BAOPENR ERROR = ",I5)') IRET1 - STOP 10 - ENDIF - - CALL BAOPENR(LUGI,FILEI,IRET2) - IF ( IRET2 .NE. 0 ) THEN - WRITE(6,FMT='(" ERROR OPENING GRIB FILE: ",A80)') FILEB - WRITE(6,FMT='(" BAOPENR ERROR = ",I5)') IRET2 - STOP 10 - ENDIF -C -C READ OUTPUT GRIB BULLETIN FILE NAME FROM FORT -C ENVIRONMENT VARIABLE, AND OPEN FILE. -C - ENVVAR='FORT ' - WRITE(ENVVAR(5:6),FMT='(I2)') LUGO - CALL GETENV(ENVVAR,FILEO) - CALL BAOPENW(LUGO,FILEO,IRET3) - IF ( IRET3 .NE. 0 ) THEN - WRITE(6,FMT='(" ERROR OPENING OUTPUT GRIB FILE: ",A80)') FILEB - WRITE(6,FMT='(" BAOPENW ERROR = ",I5)') IRET3 - STOP 20 - ENDIF -C -C GET PARM FIELD WITH UP TO 100 CHARACTERS -C - CPARM = ' ' - KWBX = 'KWBC' - CALL W3AS00(NPARM,CPARM,IER) - IF (IER.EQ.0) THEN - IF (NPARM.EQ.0.OR.CPARM(1:4).EQ.' 
') THEN - PRINT *,'THERE IS A PARM FIELD BUT IT IS EMPTY' - PRINT *,'OR BLANK, I WILL USE THE DEFAULT KWBC' - ELSE - KWBX(1:4) = CPARM(1:4) - END IF - ELSE IF (IER.EQ.2.OR.IER.EQ.3) THEN - PRINT *,'W3AS00 ERROR = ',IER - PRINT *,'THERE IS NO PARM FIELD, I USED DEFAULT KWBC' - ELSE - PRINT *,'W3AS00 ERROR = ',IER - END IF - PRINT *,'NPARM = ',NPARM - PRINT *,'CPARM = ',CPARM(1:4) - PRINT *,'KWBX = ',KWBX(1:4) -C - IRET = 0 - IOPT = 2 - INSIZE = 19 - NBUL = 0 - NGBSUM = 0 -C - CALL W3UTCDAT (ITIME) -C -C LOOP TO READ UNPACKED GRIB DATA -C 28 BYTE PDS AND 65160 FLOATING POINT NUMBERS -C - NREC = 0 - DO 699 IREAD = 1,1000 - READ (*,66,END=800) (HEXPDS(J),J=1,12), - & (HEXPDS(J),J=17,20), PUNUM, NGBFLG, DESC - 66 FORMAT(3(2X,4Z2),3X,4Z2,6X,I3,1X,A2,1X,A17) -C -C CHARACTERS ON CONTROL CARD NOT 0-9, A-F, OR a-f -C ALL RECORD EXCEPT V-GRD ARE READ INTO ARRAY C -C -C EXIT LOOP, IF NO MORE BULLETINS IN INPUT CARDS -C - PDS=CHAR(HEXPDS) - IF (MOVA2I(PDS(1)) .EQ. 255) EXIT - NREC = NREC + 1 - WRITE (6,FMT='(''**************************************'', - & ''************************************************'')') - PRINT *,'START NEW RECORD NO. = ',NREC - WRITE (6,FMT='('' INPUT PDS, PUNUM, NGBFLG'', - & '' & DESC...DESIRED GRIB MAPS LISTED ON FOLLOWING '', - & ''LINES...'',/,4X,3(2X,4Z2.2),3X,4Z2.2,6X,I3,1X,A2, - & 1X,A17)') (HEXPDS(J),J=1,12), - & (HEXPDS(J),J=17,20), PUNUM, NGBFLG, DESC -C -C READ IN GRIDS TO INTERPOLATE TO -C - NGB = 0 - DO J = 1,20 - READ (*,END=710,FMT='(4X,I3,2X,I2,2X,A6,1X,I3,24X,A3)') - & MAPNUM(J),NBITS(J), BULHED(J), D(J), EOML - WRITE (6,FMT='(4X,I3,2X,I2,2X,A6,1X,I3,24X,A3)') - & MAPNUM(J),NBITS(J), BULHED(J), D(J), EOML - NGB = J - IF (EOML .EQ. 'EOM') EXIT - ENDDO -C - NGBSUM = NGBSUM + NGB - JREW = 0 - MPDS = -1 - JGDS = -1 - MPDS(3) = MOVA2I(PDS(7)) - MPDS(5) = MOVA2I(PDS(9)) - WFLAG = ' ' - IF (MPDS(5).EQ.33) THEN - WFLAG = 'U' - ELSE IF (MPDS(5).EQ.34) THEN - WFLAG = 'V' - END IF - MPDS(6) = MOVA2I(PDS(10)) - MPDS(7) = MOVA2I(PDS(11)) * 256 + MOVA2I(PDS(12)) - IF (MPDS(5).EQ.61.OR.MPDS(5).EQ.62.OR. - & MPDS(5).EQ.63) THEN - MPDS(14) = MOVA2I(PDS(19)) - MPDS(15) = MOVA2I(PDS(20)) - END IF -C -C PRINT *,'CHECK POINT BEFORE GETGB' -C IF YOU GET U-GRD, ALSO READ V-GRD INTO ARRAY FLDV -C ALL RECORD EXCEPT V-GRD ARE READ INTO ARRAY FLDI -C IF YOU GET V-GRD, READ INTO ARRAY FLDV, READ U-GRD INTO FLDI -C - IF (WFLAG.EQ.'V') MPDS(5) = 33 - CALL GETGB(LUGB,LUGI,MXSIZE,JREW,MPDS,JGDS, - & MI,KREW,KPDS,KGDS,KBMS,FLDI,IRET) - CALL GETGBP(LUGB,LUGI,MXSIZ3,KREW-1,MPDS,JGDS, - & KBYTES,KREW,KPDS,KGDS,GRIB,IRET) - IF (IRET.NE.0) THEN - IF (IRET.LT.96) PRINT *,'GETGB-W3FI63: ERROR = ',IRET - IF (IRET.EQ.96) PRINT *,'GETGB: ERROR READING INDEX FILE' - IF (IRET.EQ.97) PRINT *,'GETGB: ERROR READING GRIB FILE' - IF (IRET.EQ.98) THEN - PRINT *,'GETGB ERROR: NUM. 
OF DATA POINTS GREATER THAN JF' - END IF - IF (IRET.EQ.99) PRINT *,'GETGB ERROR: REQUEST NOT FOUND' - IF (IRET.GT.99) PRINT *,'GETGB ERROR = ',IRET - GO TO 699 - END IF - PDSL(1:28)=GRIB(9:36) - IBI=MOD(KPDS(4)/64,2) - IF (WFLAG.EQ.'U') THEN - CALL W3FP11 (GRIB,PDSL,TITLE,IER) -C -C COMPARE RECORD (GRIB) TO CONTROL CARD (PDS), THEY SHOULD MATCH -C - KEY = 2 - IF (.NOT.IW3PDS(PDSL,PDS,KEY)) THEN - PRINT 2900, IREAD, (MOVA2I(PDSL(J)),J=1,28), - * (MOVA2I(PDS(J)),J=1,28) - GO TO 699 - END IF - END IF -C -C READ V-GRD INTO ARRAY FLDV -C - IF (WFLAG.EQ.'U'.OR.WFLAG.EQ.'V') THEN - MPDS(5) = 34 - CALL GETGB(LUGB,LUGI,MXSIZE,JREW,MPDS,JGDS, - & MI,KREW,KPDS,KGDS,KBMS,FLDV,JRET) - CALL GETGBP(LUGB,LUGI,MXSIZ3,KREW-1,MPDS,JGDS, - & KBYTES,KREW,KPDS,KGDS,GRIB,JRET) - IF (JRET.NE.0) THEN - IF (JRET.LT.96) PRINT *,'GETGB-W3FI63: ERROR = ',JRET - IF (JRET.EQ.96) PRINT *,'GETGB: ERROR READING INDEX FILE' - IF (JRET.EQ.97) PRINT *,'GETGB: ERROR READING GRIB FILE' - IF (JRET.EQ.98) THEN - PRINT *,'GETGB ERROR: NUM. OF DATA POINTS GREATER THAN JF' - END IF - IF (JRET.EQ.99) PRINT *,'GETGB ERROR: REQUEST NOT FOUND' - IF (JRET.GT.99) PRINT *,'GETGB ERROR = ',JRET - GO TO 699 - END IF - IF (WFLAG.EQ.'V') THEN - CALL W3FP11 (GRIB,PDSL,TITLE,IER) - END IF - END IF - PRINT *,'RECORD NO. OF GRIB RECORD IN INPUT FILE = ',KREW -C -C COMPARE RECORD (GRIB) TO CONTROL CARD (PDS), THEY SHOULD MATCH -C - KEY = 2 - IF (WFLAG.EQ.' '.OR.WFLAG.EQ.'V') THEN - PDSL(1:28)=GRIB(9:36) - IF (.NOT.IW3PDS(PDSL,PDS,KEY)) THEN - PRINT 2900, IREAD, (MOVA2I(PDSL(J)),J=1,28), - * (MOVA2I(PDS(J)),J=1,28) -2900 FORMAT ( 1X,I4,' (PDS) IN RECORD DOES NOT MATCH (PDS) IN ', - & 'CONTROL CARD ',/,7(1X,4Z2.2), /,7(1X,4Z2.2)) - GO TO 699 - END IF - END IF -C - PRINT 2, (MOVA2I(PDSL(J)),J=1,28) - 2 FORMAT (' PDS = ',7(4Z2.2,1X)) -C - IF (WFLAG.EQ.' ') THEN - CALL W3FP11 (GRIB,PDSL,TITLE,IER) - END IF - IF (IER.NE.0) PRINT *,'W3FP11 ERROR = ',IER - PRINT *,TITLE(1:86) -C -C MASK OUT ZERO PRECIP GRIDPOINTS BEFORE INTERPOLATION -C - IF (MPDS(5).EQ.61.OR.MPDS(5).EQ.62.OR. - & MPDS(5).EQ.63) THEN - DO J=1,MI - IF ( FLDI(J).EQ.0.0 ) THEN - KBMS(J)=.FALSE. - IBI=1 - ENDIF - ENDDO - END IF -C -C PROCESS EACH GRID -C - DO 690 I = 1,NGB - - CALL MAKGDS(MAPNUM(I),KGDSO,GDS,LENGDS,IRET) - IF ( IRET.NE.0) THEN - PRINT *,' GRID ',MAPNUM(I),' NOT VALID.' - CYCLE - ENDIF - - IF (WFLAG.EQ.' ') THEN - CALL IPOLATES(IP,IPOPT,KGDS,KGDSO,MI,MXSIZE,KM,IBI,KBMS,FLDI, - * IGPTS,RLAT,RLON,IBO,KBMSO,FLDO,IRET) - ELSE - CALL IPOLATEV(IP,IPOPT,KGDS,KGDSO,MI,MXSIZE,KM,IBI,KBMS, - * FLDI,FLDV,IGPTS,RLAT,RLON,CROT,SROT, - * IBO,KBMSO,FLDO,FLDVO,IRET) - ENDIF - - IF (IRET.NE.0) THEN - PRINT *,' INTERPOLATION TO GRID ',MAPNUM(I),' FAILED.' - CYCLE - ENDIF - - IF (WFLAG.EQ.'V') THEN - FLDO=FLDVO - ENDIF -C -C CALL W3FI69 TO UNPACK PDS INTO 25 WORD INTEGER ARRAY -C - CALL W3FI69(PDSL,IDAWIP) -C -C CHANGE MODEL NUMBER AND GRID TYPE -C - IDAWIP(5) = MAPNUM(I) - IF (WFLAG.EQ.'U') IDAWIP(8) = 33 - IF (WFLAG.EQ.'V') IDAWIP(8) = 34 -C -C ZERO PRECIP GRIDPOINTS WHERE MASK WAS APPLIED BEFORE INTERPOLATION -C - IF (IDAWIP(8).EQ.61.OR.IDAWIP(8).EQ.62.OR. - & IDAWIP(8).EQ.63) THEN - IF (IBO.EQ.1) THEN - DO J=1,IGPTS - IF ( .NOT.KBMSO(J) ) THEN - KBMSO(J)=.TRUE. 
- FLDO(J)=0.0 - ENDIF - ENDDO - END IF - END IF -C -C TEST RELATIVE HUMIDITY FOR GT THAN 100.0 AND LT 0.0 -C IF SO, RESET TO 0.0 AND 100.0 -C - IF (IDAWIP(8).EQ.52) THEN - DO J = 1,IGPTS - IF (FLDO(J).GT.100.0) FLDO(J) = 100.0 - IF (FLDO(J).LT.0.0) FLDO(J) = 0.0 - END DO - END IF -C -C SET ALL NEGATIVE ACUM PCP VALUES TO 0 -C - IF (IDAWIP(8).EQ.61.OR.IDAWIP(8).EQ.62.OR. - & IDAWIP(8).EQ.63) THEN - DO J = 1,IGPTS - IF (FLDO(J).LT.0.0) FLDO(J) = 0.0 - END DO - END IF -C -C COPY OUTPUT BITMAP FROM LOGICAL TO INTEGER ARRAY FOR W3FI72 -C - IF (IBO.EQ.1) THEN - DO J=1,IGPTS - IF (KBMSO(J)) THEN - IBMAP(J)=1 - ELSE - IBMAP(J)=0 - ENDIF - ENDDO - ELSE - IBMAP=1 - ENDIF -C -C IF D VALUE EQUAL ZERO, USE D VALUE IN 1 DEGREE INPUT RECORDS, -C ELSE USE THE D VALUE -C - IF (D(I).NE.0) THEN - IDAWIP(25) = D(I) - END IF -C -C PRINT *,'W3FT69 = ',IDAWIP -C PRINT *,'CHECK POINT AFTER W3FI69' -C - IBITL = NBITS(I) - ITYPE = 0 - IGRID = MAPNUM(I) - IPFLAG = 0 - IGFLAG = 0 - IBFLAG = 0 - ICOMP = 0 - IBLEN = IGPTS - JERR = 0 -C -C GRIB AWIPS GRID 37-44 -C -C PRINT *,'CHECK POINT BEFORE W3FI72' - CALL W3FI72(ITYPE,FLDO,IFLD,IBITL, - & IPFLAG,IDAWIP,PDSAWIP, - & IGFLAG,IGRID,KGDSO,ICOMP, - & IBFLAG,IBMAP,IBLEN, - & IBDSFL, - & NPTS,KBUF,ITOT,JERR) -C PRINT *,'CHECK POINT AFTER W3FI72' - IF (JERR.NE.0) PRINT *,' W3FI72 ERROR = ',JERR - PRINT *,'NPTS, ITOT = ',NPTS,ITOT - PRINT 2, (MOVA2I(PDSAWIP(J)),J=1,28) -C -C PRINT *,'SIZE OF GRIB FIELD = ',ITOT -C -C MAKE FLAG FIELD SEPARATOR BLOCK -C - CALL MKFLDSEP(CSEP,IOPT,INSIZE,ITOT+LENHEAD,LENOUT) -C -C MAKE WMO HEADER -C - CALL MAKWMO (BULHED(I),KPDS(10),KPDS(11),KWBX,WMOHDR) -C -C WRITE OUT SEPARATOR BLOCK, ABBREVIATED WMO HEADING, -C - CALL WRYTE(LUGO,LENOUT,CSEP) - CALL WRYTE(LUGO,LENHEAD,WMOHDR) - CALL WRYTE(LUGO,ITOT,KBUF) - NBUL = NBUL + 1 - 690 CONTINUE -C - 699 CONTINUE -C-------------------------------------------------------------- -C -C CLOSING SECTION -C - 800 CONTINUE - IF (NBUL .EQ. 0 .AND. NUMFLD .EQ. 0) THEN - WRITE (6,FMT='('' SOMETHING WRONG WITH DATA CARDS...'', - & ''NOTHING WAS PROCESSED'')') - CALL W3TAGE('MKGFSAWPS') - STOP 19 - ELSE - CALL BACLOSE (LUGB,IRET) - CALL BACLOSE (LUGI,IRET) - CALL BACLOSE (LUGO,IRET) - WRITE (6,FMT='(//,'' ******** RECAP OF THIS EXECUTION '', - & ''********'',/,5X,''READ '',I6,'' INDIVIDUAL IDS'', - & /,5X,''WROTE '',I6,'' BULLETINS OUT FOR TRANSMISSION'', - & //)') NREC, NBUL -C -C TEST TO SEE IF ANY BULLETINS MISSING -C - MBUL = 0 - MBUL = NGBSUM - NBUL - IF (MBUL.NE.0) THEN - PRINT *,'BULLETINS MISSING = ',MBUL - CALL W3TAGE('MKGFSAWPS') - STOP 30 - END IF -C - CALL W3TAGE('MKGFSAWPS') - STOP - ENDIF -C -C ERROR MESSAGES -C - 710 CONTINUE - WRITE (6,FMT='('' ?*?*? CHECK DATA CARDS... 
READ IN '', - & ''GRIB PDS AND WAS EXPECTING GRIB MAP CARDS TO FOLLOW.'',/, - & '' MAKE SURE NGBFLG = ZZ OR SUPPLY '', - & ''SOME GRIB MAP DEFINITIONS!'')') - CALL W3TAGE('MKGFSAWPS') - STOP 18 -C - END diff --git a/util/sorc/overgridid.fd/compile_overgridid_wcoss.sh b/util/sorc/overgridid.fd/compile_overgridid_wcoss.sh deleted file mode 100755 index 3c55640367..0000000000 --- a/util/sorc/overgridid.fd/compile_overgridid_wcoss.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/sh - -###################################################################### -# -# Build executable : GFS utilities -# -###################################################################### - -LMOD_EXACT_MATCH=no -source ../../../sorc/machine-setup.sh > /dev/null 2>&1 -cwd=$(pwd) - -if [ "$target" = "wcoss_dell_p3" ] || [ "$target" = "wcoss_cray" ] || [ "$target" = "hera" ] ; then - echo " " - echo " You are on $target " - echo " " -elif [ "$target" = "wcoss" ] ; then - echo " " - echo " " - echo " You are on WCOSS: $target " - echo " You do not need to build GFS utilities for GFS V15.0.0 " - echo " " - echo " " - exit -else - echo " " - echo " Your machine is $target is not recognized as a WCOSS machine." - echo " The script $0 can not continue. Aborting!" - echo " " - exit -fi -echo " " - -# Load required modules -source ../../modulefiles/gfs_util.${target} -module list - -set -x - -mkdir -p ../../exec -make -mv overgridid ../../exec -make clean diff --git a/util/sorc/overgridid.fd/makefile b/util/sorc/overgridid.fd/makefile deleted file mode 100755 index 7141872bc1..0000000000 --- a/util/sorc/overgridid.fd/makefile +++ /dev/null @@ -1,8 +0,0 @@ -LIBS = ${W3NCO_LIB4} ${BACIO_LIB4} -OBJS= overgridid.o -overgridid: overgridid.f - ifort -o overgridid overgridid.f $(LIBS) -clean: - -rm -f $(OBJS) - - diff --git a/util/sorc/overgridid.fd/overgridid.f b/util/sorc/overgridid.fd/overgridid.f deleted file mode 100755 index 29b0080bf6..0000000000 --- a/util/sorc/overgridid.fd/overgridid.f +++ /dev/null @@ -1,59 +0,0 @@ - program overgridid -C$$$ MAIN PROGRAM DOCUMENTATION BLOCK -C -C MAIN PROGRAM: OVERGRIDID REPLACE iGRID ID IN A GRIB FILE -C PRGMMR: VUONG ORG: NP23 DATE: 2014-05-21 -C -C ABSTRACT: THIS PROGRAM READS AN ENTIRE GRIB FILE FROM UNIT 11 -C AND WRITES IT BACK OUT TO UNIT 51, REPLACING THE INTERNAL -C GRID ID WITH THE GRID ID READ IN VIA UNIT 5. -C -C PROGRAM HISTORY LOG: -C 1998-01-01 IREDELL -C 1998-06-17 FARLEY MODIFIED OVERDATE ROUTINE -C 1999-05-24 Gilbert - added calls to BAOPEN. -C 2014-05-21 Vuong - Modified to change grid id in a grib file -C -C INPUT FILES: -C UNIT 5 2-DIGIT MODEL ID (in base 10) -C UNIT 11 INPUT GRIB FILE = "fort.11" -C -C OUTPUT FILES: -C UNIT 51 OUTPUT GRIB FILE = "fort.51" -C -C SUBPROGRAMS CALLED: -C SKGB - Find next GRIB field -C BAREAD - Read GRIB field -C WRYTE - Read GRIB field -C -C REMARKS: -C ANY NON-GRIB INFORMATION IN THE INPUT GRIB FILE WILL BE LOST. -C AN OUTPUT LINE WILL BE WRITTEN FOR EACH GRIB MESSAGE COPIED. -C -C ATTRIBUTES: -C LANGUAGE: FORTRAN 90 -C -C$$$ - parameter(msk1=32000,msk2=4000,mgrib=10000000) - character cgrib(mgrib) -C - read *,id ! 
grid id, ie 03 for 1.0 deg grib - call baopenr(11,"fort.11",iret1) - call baopenw(51,"fort.51",iret2) -C - n=0 - iseek=0 - call skgb(11,iseek,msk1,lskip,lgrib) - dowhile(lgrib.gt.0.and.lgrib.le.mgrib) - call baread(11,lskip,lgrib,ngrib,cgrib) - if(ngrib.ne.lgrib) call exit(2) - n=n+1 - id0=mova2i(cgrib(8+7)) - cgrib(8+7)=char(id) - call wryte(51,lgrib,cgrib) - print '("msg",i6,4x,"len",i8,4x,"was",i4.2,4x,"now",i4.2)', - & n,lgrib,id0,id - iseek=lskip+lgrib - call skgb(11,iseek,msk2,lskip,lgrib) - enddo - end diff --git a/util/sorc/overgridid.fd/sample.script b/util/sorc/overgridid.fd/sample.script deleted file mode 100755 index fdfd931600..0000000000 --- a/util/sorc/overgridid.fd/sample.script +++ /dev/null @@ -1,13 +0,0 @@ -# THIS SCRIPT READ A FORECAST FILE (UNIT 11), MODIFIES PDS OCTET(8) -# TO CORRECT THE GRIB GRID ID AND RE-WRITES THE FILE TO UNIT 51. - -# STANDARD INPUT IS A 3-DIGIT INTEGER, FOR EXAMPLE 255 (User define grid) - -ln -s master.grbf06 fort.11 - -overgridid << EOF -255 -EOF - -mv fort.51 master.grbf06.new -rm fort.11 diff --git a/util/sorc/rdbfmsua.fd/MAPFILE b/util/sorc/rdbfmsua.fd/MAPFILE deleted file mode 100755 index 19e0decd71..0000000000 --- a/util/sorc/rdbfmsua.fd/MAPFILE +++ /dev/null @@ -1,4045 +0,0 @@ -Archive member included because of file (symbol) - -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flclos.o) - rdbfmsua.o (fl_clos_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flflun.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flclos.o) (fl_flun_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltbop.o) - rdbfmsua.o (fl_tbop_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltdat.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltbop.o) (fl_tdat_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltinq.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltbop.o) (fl_tinq_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(stldsp.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltdat.o) (st_ldsp_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(stlstr.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltinq.o) (st_lstr_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(strmbl.o) - rdbfmsua.o (st_rmbl_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(strmst.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltinq.o) (st_rmst_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(tbrstn.o) - rdbfmsua.o (tb_rstn_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flbksp.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltdat.o) (fl_bksp_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flinqr.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltinq.o) (fl_inqr_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flpath.o) - 
/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltinq.o) (fl_path_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flsopn.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltbop.o) (fl_sopn_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(ssenvr.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flinqr.o) (ss_envr_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(ssgsym.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(ssenvr.o) (ss_gsym_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(stlcuc.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(ssenvr.o) (st_lcuc_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(stuclc.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flinqr.o) (st_uclc_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(tbastn.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(tbrstn.o) (tb_astn_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flglun.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flsopn.o) (fl_glun_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libbridge.a(dcbsrh.o) - rdbfmsua.o (dc_bsrh_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ireadns.o) - rdbfmsua.o (ireadns_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbf.o) - rdbfmsua.o (openbf_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(posapn.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbf.o) (posapn_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(posapx.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbf.o) (posapx_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdmsgw.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(posapn.o) (rdmsgw_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readdx.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbf.o) (readdx_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readns.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ireadns.o) (readns_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readsb.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readns.o) (readsb_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(status.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbf.o) (status_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbint.o) - rdbfmsua.o (ufbint_) 
-/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbrw.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbint.o) (ufbrw_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(upb.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readsb.o) (upb_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(wrdlen.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbf.o) (wrdlen_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(writdx.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbf.o) (writdx_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(wtstat.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbf.o) (wtstat_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(adn30.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(writdx.o) (adn30_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bfrini.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbf.o) (bfrini_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bort2.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbint.o) (bort2_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bort_exit.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bort2.o) (bort_exit_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bort.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbf.o) (bort_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(conwin.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbrw.o) (conwin_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cpbfdx.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readdx.o) (cpbfdx_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(drstpl.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbrw.o) (drstpl_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(dxinit.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbf.o) (dxinit_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(dxmini.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(writdx.o) (dxmini_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(getwin.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbrw.o) (getwin_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ibfms.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbrw.o) (ibfms_) 
-/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ichkstr.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbf.o) (ichkstr_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ifxy.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bfrini.o) (ifxy_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(invcon.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(conwin.o) (invcon_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(invwin.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbrw.o) (invwin_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ipkm.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(writdx.o) (ipkm_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(irev.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(upb.o) (irev_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(iupm.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(wrdlen.o) (iupm_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lmsg.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdmsgw.o) (lmsg_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lstrpc.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(getwin.o) (lstrpc_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lstrps.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbrw.o) (lstrps_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(msgwrt.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(writdx.o) (msgwrt_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(newwin.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbrw.o) (newwin_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nmwrd.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lmsg.o) (nmwrd_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nxtwin.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbrw.o) (nxtwin_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ovrbs1.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(msgwrt.o) (ovrbs1_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(padmsg.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(msgwrt.o) (padmsg_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkb.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(writdx.o) (pkb_) 
-/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkbs1.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(msgwrt.o) (pkbs1_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkc.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(writdx.o) (pkc_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pktdd.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(dxinit.o) (pktdd_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkvs01.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bfrini.o) (pkvs01_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkvs1.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bfrini.o) (pkvs1_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdbfdx.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readdx.o) (rdbfdx_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdcmps.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readsb.o) (rdcmps_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdtree.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readsb.o) (rdtree_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdusdx.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readdx.o) (rdusdx_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readmg.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readns.o) (readmg_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(seqsdx.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdusdx.o) (seqsdx_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(stndrd.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(msgwrt.o) (stndrd_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(string.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbint.o) (string_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(strnum.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(seqsdx.o) (strnum_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(strsuc.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(strnum.o) (strsuc_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(trybump.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbint.o) (trybump_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(upbb.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdtree.o) (upbb_) 
-/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(upc.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdcmps.o) (upc_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(usrtpl.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(conwin.o) (usrtpl_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(capit.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdbfdx.o) (capit_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(chrtrna.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ichkstr.o) (chrtrna_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(chrtrn.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdbfdx.o) (chrtrn_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cktaba.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readmg.o) (cktaba_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cnved4.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(msgwrt.o) (cnved4_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(digit.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdbfdx.o) (digit_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(elemdx.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdusdx.o) (elemdx_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(getlens.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(msgwrt.o) (getlens_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(gets1loc.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkbs1.o) (gets1loc_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(i4dy.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cktaba.o) (i4dy_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(idn30.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdbfdx.o) (idn30_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(igetdate.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cktaba.o) (igetdate_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(istdesc.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(stndrd.o) (istdesc_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(iupb.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(msgwrt.o) (iupb_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(iupbs01.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(msgwrt.o) (iupbs01_) 
-/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(jstchr.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(elemdx.o) (jstchr_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(jstnum.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(elemdx.o) (jstnum_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lstjpb.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(trybump.o) (lstjpb_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(makestab.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdbfdx.o) (makestab_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(mvb.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(stndrd.o) (mvb_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemock.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdusdx.o) (nemock_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtab.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(seqsdx.o) (nemtab_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtbax.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cktaba.o) (nemtbax_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nenuaa.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdbfdx.o) (nenuaa_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nenubd.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdbfdx.o) (nenubd_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(numbck.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdusdx.o) (numbck_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(numtab.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(seqsdx.o) (numtab_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbt.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cktaba.o) (openbt_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(parstr.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(seqsdx.o) (parstr_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(parusr.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(string.o) (parusr_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(parutg.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(parusr.o) (parutg_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rcstpl.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdtree.o) (rcstpl_) 
-/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdmsgb.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readmg.o) (rdmsgb_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(restd.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(stndrd.o) (restd_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rsvfvm.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(seqsdx.o) (rsvfvm_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(strcln.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(makestab.o) (strcln_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(tabsub.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(makestab.o) (tabsub_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(uptdd.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(restd.o) (uptdd_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(wrdesc.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(restd.o) (wrdesc_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cadn30.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(restd.o) (cadn30_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(chekstab.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(makestab.o) (chekstab_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(inctab.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(tabsub.o) (inctab_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtbb.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(restd.o) (nemtbb_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtbd.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(tabsub.o) (nemtbd_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(numtbd.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(restd.o) (numtbd_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(tabent.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(tabsub.o) (tabent_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(valx.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtbb.o) (valx_) -/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rjust.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(valx.o) (rjust_) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_init.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/for_main.o (for_rtl_finish_) 
-/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_io_util.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_init.o) (for_check_env_name) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_open.o) - rdbfmsua.o (for_open) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_preconnected_units_init.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_init.o) (for__preconnected_units_create) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_reentrancy.o) - rdbfmsua.o (for_set_reentrancy) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_secnds.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_init.o) (for_since_epoch_t) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_stop.o) - rdbfmsua.o (for_stop_core) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_vm.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_init.o) (for__set_signal_ops_during_vm) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wint_fmt.o) - rdbfmsua.o (for_write_int_fmt) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wseq_fmt.o) - rdbfmsua.o (for_write_seq_fmt) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wseq_lis.o) - rdbfmsua.o (for_write_seq_lis) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_aio.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_preconnected_units_init.o) (for__aio_lub_table) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_open_proc.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_open.o) (for__reopen_file) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_aio_wrap.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_aio.o) (for__aio_pthread_self) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_text_to_int.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_open_proc.o) (cvt_text_to_integer) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_vax_f.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_io_util.o) (cvt_vax_f_to_ieee_single) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_vax_d.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_io_util.o) (cvt_vax_d_to_ieee_double) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_vax_g.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_io_util.o) (cvt_vax_g_to_ieee_double) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cray.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_io_util.o) (cvt_cray_to_ieee_double) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_ibm_short.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_io_util.o) (cvt_ibm_short_to_ieee_single) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_ibm_long.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_io_util.o) (cvt_ibm_long_to_ieee_double) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_ieee_double.o) - 
/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_io_util.o) (cvt_ieee_double_to_cray) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_ieee_single.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_io_util.o) (cvt_ieee_single_to_ibm_short) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_close.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_open_proc.o) (for__close_default) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_close_proc.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_aio.o) (for__close_proc) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_default_io_sizes_env_init.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_init.o) (for__default_io_sizes_env_init) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_desc_item.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wint_fmt.o) (for__desc_ret_item) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_diags_intel.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_open.o) (for__io_return) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_exit.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_reentrancy.o) (for_exit) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_exit_handler.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_init.o) (for__exit_handler) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_fmt_comp.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wint_fmt.o) (for__format_compiler) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_fmt_val.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wint_fmt.o) (for__format_value) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_get.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_open_proc.o) (for__get_s) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_intrp_fmt.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wint_fmt.o) (for__interp_fmt) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_ldir_wfs.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wseq_lis.o) (for__wfs_table) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_lub_mgt.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_open.o) (for__acquire_lun) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_need_lf.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_open_proc.o) (for__add_to_lf_table) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_put.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_stop.o) (for__put_su) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wseq.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_close_proc.o) (for__finish_ufseq_write) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(tbk_traceback.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_diags_intel.o) (tbk_stack_trace) 
-/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt__globals.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_ieee_double.o) (vax_c) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_int_to_text.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_fmt_val.o) (cvt_integer_to_text) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_data_to_text.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_fmt_val.o) (cvt_data_to_text) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_log_to_text.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_fmt_val.o) (cvt_boolean_to_text_ex) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_text_to_data.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_fmt_val.o) (cvt_text_to_data) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_text_to_log.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_fmt_val.o) (cvt_text_to_boolean) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cvtas_t.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_fmt_val.o) (cvt_ieee_t_to_text_ex) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cvtas_s.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_fmt_val.o) (cvt_ieee_s_to_text_ex) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cvtas_x.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_fmt_val.o) (cvt_ieee_x_to_text_ex) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_a_to_s.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cvtas_s.o) (cvtas_a_to_s) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_a_to_t.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cvtas_t.o) (cvtas_a_to_t) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_s_to_a.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cvtas_s.o) (cvtas_s_to_a) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_t_to_a.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cvtas_t.o) (cvtas_t_to_a) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_nan_s.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_a_to_s.o) (cvtas__nan_s) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_nan_t.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_a_to_t.o) (cvtas__nan_t) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_a_to_x.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cvtas_x.o) (cvtas_a_to_x) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_x_to_a.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cvtas_x.o) (cvtas_x_to_a) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_nan_x.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_a_to_x.o) (cvtas__nan_x) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_globals.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_a_to_t.o) (cvtas_pten_word) 
-/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_pow_ten_53.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_a_to_s.o) (cvtas_pten_t) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_pow_ten_64.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_a_to_t.o) (cvtas_pten_64) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_pow_ten_128.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_s_to_a.o) (cvtas_pten_128) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(fetestexcept.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_stop.o) (fetestexcept) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lroundf_stub.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(parusr.o) (lroundf) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lround_stub.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(trybump.o) (lround) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lround_ct.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lround_stub.o) (lround.L) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lroundf_ct.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lroundf_stub.o) (lroundf.L) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lroundf_gen.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lroundf_stub.o) (lroundf.A) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lround_gen.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lround_stub.o) (lround.A) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(libm_error.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lround_ct.o) (__libm_error_support) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(matherrf.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(libm_error.o) (matherrf) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(matherrl.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(libm_error.o) (matherrl) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(matherr.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(libm_error.o) (matherr) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(ints2q.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_secnds.o) (__jtoq) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(qcomp.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_secnds.o) (__neq) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fp2q.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_secnds.o) (__dtoq) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(q2fp.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_secnds.o) (__qtof) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(tbk_display.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(tbk_traceback.o) (tbk_string_stack_signal_impl) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(tbk_backtrace.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(tbk_display.o) (tbk_getPC) 
-/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(cpu_feature_disp.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lroundf_stub.o) (__intel_cpu_features_init_x) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fastmemcpy.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_io_util.o) (_intel_fast_memcpy) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fastmemmove.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_init.o) (_intel_fast_memmove) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fastmemset.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_io_util.o) (_intel_fast_memset) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(new_proc_init.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/for_main.o (__intel_new_feature_proc_init) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(ia32_addsubq.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_secnds.o) (__addq) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(ia32_divq.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_secnds.o) (__divq) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strcpy.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_aio.o) (__intel_sse2_strcpy) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strncpy.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_open.o) (__intel_sse2_strncpy) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strlen.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_open.o) (__intel_sse2_strlen) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strchr.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_open.o) (__intel_sse2_strchr) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strncmp.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_diags_intel.o) (__intel_sse2_strncmp) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strcat.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_open_proc.o) (__intel_sse2_strcat) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strncat.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_open_proc.o) (__intel_sse2_strncat) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fast_memcpy_pp.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fastmemcpy.o) (__intel_new_memcpy) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fast_memset_pp.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fastmemset.o) (__intel_new_memset) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(intel_ssse3_memcpy.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fastmemcpy.o) (__intel_ssse3_memcpy) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(intel_ssse3_rep_memcpy.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fastmemcpy.o) (__intel_ssse3_rep_memcpy) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(intel_ssse3_memmove.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fastmemmove.o) (__intel_ssse3_memmove) 
-/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(intel_ssse3_rep_memmove.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fastmemmove.o) (__intel_ssse3_rep_memmove) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(irc_msg_support.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(tbk_backtrace.o) (__libirc_get_msg) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fast_mem_ops.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fast_memset_pp.o) (__libirc_largest_cache_size) -/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(proc_init_utils.o) - /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(new_proc_init.o) (__intel_proc_init_ftzdazule) -/usr/lib64/libc_nonshared.a(elf-init.oS) - /usr/lib/../lib64/crt1.o (__libc_csu_fini) -/opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2//libgcc.a(_powidf2.o) - /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdcmps.o) (__powidf2) - -Allocating common symbols -Common symbol size file - -utgprm_ 0x4 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(parutg.o) -maxcmp_ 0x18 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bfrini.o) -msgstd_ 0x1 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bfrini.o) -thread_count_mutex 0x28 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_aio.o) -reptab_ 0x64 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bfrini.o) -stbfr_ 0x100 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbf.o) -hrdwrd_ 0x2c /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdmsgw.o) -bitbuf_ 0x192dd8 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readsb.o) -usrbit_ 0x27100 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdtree.o) -stcach_ 0x4844c /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(string.o) -bufrmg_ 0xc354 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bfrini.o) -msgcmp_ 0x1 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bfrini.o) -nulbfr_ 0x80 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbf.o) -threads_in_flight_mutex - 0x28 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_aio.o) -usrint_ 0x753080 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbint.o) -acmode_ 0x4 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bfrini.o) -s01cm_ 0x7c /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(msgwrt.o) -gmbdta_ 0x1c4 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flflun.o) -for__pthread_mutex_unlock_ptr - 0x8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_aio.o) -for__a_argv 0x8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_init.o) -for__pthread_mutex_init_ptr - 0x8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_aio.o) -charac_ 0x804 
/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(wrdlen.o) -stords_ 0x1f40 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(string.o) -bufrsr_ 0xc3f8 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bfrini.o) -tabccc_ 0x10 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(tabsub.o) -unptyp_ 0x80 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readsb.o) -msgfmt_ 0x80 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbf.o) -dateln_ 0x4 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bfrini.o) -sect01_ 0x7c /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(msgwrt.o) -message_catalog 0x8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_diags_intel.o) -tables_ 0x13d628 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readns.o) -mrgcom_ 0x10 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bfrini.o) -padesc_ 0x14 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bfrini.o) -usrtmp_ 0x16e3600 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rcstpl.o) -dxtab_ 0x300 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(writdx.o) -for__pthread_mutex_lock_ptr - 0x8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_aio.o) -tababd_ 0xbbe58c /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(writdx.o) -usrstr_ 0xd0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbint.o) -msgcwd_ 0x280 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbf.o) -for__l_argc 0x4 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_init.o) -quiet_ 0x4 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbf.o) -for__aio_lub_table 0x400 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_aio.o) - -Discarded input sections - - .note.GNU-stack - 0x0000000000000000 0x0 /usr/lib/../lib64/crt1.o - .note.GNU-stack - 0x0000000000000000 0x0 /usr/lib/../lib64/crti.o - .note.GNU-stack - 0x0000000000000000 0x0 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2/crtbegin.o - .note.GNU-stack - 0x0000000000000000 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/for_main.o - .note.GNU-stack - 0x0000000000000000 0x0 rdbfmsua.o - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flclos.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flflun.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltbop.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltdat.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltinq.o) - .note.GNU-stack - 0x0000000000000000 0x0 
/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(stldsp.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(stlstr.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(strmbl.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(strmst.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(tbrstn.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flbksp.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flinqr.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flpath.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flsopn.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(ssenvr.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(ssgsym.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(stlcuc.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(stuclc.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(tbastn.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flglun.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libbridge.a(dcbsrh.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ireadns.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbf.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(posapn.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(posapx.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdmsgw.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readdx.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readns.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readsb.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(status.o) - .note.GNU-stack - 0x0000000000000000 0x0 
/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbint.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbrw.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(upb.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(wrdlen.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(writdx.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(wtstat.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(adn30.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bfrini.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bort2.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bort_exit.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bort.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(conwin.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cpbfdx.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(drstpl.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(dxinit.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(dxmini.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(getwin.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ibfms.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ichkstr.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ifxy.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(invcon.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(invwin.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ipkm.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(irev.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(iupm.o) - .note.GNU-stack - 0x0000000000000000 0x0 
/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lmsg.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lstrpc.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lstrps.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(msgwrt.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(newwin.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nmwrd.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nxtwin.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ovrbs1.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(padmsg.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkb.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkbs1.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkc.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pktdd.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkvs01.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkvs1.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdbfdx.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdcmps.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdtree.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdusdx.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readmg.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(seqsdx.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(stndrd.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(string.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(strnum.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(strsuc.o) - .note.GNU-stack - 0x0000000000000000 0x0 
/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(trybump.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(upbb.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(upc.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(usrtpl.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(capit.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(chrtrna.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(chrtrn.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cktaba.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cnved4.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(digit.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(elemdx.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(getlens.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(gets1loc.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(i4dy.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(idn30.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(igetdate.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(istdesc.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(iupb.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(iupbs01.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(jstchr.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(jstnum.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lstjpb.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(makestab.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(mvb.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemock.o) - .note.GNU-stack - 0x0000000000000000 0x0 
/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtab.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtbax.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nenuaa.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nenubd.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(numbck.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(numtab.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbt.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(parstr.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(parusr.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(parutg.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rcstpl.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdmsgb.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(restd.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rsvfvm.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(strcln.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(tabsub.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(uptdd.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(wrdesc.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cadn30.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(chekstab.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(inctab.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtbb.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtbd.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(numtbd.o) - .note.GNU-stack - 0x0000000000000000 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(tabent.o) - .note.GNU-stack - 0x0000000000000000 0x0 
[... remaining .note.GNU-stack entries of the deleted GNU ld linker map for the rdbfmsua executable: objects pulled from libncepBUFR.a (valx.o, rjust.o), the Intel Fortran runtime (libifcore.a, libimf.a, libirc.a), libgcc, crtend.o, crtn.o, and libc_nonshared ...]
-
-Memory Configuration
-
-Name             Origin              Length              Attributes
-*default*        0x0000000000000000  0xffffffffffffffff
-
-Linker script and memory map
-
[... LOAD entries: crt1.o, crti.o, crtbegin.o, for_main.o, rdbfmsua.o, libdecod_ut_v1.0.0.a, the GEMPAK libraries (libgemlib, libappl, libsyslib, libcgemlib, libbridge, libncepBUFR, libm), the Intel runtime libraries (libifport, libifcore, libimf, libsvml, libipgo, libirc, libirc_s), and the system libraries (libgfortran, libpthread, libc, libdl, ld-linux, libgcc, libgcc_s, crtend.o, crtn.o) ...]
[... output section map: .interp, .note.ABI-tag, .note.SuSE, .note.gnu.build-id, .hash, .gnu.hash, .dynsym, .dynstr, .gnu.version*, .rela.dyn, .rela.plt, .init, and .plt (PLT stubs for the glibc, libgfortran, and GCC unwind symbols), each listed with its address, size, and contributing object file ...]
[... .text section map: _start from crt1.o, main from for_main.o, MAIN__ from rdbfmsua.o, the GEMPAK fl_*, st_*, ss_*, and tb_* routines from libgemlib.a, dc_bsrh_ from libbridge.a, the NCEP BUFR library routines from libncepBUFR.a (openbf_, readns_, readsb_, ufbint_, msgwrt_, makestab_, ...), and the Intel Fortran runtime and support routines from libifcore.a, libimf.a, and libirc.a, each listed with its address and size ...]
- 0x00000000004b51a0 __cacheSize - .text 0x00000000004b5d80 0xb0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(proc_init_utils.o) - 0x00000000004b5d80 __intel_proc_init_ftzdazule - .text 0x00000000004b5e30 0x99 /usr/lib64/libc_nonshared.a(elf-init.oS) - 0x00000000004b5e30 __libc_csu_fini - 0x00000000004b5e40 __libc_csu_init - *fill* 0x00000000004b5ec9 0x7 - .text 0x00000000004b5ed0 0x51 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2//libgcc.a(_powidf2.o) - 0x00000000004b5ed0 __powidf2 - *fill* 0x00000000004b5f21 0xf - .text 0x00000000004b5f30 0x36 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2/crtend.o - *fill* 0x00000000004b5f66 0x2 - .text 0x00000000004b5f68 0x0 /usr/lib/../lib64/crtn.o - *(.gnu.warning) - -.fini 0x00000000004b5f68 0x16 - *(SORT(.fini)) - .fini 0x00000000004b5f68 0x10 /usr/lib/../lib64/crti.o - 0x00000000004b5f68 _fini - .fini 0x00000000004b5f78 0x5 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2/crtbegin.o - .fini 0x00000000004b5f7d 0x1 /usr/lib/../lib64/crtn.o - 0x00000000004b5f7e PROVIDE (__etext, .) - 0x00000000004b5f7e PROVIDE (_etext, .) - 0x00000000004b5f7e PROVIDE (etext, .) - -.rodata 0x00000000004b5f80 0x1a5e8 - *(.rodata .rodata.* .gnu.linkonce.r.*) - .rodata.cst4 0x00000000004b5f80 0x4 /usr/lib/../lib64/crt1.o - 0x00000000004b5f80 _IO_stdin_used - *fill* 0x00000000004b5f84 0x4 - .rodata 0x00000000004b5f88 0x240 rdbfmsua.o - .rodata.str1.4 - 0x00000000004b61c8 0x1c2 rdbfmsua.o - 0x208 (size before relaxing) - *fill* 0x00000000004b638a 0x6 - .rodata 0x00000000004b6390 0x50 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flclos.o) - .rodata 0x00000000004b63e0 0x55 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltdat.o) - .rodata 0x00000000004b6435 0x21 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltinq.o) - .rodata 0x00000000004b6456 0x1 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(stldsp.o) - *fill* 0x00000000004b6457 0x1 - .rodata 0x00000000004b6458 0xc /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(tbrstn.o) - *fill* 0x00000000004b6464 0x4 - .rodata 0x00000000004b6468 0x50 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flbksp.o) - .rodata 0x00000000004b64b8 0x58 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flinqr.o) - .rodata 0x00000000004b6510 0x1 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flpath.o) - *fill* 0x00000000004b6511 0x7 - .rodata 0x00000000004b6518 0x5d /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flsopn.o) - .rodata 0x00000000004b6575 0x3 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(ssenvr.o) - .rodata 0x00000000004b6578 0xc4 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(tbastn.o) - *fill* 0x00000000004b663c 0x4 - .rodata 0x00000000004b6640 0x3a2 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbf.o) - *fill* 0x00000000004b69e2 0x6 - .rodata 0x00000000004b69e8 0x76 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(posapn.o) - *fill* 0x00000000004b6a5e 0x2 - .rodata 0x00000000004b6a60 0x3e /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(posapx.o) - *fill* 
0x00000000004b6a9e 0x2 - .rodata 0x00000000004b6aa0 0x14 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdmsgw.o) - *fill* 0x00000000004b6ab4 0x4 - .rodata 0x00000000004b6ab8 0x3bb /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readdx.o) - *fill* 0x00000000004b6e73 0x5 - .rodata 0x00000000004b6e78 0x97 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readns.o) - *fill* 0x00000000004b6f0f 0x1 - .rodata 0x00000000004b6f10 0xe8 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readsb.o) - .rodata 0x00000000004b6ff8 0x5c /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(status.o) - *fill* 0x00000000004b7054 0x4 - .rodata 0x00000000004b7058 0x478 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbint.o) - .rodata 0x00000000004b74d0 0x7c /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbrw.o) - *fill* 0x00000000004b754c 0x4 - .rodata 0x00000000004b7550 0x331 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(wrdlen.o) - *fill* 0x00000000004b7881 0x7 - .rodata 0x00000000004b7888 0xad /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(writdx.o) - *fill* 0x00000000004b7935 0x3 - .rodata 0x00000000004b7938 0x1fc /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(wtstat.o) - *fill* 0x00000000004b7b34 0x4 - .rodata 0x00000000004b7b38 0xe4 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(adn30.o) - .rodata 0x00000000004b7c1c 0x2c /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bfrini.o) - .rodata 0x00000000004b7c48 0x41 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bort2.o) - *fill* 0x00000000004b7c89 0x7 - .rodata 0x00000000004b7c90 0x41 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bort.o) - *fill* 0x00000000004b7cd1 0x3 - .rodata 0x00000000004b7cd4 0x4 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cpbfdx.o) - .rodata 0x00000000004b7cd8 0xc /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(drstpl.o) - .rodata 0x00000000004b7ce4 0x8 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(dxinit.o) - *fill* 0x00000000004b7cec 0x4 - .rodata 0x00000000004b7cf0 0xea /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(dxmini.o) - *fill* 0x00000000004b7dda 0x6 - .rodata 0x00000000004b7de0 0x57 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(getwin.o) - *fill* 0x00000000004b7e37 0x9 - .rodata 0x00000000004b7e40 0x10 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ibfms.o) - .rodata 0x00000000004b7e50 0x11 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ifxy.o) - *fill* 0x00000000004b7e61 0x7 - .rodata 0x00000000004b7e68 0x82 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(invcon.o) - *fill* 0x00000000004b7eea 0x6 - .rodata 0x00000000004b7ef0 0x82 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(invwin.o) - *fill* 
0x00000000004b7f72 0x6 - .rodata 0x00000000004b7f78 0x86 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ipkm.o) - *fill* 0x00000000004b7ffe 0x2 - .rodata 0x00000000004b8000 0x87 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(iupm.o) - *fill* 0x00000000004b8087 0x1 - .rodata 0x00000000004b8088 0xd0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lstrpc.o) - .rodata 0x00000000004b8158 0xd0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lstrps.o) - .rodata 0x00000000004b8228 0x144 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(msgwrt.o) - *fill* 0x00000000004b836c 0x4 - .rodata 0x00000000004b8370 0x82 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(newwin.o) - .rodata 0x00000000004b83f2 0x4 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nmwrd.o) - *fill* 0x00000000004b83f6 0x2 - .rodata 0x00000000004b83f8 0x82 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nxtwin.o) - *fill* 0x00000000004b847a 0x6 - .rodata 0x00000000004b8480 0xeb /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ovrbs1.o) - *fill* 0x00000000004b856b 0x5 - .rodata 0x00000000004b8570 0x6c /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(padmsg.o) - *fill* 0x00000000004b85dc 0x4 - .rodata 0x00000000004b85e0 0xc5 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkbs1.o) - *fill* 0x00000000004b86a5 0x3 - .rodata 0x00000000004b86a8 0x8 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkc.o) - .rodata 0x00000000004b86b0 0x11c /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pktdd.o) - *fill* 0x00000000004b87cc 0x4 - .rodata 0x00000000004b87d0 0x80 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkvs01.o) - .rodata 0x00000000004b8850 0xfc /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkvs1.o) - *fill* 0x00000000004b894c 0x4 - .rodata 0x00000000004b8950 0x37b /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdbfdx.o) - *fill* 0x00000000004b8ccb 0x1 - .rodata 0x00000000004b8ccc 0x8 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdcmps.o) - *fill* 0x00000000004b8cd4 0x4 - .rodata 0x00000000004b8cd8 0x494 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdusdx.o) - *fill* 0x00000000004b916c 0x4 - .rodata 0x00000000004b9170 0xde /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readmg.o) - *fill* 0x00000000004b924e 0x2 - .rodata 0x00000000004b9250 0x662 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(seqsdx.o) - *fill* 0x00000000004b98b2 0x6 - .rodata 0x00000000004b98b8 0x24c /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(stndrd.o) - *fill* 0x00000000004b9b04 0x4 - .rodata 0x00000000004b9b08 0xf2 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(string.o) - *fill* 0x00000000004b9bfa 0x6 - .rodata 0x00000000004b9c00 0x84 
/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(strnum.o) - *fill* 0x00000000004b9c84 0x4 - .rodata 0x00000000004b9c88 0xb2 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(strsuc.o) - *fill* 0x00000000004b9d3a 0x2 - .rodata 0x00000000004b9d3c 0x8 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(trybump.o) - .rodata 0x00000000004b9d44 0x8 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(upc.o) - *fill* 0x00000000004b9d4c 0x4 - .rodata 0x00000000004b9d50 0x4f0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(usrtpl.o) - .rodata 0x00000000004ba240 0x8 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(chrtrna.o) - .rodata 0x00000000004ba248 0x274 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cktaba.o) - *fill* 0x00000000004ba4bc 0x4 - .rodata 0x00000000004ba4c0 0xf5 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cnved4.o) - *fill* 0x00000000004ba5b5 0x3 - .rodata 0x00000000004ba5b8 0x14a /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(elemdx.o) - *fill* 0x00000000004ba702 0x2 - .rodata 0x00000000004ba704 0x10 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(getlens.o) - .rodata 0x00000000004ba714 0x4a /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(gets1loc.o) - *fill* 0x00000000004ba75e 0x2 - .rodata 0x00000000004ba760 0x148 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(idn30.o) - .rodata 0x00000000004ba8a8 0x10 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(igetdate.o) - .rodata 0x00000000004ba8b8 0x18 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(istdesc.o) - .rodata 0x00000000004ba8d0 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(iupbs01.o) - .rodata 0x00000000004ba8f8 0x1 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(jstchr.o) - *fill* 0x00000000004ba8f9 0x7 - .rodata 0x00000000004ba900 0xfa /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(jstnum.o) - *fill* 0x00000000004ba9fa 0x6 - .rodata 0x00000000004baa00 0xd0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lstjpb.o) - .rodata 0x00000000004baad0 0x170 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(makestab.o) - .rodata 0x00000000004bac40 0x78 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(mvb.o) - .rodata 0x00000000004bacb8 0x1f /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtab.o) - *fill* 0x00000000004bacd7 0x1 - .rodata 0x00000000004bacd8 0xb0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtbax.o) - .rodata 0x00000000004bad88 0xbb /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nenuaa.o) - *fill* 0x00000000004bae43 0x5 - .rodata 0x00000000004bae48 0x16b /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nenubd.o) - .rodata 0x00000000004bafb3 0x13 
/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(numbck.o) - *fill* 0x00000000004bafc6 0x2 - .rodata 0x00000000004bafc8 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(numtab.o) - *fill* 0x00000000004bafef 0x1 - .rodata 0x00000000004baff0 0xca /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbt.o) - *fill* 0x00000000004bb0ba 0x6 - .rodata 0x00000000004bb0c0 0x181 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(parstr.o) - *fill* 0x00000000004bb241 0x7 - .rodata 0x00000000004bb248 0x363 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(parusr.o) - *fill* 0x00000000004bb5ab 0x5 - .rodata 0x00000000004bb5b0 0x1e4 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(parutg.o) - *fill* 0x00000000004bb794 0x4 - .rodata 0x00000000004bb798 0xf0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rcstpl.o) - .rodata 0x00000000004bb888 0x18 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdmsgb.o) - .rodata 0x00000000004bb8a0 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(restd.o) - *fill* 0x00000000004bb8c7 0x1 - .rodata 0x00000000004bb8c8 0x3f6 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(tabsub.o) - *fill* 0x00000000004bbcbe 0x2 - .rodata 0x00000000004bbcc0 0x68 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(uptdd.o) - .rodata 0x00000000004bbd28 0x47 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(wrdesc.o) - *fill* 0x00000000004bbd6f 0x1 - .rodata 0x00000000004bbd70 0x4 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cadn30.o) - *fill* 0x00000000004bbd74 0x4 - .rodata 0x00000000004bbd78 0x10e /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(chekstab.o) - *fill* 0x00000000004bbe86 0x2 - .rodata 0x00000000004bbe88 0x6e /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(inctab.o) - *fill* 0x00000000004bbef6 0xa - .rodata 0x00000000004bbf00 0x290 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtbb.o) - .rodata 0x00000000004bc190 0x2b7 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtbd.o) - .rodata 0x00000000004bc447 0x1 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(numtbd.o) - .rodata 0x00000000004bc448 0xb1 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(tabent.o) - *fill* 0x00000000004bc4f9 0x7 - .rodata 0x00000000004bc500 0x108 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(valx.o) - .rodata 0x00000000004bc608 0x8 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rjust.o) - .rodata.str1.4 - 0x00000000004bc610 0xd0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_init.o) - .rodata 0x00000000004bc6e0 0x160 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_init.o) - .rodata.str1.32 - 0x00000000004bc840 0x1ecd /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_init.o) - 0x1ee0 (size before relaxing) - 
*fill* 0x00000000004be70d 0x13 - .rodata 0x00000000004be720 0x3c0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_io_util.o) - .rodata.str1.4 - 0x00000000004beae0 0x3 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_io_util.o) - 0x4 (size before relaxing) - *fill* 0x00000000004beae3 0x1 - .rodata.str1.4 - 0x00000000004beae4 0x3cd /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_open.o) - 0x3f0 (size before relaxing) - *fill* 0x00000000004beeb1 0xf - .rodata 0x00000000004beec0 0x1980 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_open.o) - .rodata.str1.4 - 0x00000000004c0840 0x7 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_preconnected_units_init.o) - 0x8 (size before relaxing) - *fill* 0x00000000004c0847 0x19 - .rodata 0x00000000004c0860 0x80 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_reentrancy.o) - .rodata 0x00000000004c08e0 0x70 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_secnds.o) - .rodata.str1.4 - 0x00000000004c0950 0xb /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_stop.o) - 0x14 (size before relaxing) - *fill* 0x00000000004c095b 0x5 - .rodata 0x00000000004c0960 0x200 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_stop.o) - .rodata.str1.4 - 0x00000000004c0b60 0x53 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wint_fmt.o) - 0x64 (size before relaxing) - *fill* 0x00000000004c0bb3 0xd - .rodata 0x00000000004c0bc0 0x220 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wint_fmt.o) - .rodata.str1.4 - 0x00000000004c0de0 0xf /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wseq_fmt.o) - 0x24 (size before relaxing) - *fill* 0x00000000004c0def 0x1 - .rodata 0x00000000004c0df0 0x2e0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wseq_fmt.o) - .rodata 0x00000000004c10d0 0x4b0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wseq_lis.o) - .rodata.str1.4 - 0x00000000004c1580 0xf /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wseq_lis.o) - 0x18 (size before relaxing) - *fill* 0x00000000004c158f 0x1 - .rodata.str1.4 - 0x00000000004c1590 0xf2 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_aio.o) - 0x100 (size before relaxing) - *fill* 0x00000000004c1682 0x2 - .rodata.str1.4 - 0x00000000004c1684 0xb0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_open_proc.o) - 0x104 (size before relaxing) - *fill* 0x00000000004c1734 0x4 - .rodata 0x00000000004c1738 0x310 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_open_proc.o) - .rodata 0x00000000004c1a48 0x10 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_text_to_int.o) - .rodata 0x00000000004c1a58 0x198 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_vax_f.o) - .rodata 0x00000000004c1bf0 0x198 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_vax_d.o) - .rodata 0x00000000004c1d88 0x198 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_vax_g.o) - .rodata 0x00000000004c1f20 0x330 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cray.o) - .rodata 0x00000000004c2250 0x198 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_ibm_short.o) - .rodata 0x00000000004c23e8 0x198 
/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_ibm_long.o) - .rodata 0x00000000004c2580 0x7f8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_ieee_double.o) - .rodata 0x00000000004c2d78 0x4c8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_ieee_single.o) - .rodata.str1.4 - 0x0000000000000000 0x8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_close.o) - .rodata 0x00000000004c3240 0x48 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_close_proc.o) - .rodata.str1.4 - 0x00000000004c3288 0x66 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_close_proc.o) - 0x68 (size before relaxing) - *fill* 0x00000000004c32ee 0x2 - .rodata.str1.4 - 0x00000000004c32f0 0x43 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_default_io_sizes_env_init.o) - 0x44 (size before relaxing) - *fill* 0x00000000004c3333 0xd - .rodata 0x00000000004c3340 0x200 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_desc_item.o) - 0x00000000004c34a0 for__dsc_itm_table - .rodata.str1.4 - 0x00000000004c3540 0x10 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_desc_item.o) - .rodata.str1.4 - 0x00000000004c3550 0x3c41 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_diags_intel.o) - 0x3cc4 (size before relaxing) - *fill* 0x00000000004c7191 0x7 - .rodata 0x00000000004c7198 0x58 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_diags_intel.o) - *fill* 0x00000000004c71f0 0x10 - .rodata.str1.32 - 0x00000000004c7200 0x1644 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_diags_intel.o) - 0x1660 (size before relaxing) - .rodata.str1.4 - 0x00000000004c8844 0x13 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_exit_handler.o) - 0x14 (size before relaxing) - *fill* 0x00000000004c8857 0x9 - .rodata 0x00000000004c8860 0x3a0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_fmt_comp.o) - .rodata.str1.4 - 0x00000000004c8c00 0xf /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_fmt_comp.o) - 0x10 (size before relaxing) - *fill* 0x00000000004c8c0f 0x1 - .rodata 0x00000000004c8c10 0x1100 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_fmt_val.o) - .rodata 0x00000000004c9d10 0x48 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_get.o) - .rodata.str1.4 - 0x00000000004c9d58 0xa /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_get.o) - 0xc (size before relaxing) - *fill* 0x00000000004c9d62 0x1e - .rodata 0x00000000004c9d80 0xbe0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_intrp_fmt.o) - 0x00000000004ca300 for__oz_fmt_table - 0x00000000004ca360 for__b_fmt_table - 0x00000000004ca400 for__fedg_fmt_table - 0x00000000004ca4e0 for__coerce_data_types - 0x00000000004ca943 for__i_fmt_table - .rodata.str1.4 - 0x00000000004ca960 0x10 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_intrp_fmt.o) - *fill* 0x00000000004ca970 0x10 - .rodata 0x00000000004ca980 0x240 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_ldir_wfs.o) - 0x00000000004ca980 for__wfs_table - 0x00000000004caaa0 for__wfs_msf_table - .rodata 0x00000000004cabc0 0x2d0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_lub_mgt.o) - .rodata.str1.4 - 0x00000000004cae90 0xe 
/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_lub_mgt.o) - 0x10 (size before relaxing) - .rodata.str1.4 - 0x0000000000000000 0x8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_need_lf.o) - *fill* 0x00000000004cae9e 0x2 - .rodata 0x00000000004caea0 0x60 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_need_lf.o) - .rodata 0x00000000004caf00 0x210 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_put.o) - .rodata.str1.4 - 0x00000000004cb110 0xa /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_put.o) - 0xc (size before relaxing) - *fill* 0x00000000004cb11a 0x2 - .rodata.str1.4 - 0x00000000004cb11c 0xb /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wseq.o) - 0x14 (size before relaxing) - *fill* 0x00000000004cb127 0x1 - .rodata 0x00000000004cb128 0x248 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wseq.o) - .rodata.str1.4 - 0x00000000004cb370 0x1ef /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(tbk_traceback.o) - 0x1fc (size before relaxing) - *fill* 0x00000000004cb55f 0x1 - .rodata.str1.32 - 0x00000000004cb560 0xb93 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(tbk_traceback.o) - 0xba0 (size before relaxing) - *fill* 0x00000000004cc0f3 0xd - .rodata 0x00000000004cc100 0x1c0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt__globals.o) - 0x00000000004cc100 vax_c - 0x00000000004cc140 ieee_t - 0x00000000004cc1b0 ieee_s - 0x00000000004cc1e8 ibm_s - 0x00000000004cc204 ibm_l - 0x00000000004cc23c cray - 0x00000000004cc274 int_c - .rodata 0x00000000004cc2c0 0x20 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_int_to_text.o) - .rodata.str1.4 - 0x00000000004cc2e0 0x11 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_int_to_text.o) - 0x14 (size before relaxing) - .rodata.str1.4 - 0x0000000000000000 0x14 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_data_to_text.o) - *fill* 0x00000000004cc2f1 0xf - .rodata 0x00000000004cc300 0x20 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_data_to_text.o) - .rodata 0x00000000004cc320 0x30 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_log_to_text.o) - .rodata 0x00000000004cc350 0x10 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cvtas_t.o) - .rodata 0x00000000004cc360 0x10 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cvtas_s.o) - .rodata 0x00000000004cc370 0x10 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cvtas_x.o) - .rodata 0x00000000004cc380 0x150 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_a_to_s.o) - .rodata.str1.4 - 0x00000000004cc4d0 0xd /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_a_to_s.o) - 0x10 (size before relaxing) - *fill* 0x00000000004cc4dd 0x3 - .rodata 0x00000000004cc4e0 0x150 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_a_to_t.o) - .rodata.str1.4 - 0x0000000000000000 0x10 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_a_to_t.o) - .rodata.str1.4 - 0x00000000004cc630 0x20 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_s_to_a.o) - 0x30 (size before relaxing) - .rodata 0x00000000004cc650 0x10 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_s_to_a.o) - .rodata.str1.4 - 
0x0000000000000000 0x30 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_t_to_a.o) - .rodata 0x00000000004cc660 0x10 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_t_to_a.o) - .rodata 0x00000000004cc670 0x138 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_a_to_x.o) - .rodata.str1.4 - 0x0000000000000000 0x10 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_a_to_x.o) - .rodata.str1.4 - 0x0000000000000000 0x30 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_x_to_a.o) - *fill* 0x00000000004cc7a8 0x8 - .rodata 0x00000000004cc7b0 0x10 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_x_to_a.o) - .rodata 0x00000000004cc7c0 0x180 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_globals.o) - 0x00000000004cc7c0 cvtas_pten_word - 0x00000000004cc860 cvtas_globals_t - 0x00000000004cc8c0 cvtas_globals_x - 0x00000000004cc920 cvtas_globals_s - .rodata 0x00000000004cc940 0x4e0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_pow_ten_53.o) - 0x00000000004cc940 cvtas_pten_t - 0x00000000004ccc40 cvtas_tiny_pten_t - 0x00000000004ccce0 cvtas_tiny_pten_t_map - 0x00000000004ccd40 cvtas_huge_pten_t - 0x00000000004ccdc0 cvtas_huge_pten_t_map - .rodata 0x00000000004cce20 0x5e0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_pow_ten_64.o) - 0x00000000004cce20 cvtas_pten_64 - 0x00000000004cd120 cvtas_pten_64_bexp - 0x00000000004cd1e0 cvtas_tiny_pten_64 - 0x00000000004cd260 cvtas_tiny_pten_64_map - 0x00000000004cd2e0 cvtas_huge_pten_64 - 0x00000000004cd360 cvtas_huge_pten_64_map - 0x00000000004cd3ba cvtas_tiny_pten_64_bexp - 0x00000000004cd3d8 cvtas_huge_pten_64_bexp - .rodata 0x00000000004cd400 0x520 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_pow_ten_128.o) - 0x00000000004cd400 cvtas_pten_128 - 0x00000000004cd5c0 cvtas_tiny_tiny_pten_128 - 0x00000000004cd600 cvtas_tiny_pten_128 - 0x00000000004cd6a0 cvtas_tiny_pten_128_map - 0x00000000004cd740 cvtas_huge_huge_pten_128 - 0x00000000004cd780 cvtas_huge_pten_128 - 0x00000000004cd820 cvtas_huge_pten_128_map - 0x00000000004cd8a8 cvtas_pten_128_bexp - 0x00000000004cd8de cvtas_tiny_tiny_pten_128_bexp - 0x00000000004cd8e6 cvtas_tiny_pten_128_bexp - 0x00000000004cd8fa cvtas_huge_huge_pten_128_bexp - 0x00000000004cd902 cvtas_huge_pten_128_bexp - .rodata 0x00000000004cd920 0x20 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lround_ct.o) - .rodata 0x00000000004cd940 0x20 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lroundf_ct.o) - .rodata 0x00000000004cd960 0x20 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lroundf_gen.o) - .rodata 0x00000000004cd980 0x20 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lround_gen.o) - .rodata.str1.4 - 0x00000000004cd9a0 0x45a /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(libm_error.o) - 0x484 (size before relaxing) - *fill* 0x00000000004cddfa 0x6 - .rodata 0x00000000004cde00 0x918 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(libm_error.o) - .rodata 0x00000000004ce718 0x8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(qcomp.o) - .rodata 0x00000000004ce720 0xc /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fp2q.o) - *fill* 0x00000000004ce72c 0x4 - .rodata 0x00000000004ce730 0x28 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(q2fp.o) - .rodata.str1.4 
- 0x00000000004ce758 0x113 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(tbk_display.o) - 0x118 (size before relaxing) - *fill* 0x00000000004ce86b 0x15 - .rodata.str1.32 - 0x00000000004ce880 0xa2 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(tbk_display.o) - 0xc0 (size before relaxing) - *fill* 0x00000000004ce922 0x2 - .rodata.str1.4 - 0x00000000004ce924 0x24 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(tbk_backtrace.o) - 0x3c (size before relaxing) - .rodata 0x00000000004ce948 0x24 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(tbk_backtrace.o) - .rodata.str1.4 - 0x00000000004ce96c 0x14b /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(new_proc_init.o) - 0x150 (size before relaxing) - *fill* 0x00000000004ceab7 0x9 - .rodata 0x00000000004ceac0 0xa0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(ia32_addsubq.o) - .rodata 0x00000000004ceb60 0x90 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(ia32_divq.o) - .rodata.ssse3 0x00000000004cebf0 0x1c0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(intel_ssse3_memcpy.o) - .rodata.ssse3 0x00000000004cedb0 0x500 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(intel_ssse3_rep_memcpy.o) - .rodata.ssse3 0x00000000004cf2b0 0x1c0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(intel_ssse3_memmove.o) - .rodata.ssse3 0x00000000004cf470 0x500 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(intel_ssse3_rep_memmove.o) - .rodata.str1.4 - 0x00000000004cf970 0x58c /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(irc_msg_support.o) - 0x5b0 (size before relaxing) - *fill* 0x00000000004cfefc 0x4 - .rodata.str1.32 - 0x00000000004cff00 0x660 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(irc_msg_support.o) - .rodata.cst8 0x00000000004d0560 0x8 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2//libgcc.a(_powidf2.o) - -.rodata1 - *(.rodata1) - -.eh_frame_hdr 0x00000000004d0568 0xee4 - *(.eh_frame_hdr) - .eh_frame_hdr 0x00000000004d0568 0xee4 /usr/lib/../lib64/crti.o - -.eh_frame 0x00000000004d1450 0xc694 - *(.eh_frame) - .eh_frame 0x00000000004d1450 0x40 /usr/lib/../lib64/crt1.o - .eh_frame 0x00000000004d1490 0x20 /usr/lib/../lib64/crti.o - 0x38 (size before relaxing) - .eh_frame 0x00000000004d14b0 0x38 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/for_main.o - 0x50 (size before relaxing) - .eh_frame 0x00000000004d14e8 0x158 rdbfmsua.o - .eh_frame 0x00000000004d1640 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flclos.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1660 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flflun.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1680 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltbop.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d16a8 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltdat.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d16c8 0x30 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltinq.o) - 0x48 (size before relaxing) - .eh_frame 0x00000000004d16f8 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(stldsp.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d1720 0x20 
/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(stlstr.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1740 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(strmbl.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d1768 0x30 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(strmst.o) - 0x48 (size before relaxing) - .eh_frame 0x00000000004d1798 0x30 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(tbrstn.o) - 0x48 (size before relaxing) - .eh_frame 0x00000000004d17c8 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flbksp.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d17e8 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flinqr.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d1810 0x30 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flpath.o) - 0x48 (size before relaxing) - .eh_frame 0x00000000004d1840 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flsopn.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1860 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(ssenvr.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d1888 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(ssgsym.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d18b0 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(stlcuc.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d18d8 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(stuclc.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d1900 0x30 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(tbastn.o) - 0x48 (size before relaxing) - .eh_frame 0x00000000004d1930 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flglun.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1950 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libbridge.a(dcbsrh.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1970 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ireadns.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1990 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbf.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d19b0 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(posapn.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d19d8 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(posapx.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d1a00 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdmsgw.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d1a28 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readdx.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1a48 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readns.o) - 0x38 (size before relaxing) - .eh_frame 
0x00000000004d1a68 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readsb.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1a88 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(status.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1aa8 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbint.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d1ad0 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbrw.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d1af8 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(upb.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1b18 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(wrdlen.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1b38 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(writdx.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d1b60 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(wtstat.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1b80 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(adn30.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1ba0 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bfrini.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d1bc8 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bort2.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1be8 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bort_exit.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1c08 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bort.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1c28 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(conwin.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d1c50 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cpbfdx.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d1c78 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(drstpl.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1c98 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(dxinit.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d1cc0 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(dxmini.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d1ce8 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(getwin.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1d08 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ibfms.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1d28 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ichkstr.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d1d50 0x20 
/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ifxy.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1d70 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(invcon.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1d90 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(invwin.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1db0 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ipkm.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1dd0 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(irev.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1df0 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(iupm.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1e10 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lmsg.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1e30 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lstrpc.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1e50 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lstrps.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1e70 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(msgwrt.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d1e98 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(newwin.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1eb8 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nmwrd.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1ed8 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nxtwin.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1ef8 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ovrbs1.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1f18 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(padmsg.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d1f40 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkb.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d1f68 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkbs1.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1f88 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkc.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d1fb0 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pktdd.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d1fd0 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkvs01.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d1ff8 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkvs1.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d2018 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdbfdx.o) - 0x40 (size before relaxing) - 
.eh_frame 0x00000000004d2040 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdcmps.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d2068 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdtree.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d2090 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdusdx.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d20b8 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readmg.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d20d8 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(seqsdx.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d2100 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(stndrd.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d2128 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(string.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d2150 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(strnum.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d2178 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(strsuc.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d21a0 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(trybump.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d21c0 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(upbb.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d21e0 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(upc.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d2208 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(usrtpl.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d2230 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(capit.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d2250 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(chrtrna.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d2278 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(chrtrn.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d22a0 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cktaba.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d22c8 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cnved4.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d22e8 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(digit.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d2308 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(elemdx.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d2328 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(getlens.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d2350 0x20 
/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(gets1loc.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d2370 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(i4dy.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d2390 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(idn30.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d23b8 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(igetdate.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d23d8 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(istdesc.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d23f8 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(iupb.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d2418 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(iupbs01.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d2440 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(jstchr.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d2468 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(jstnum.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d2490 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lstjpb.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d24b0 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(makestab.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d24d8 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(mvb.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d2500 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemock.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d2520 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtab.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d2548 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtbax.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d2570 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nenuaa.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d2598 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nenubd.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d25c0 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(numbck.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d25e0 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(numtab.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d2608 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbt.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d2628 0x30 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(parstr.o) - 0x48 (size before relaxing) - .eh_frame 0x00000000004d2658 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(parusr.o) - 0x40 
(size before relaxing) - .eh_frame 0x00000000004d2680 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(parutg.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d26a8 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rcstpl.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d26c8 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdmsgb.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d26f0 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(restd.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d2718 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rsvfvm.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d2738 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(strcln.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d2758 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(tabsub.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d2780 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(uptdd.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d27a0 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(wrdesc.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d27c0 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cadn30.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d27e8 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(chekstab.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d2810 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(inctab.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d2838 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtbb.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d2858 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtbd.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d2880 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(numtbd.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d28a8 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(tabent.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d28c8 0x28 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(valx.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d28f0 0x20 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rjust.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004d2910 0x358 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_init.o) - 0x370 (size before relaxing) - .eh_frame 0x00000000004d2c68 0x1c0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_io_util.o) - 0x1d8 (size before relaxing) - .eh_frame 0x00000000004d2e28 0xa08 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_open.o) - 0xa20 (size before relaxing) - .eh_frame 0x00000000004d3830 0x30 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_preconnected_units_init.o) - 
0x48 (size before relaxing) - .eh_frame 0x00000000004d3860 0x1d0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_reentrancy.o) - 0x1e8 (size before relaxing) - .eh_frame 0x00000000004d3a30 0x1e8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_secnds.o) - 0x200 (size before relaxing) - .eh_frame 0x00000000004d3c18 0x238 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_stop.o) - 0x250 (size before relaxing) - .eh_frame 0x00000000004d3e50 0x320 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_vm.o) - 0x338 (size before relaxing) - .eh_frame 0x00000000004d4170 0x6e8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wint_fmt.o) - 0x700 (size before relaxing) - .eh_frame 0x00000000004d4858 0x9b0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wseq_fmt.o) - 0x9c8 (size before relaxing) - .eh_frame 0x00000000004d5208 0xbe8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wseq_lis.o) - 0xc00 (size before relaxing) - .eh_frame 0x00000000004d5df0 0x758 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_aio.o) - 0x770 (size before relaxing) - .eh_frame 0x00000000004d6548 0x698 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_open_proc.o) - 0x6b0 (size before relaxing) - .eh_frame 0x00000000004d6be0 0x180 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_aio_wrap.o) - 0x198 (size before relaxing) - .eh_frame 0x00000000004d6d60 0x290 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_text_to_int.o) - 0x2a8 (size before relaxing) - .eh_frame 0x00000000004d6ff0 0xc8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_vax_f.o) - 0xe0 (size before relaxing) - .eh_frame 0x00000000004d70b8 0xc8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_vax_d.o) - 0xe0 (size before relaxing) - .eh_frame 0x00000000004d7180 0xc8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_vax_g.o) - 0xe0 (size before relaxing) - .eh_frame 0x00000000004d7248 0x1e0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cray.o) - 0x1f8 (size before relaxing) - .eh_frame 0x00000000004d7428 0xe0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_ibm_short.o) - 0xf8 (size before relaxing) - .eh_frame 0x00000000004d7508 0x108 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_ibm_long.o) - 0x120 (size before relaxing) - .eh_frame 0x00000000004d7610 0x508 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_ieee_double.o) - 0x520 (size before relaxing) - .eh_frame 0x00000000004d7b18 0x2a8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_ieee_single.o) - 0x2c0 (size before relaxing) - .eh_frame 0x00000000004d7dc0 0x220 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_close.o) - 0x238 (size before relaxing) - .eh_frame 0x00000000004d7fe0 0x70 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_close_proc.o) - 0x88 (size before relaxing) - .eh_frame 0x00000000004d8050 0x28 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_default_io_sizes_env_init.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d8078 0x1d8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_desc_item.o) - 0x1f0 (size before relaxing) - .eh_frame 0x00000000004d8250 0x4d8 
/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_diags_intel.o) - 0x4f0 (size before relaxing) - .eh_frame 0x00000000004d8728 0x28 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_exit.o) - 0x40 (size before relaxing) - .eh_frame 0x00000000004d8750 0x60 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_exit_handler.o) - 0x78 (size before relaxing) - .eh_frame 0x00000000004d87b0 0x6a8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_fmt_comp.o) - 0x6c0 (size before relaxing) - .eh_frame 0x00000000004d8e58 0xf0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_fmt_val.o) - 0x108 (size before relaxing) - .eh_frame 0x00000000004d8f48 0x4c0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_get.o) - 0x4d8 (size before relaxing) - .eh_frame 0x00000000004d9408 0xe8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_intrp_fmt.o) - 0x100 (size before relaxing) - .eh_frame 0x00000000004d94f0 0x430 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_lub_mgt.o) - 0x448 (size before relaxing) - .eh_frame 0x00000000004d9920 0xb0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_need_lf.o) - 0xc8 (size before relaxing) - .eh_frame 0x00000000004d99d0 0x220 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_put.o) - 0x238 (size before relaxing) - .eh_frame 0x00000000004d9bf0 0x9e0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wseq.o) - 0x9f8 (size before relaxing) - .eh_frame 0x00000000004da5d0 0xa0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(tbk_traceback.o) - 0xb8 (size before relaxing) - .eh_frame 0x00000000004da670 0x100 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_int_to_text.o) - 0x118 (size before relaxing) - .eh_frame 0x00000000004da770 0x180 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_data_to_text.o) - 0x198 (size before relaxing) - .eh_frame 0x00000000004da8f0 0x140 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_log_to_text.o) - 0x158 (size before relaxing) - .eh_frame 0x00000000004daa30 0x150 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_text_to_data.o) - 0x168 (size before relaxing) - .eh_frame 0x00000000004dab80 0x90 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_text_to_log.o) - 0xa8 (size before relaxing) - .eh_frame 0x00000000004dac10 0x2f8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cvtas_t.o) - 0x310 (size before relaxing) - .eh_frame 0x00000000004daf08 0x2f8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cvtas_s.o) - 0x310 (size before relaxing) - .eh_frame 0x00000000004db200 0x2d8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cvtas_x.o) - 0x2f0 (size before relaxing) - .eh_frame 0x00000000004db4d8 0x90 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_a_to_s.o) - 0xa8 (size before relaxing) - .eh_frame 0x00000000004db568 0x90 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_a_to_t.o) - 0xa8 (size before relaxing) - .eh_frame 0x00000000004db5f8 0x60 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_s_to_a.o) - 0x78 (size before relaxing) - .eh_frame 0x00000000004db658 0x60 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_t_to_a.o) - 0x78 (size 
before relaxing) - .eh_frame 0x00000000004db6b8 0x18 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_nan_s.o) - 0x30 (size before relaxing) - .eh_frame 0x00000000004db6d0 0x18 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_nan_t.o) - 0x30 (size before relaxing) - .eh_frame 0x00000000004db6e8 0x90 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_a_to_x.o) - 0xa8 (size before relaxing) - .eh_frame 0x00000000004db778 0x60 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_x_to_a.o) - 0x78 (size before relaxing) - .eh_frame 0x00000000004db7d8 0x18 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_nan_x.o) - 0x30 (size before relaxing) - .eh_frame 0x00000000004db7f0 0x20 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(fetestexcept.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004db810 0x20 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lround_ct.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004db830 0x20 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lroundf_ct.o) - 0x38 (size before relaxing) - .eh_frame 0x00000000004db850 0x30 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lroundf_gen.o) - 0x48 (size before relaxing) - .eh_frame 0x00000000004db880 0x30 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lround_gen.o) - 0x48 (size before relaxing) - .eh_frame 0x00000000004db8b0 0xe8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(libm_error.o) - 0x100 (size before relaxing) - .eh_frame 0x00000000004db998 0x18 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(matherrf.o) - 0x30 (size before relaxing) - .eh_frame 0x00000000004db9b0 0x18 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(matherrl.o) - 0x30 (size before relaxing) - .eh_frame 0x00000000004db9c8 0x18 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(matherr.o) - 0x30 (size before relaxing) - .eh_frame 0x00000000004db9e0 0x60 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(ints2q.o) - 0x78 (size before relaxing) - .eh_frame 0x00000000004dba40 0xa8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(qcomp.o) - 0xc0 (size before relaxing) - .eh_frame 0x00000000004dbae8 0x48 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fp2q.o) - 0x60 (size before relaxing) - .eh_frame 0x00000000004dbb30 0x88 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(q2fp.o) - 0xa0 (size before relaxing) - .eh_frame 0x00000000004dbbb8 0x1e8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(tbk_display.o) - 0x200 (size before relaxing) - .eh_frame 0x00000000004dbda0 0x470 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(tbk_backtrace.o) - 0x488 (size before relaxing) - .eh_frame 0x00000000004dc210 0x1a8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(cpu_feature_disp.o) - 0x1c0 (size before relaxing) - .eh_frame 0x00000000004dc3b8 0x60 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fastmemcpy.o) - 0x78 (size before relaxing) - .eh_frame 0x00000000004dc418 0x48 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fastmemmove.o) - 0x60 (size before relaxing) - .eh_frame 0x00000000004dc460 0x30 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fastmemset.o) - 0x48 (size before relaxing) - .eh_frame 0x00000000004dc490 0x80 
/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(new_proc_init.o) - 0x98 (size before relaxing) - .eh_frame 0x00000000004dc510 0xb30 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(ia32_addsubq.o) - 0xb48 (size before relaxing) - .eh_frame 0x00000000004dd040 0x570 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(ia32_divq.o) - 0x588 (size before relaxing) - .eh_frame 0x00000000004dd5b0 0x18 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strcpy.o) - 0x30 (size before relaxing) - .eh_frame 0x00000000004dd5c8 0x18 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strncpy.o) - 0x30 (size before relaxing) - .eh_frame 0x00000000004dd5e0 0x18 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strlen.o) - 0x30 (size before relaxing) - .eh_frame 0x00000000004dd5f8 0x18 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strchr.o) - 0x30 (size before relaxing) - .eh_frame 0x00000000004dd610 0xd8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strncmp.o) - 0xf0 (size before relaxing) - .eh_frame 0x00000000004dd6e8 0x58 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strcat.o) - 0x70 (size before relaxing) - .eh_frame 0x00000000004dd740 0x68 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strncat.o) - 0x80 (size before relaxing) - .eh_frame 0x00000000004dd7a8 0x18 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fast_memcpy_pp.o) - 0x30 (size before relaxing) - .eh_frame 0x00000000004dd7c0 0x18 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fast_memset_pp.o) - 0x30 (size before relaxing) - .eh_frame 0x00000000004dd7d8 0x18 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(intel_ssse3_memcpy.o) - 0x30 (size before relaxing) - .eh_frame 0x00000000004dd7f0 0x18 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(intel_ssse3_rep_memcpy.o) - 0x30 (size before relaxing) - .eh_frame 0x00000000004dd808 0x18 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(intel_ssse3_memmove.o) - 0x30 (size before relaxing) - .eh_frame 0x00000000004dd820 0x18 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(intel_ssse3_rep_memmove.o) - 0x30 (size before relaxing) - .eh_frame 0x00000000004dd838 0x108 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(irc_msg_support.o) - 0x120 (size before relaxing) - .eh_frame 0x00000000004dd940 0xe0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fast_mem_ops.o) - 0xf8 (size before relaxing) - .eh_frame 0x00000000004dda20 0x68 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(proc_init_utils.o) - 0x80 (size before relaxing) - .eh_frame 0x00000000004dda88 0x40 /usr/lib64/libc_nonshared.a(elf-init.oS) - 0x58 (size before relaxing) - .eh_frame 0x00000000004ddac8 0x18 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2//libgcc.a(_powidf2.o) - 0x30 (size before relaxing) - .eh_frame 0x00000000004ddae0 0x4 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2/crtend.o - -.gcc_except_table - *(.gcc_except_table .gcc_except_table.*) - -.exception_ranges - *(.exception_ranges .exception_ranges*) - 0x00000000004ddae4 . = (ALIGN (0x200000) - ((0x200000 - .) & 0x1fffff)) - 0x00000000006de80c . 
= DATA_SEGMENT_ALIGN (0x200000, 0x1000) - -.eh_frame - *(.eh_frame) - -.gcc_except_table - *(.gcc_except_table .gcc_except_table.*) - -.exception_ranges - *(.exception_ranges .exception_ranges*) - -.tdata - *(.tdata .tdata.* .gnu.linkonce.td.*) - -.tbss - *(.tbss .tbss.* .gnu.linkonce.tb.*) - *(.tcommon) - -.preinit_array 0x00000000006de80c 0x0 - 0x00000000006de80c PROVIDE (__preinit_array_start, .) - *(.preinit_array) - 0x00000000006de80c PROVIDE (__preinit_array_end, .) - -.init_array 0x00000000006de80c 0x0 - 0x00000000006de80c PROVIDE (__init_array_start, .) - *(SORT(.init_array.*)) - *(.init_array) - 0x00000000006de80c PROVIDE (__init_array_end, .) - -.fini_array 0x00000000006de80c 0x0 - 0x00000000006de80c PROVIDE (__fini_array_start, .) - *(SORT(.fini_array.*)) - *(.fini_array) - 0x00000000006de80c PROVIDE (__fini_array_end, .) - -.ctors 0x00000000006de810 0x18 - *crtbegin.o(.ctors) - .ctors 0x00000000006de810 0x8 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2/crtbegin.o - *crtbegin?.o(.ctors) - *(EXCLUDE_FILE(*crtend?.o *crtend.o) .ctors) - .ctors 0x00000000006de818 0x8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fast_mem_ops.o) - *(SORT(.ctors.*)) - *(.ctors) - .ctors 0x00000000006de820 0x8 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2/crtend.o - -.dtors 0x00000000006de828 0x10 - *crtbegin.o(.dtors) - .dtors 0x00000000006de828 0x8 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2/crtbegin.o - *crtbegin?.o(.dtors) - *(EXCLUDE_FILE(*crtend?.o *crtend.o) .dtors) - *(SORT(.dtors.*)) - *(.dtors) - .dtors 0x00000000006de830 0x8 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2/crtend.o - 0x00000000006de830 __DTOR_END__ - -.jcr 0x00000000006de838 0x8 - *(.jcr) - .jcr 0x00000000006de838 0x0 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2/crtbegin.o - .jcr 0x00000000006de838 0x8 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2/crtend.o - -.data.rel.ro 0x00000000006de840 0x480 - *(.data.rel.ro.local* .gnu.linkonce.d.rel.ro.local.*) - .data.rel.ro.local - 0x00000000006de840 0xa0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(libm_error.o) - .data.rel.ro.local - 0x00000000006de8e0 0x3e0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(irc_msg_support.o) - *(.data.rel.ro .data.rel.ro.* .gnu.linkonce.d.rel.ro.*) - -.dynamic 0x00000000006decc0 0x1e0 - *(.dynamic) - .dynamic 0x00000000006decc0 0x1e0 /usr/lib/../lib64/crt1.o - 0x00000000006decc0 _DYNAMIC - -.got 0x00000000006deea0 0x158 - *(.got) - .got 0x00000000006deea0 0x158 /usr/lib/../lib64/crt1.o - *(.igot) - 0x00000000006dffe8 . 
= DATA_SEGMENT_RELRO_END (., (SIZEOF (.got.plt) >= 0x18)?0x18:0x0) - -.got.plt 0x00000000006df000 0x398 - *(.got.plt) - .got.plt 0x00000000006df000 0x398 /usr/lib/../lib64/crt1.o - 0x00000000006df000 _GLOBAL_OFFSET_TABLE_ - *(.igot.plt) - .igot.plt 0x0000000000000000 0x0 /usr/lib/../lib64/crt1.o - -.data 0x00000000006df3c0 0x3ce0 - *(.data .data.* .gnu.linkonce.d.*) - .data 0x00000000006df3c0 0x4 /usr/lib/../lib64/crt1.o - 0x00000000006df3c0 data_start - 0x00000000006df3c0 __data_start - .data 0x00000000006df3c4 0x0 /usr/lib/../lib64/crti.o - *fill* 0x00000000006df3c4 0x4 - .data 0x00000000006df3c8 0x8 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2/crtbegin.o - 0x00000000006df3c8 __dso_handle - .data 0x00000000006df3d0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/for_main.o - *fill* 0x00000000006df3d0 0x10 - .data 0x00000000006df3e0 0x240 rdbfmsua.o - .data 0x00000000006df620 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flclos.o) - .data 0x00000000006df620 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flflun.o) - .data 0x00000000006df620 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltbop.o) - .data 0x00000000006df620 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltdat.o) - .data 0x00000000006df620 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltinq.o) - .data 0x00000000006df620 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(stldsp.o) - .data 0x00000000006df620 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(stlstr.o) - .data 0x00000000006df620 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(strmbl.o) - .data 0x00000000006df620 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(strmst.o) - .data 0x00000000006df620 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(tbrstn.o) - .data 0x00000000006df620 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flbksp.o) - .data 0x00000000006df620 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flinqr.o) - .data 0x00000000006df620 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flpath.o) - .data 0x00000000006df620 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flsopn.o) - .data 0x00000000006df620 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(ssenvr.o) - .data 0x00000000006df620 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(ssgsym.o) - .data 0x00000000006df620 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(stlcuc.o) - .data 0x00000000006df620 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(stuclc.o) - .data 0x00000000006df620 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(tbastn.o) - .data 0x00000000006df620 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flglun.o) - .data 0x00000000006df620 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libbridge.a(dcbsrh.o) - 
.data 0x00000000006df620 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ireadns.o) - *fill* 0x00000000006df620 0x20 - .data 0x00000000006df640 0xd8 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbf.o) - .data 0x00000000006df718 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(posapn.o) - .data 0x00000000006df718 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(posapx.o) - .data 0x00000000006df718 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdmsgw.o) - .data 0x00000000006df718 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readdx.o) - .data 0x00000000006df718 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readns.o) - .data 0x00000000006df718 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readsb.o) - .data 0x00000000006df718 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(status.o) - .data 0x00000000006df718 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbint.o) - .data 0x00000000006df718 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbrw.o) - .data 0x00000000006df718 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(upb.o) - .data 0x00000000006df718 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(wrdlen.o) - .data 0x00000000006df718 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(writdx.o) - .data 0x00000000006df718 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(wtstat.o) - .data 0x00000000006df718 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(adn30.o) - *fill* 0x00000000006df718 0x28 - .data 0x00000000006df740 0x7dc /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bfrini.o) - .data 0x00000000006dff1c 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bort2.o) - .data 0x00000000006dff1c 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bort_exit.o) - .data 0x00000000006dff1c 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bort.o) - .data 0x00000000006dff1c 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(conwin.o) - .data 0x00000000006dff1c 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cpbfdx.o) - .data 0x00000000006dff1c 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(drstpl.o) - *fill* 0x00000000006dff1c 0x24 - .data 0x00000000006dff40 0x128 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(dxinit.o) - .data 0x00000000006e0068 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(dxmini.o) - .data 0x00000000006e0068 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(getwin.o) - .data 0x00000000006e0068 0x0 
/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ibfms.o) - .data 0x00000000006e0068 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ichkstr.o) - .data 0x00000000006e0068 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ifxy.o) - .data 0x00000000006e0068 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(invcon.o) - .data 0x00000000006e0068 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(invwin.o) - .data 0x00000000006e0068 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ipkm.o) - .data 0x00000000006e0068 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(irev.o) - .data 0x00000000006e0068 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(iupm.o) - .data 0x00000000006e0068 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lmsg.o) - .data 0x00000000006e0068 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lstrpc.o) - .data 0x00000000006e0068 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lstrps.o) - .data 0x00000000006e0068 0x8 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(msgwrt.o) - .data 0x00000000006e0070 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(newwin.o) - .data 0x00000000006e0070 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nmwrd.o) - .data 0x00000000006e0070 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nxtwin.o) - .data 0x00000000006e0070 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ovrbs1.o) - .data 0x00000000006e0070 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(padmsg.o) - .data 0x00000000006e0070 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkb.o) - .data 0x00000000006e0070 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkbs1.o) - .data 0x00000000006e0070 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkc.o) - .data 0x00000000006e0070 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pktdd.o) - .data 0x00000000006e0070 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkvs01.o) - .data 0x00000000006e0070 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkvs1.o) - *fill* 0x00000000006e0070 0x10 - .data 0x00000000006e0080 0x68 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdbfdx.o) - .data 0x00000000006e00e8 0x8 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdcmps.o) - .data 0x00000000006e00f0 0x8 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdtree.o) - .data 0x00000000006e00f8 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdusdx.o) - .data 0x00000000006e00f8 0x0 
/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readmg.o) - .data 0x00000000006e00f8 0x8 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(seqsdx.o) - .data 0x00000000006e0100 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(stndrd.o) - .data 0x00000000006e0100 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(string.o) - .data 0x00000000006e0100 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(strnum.o) - .data 0x00000000006e0100 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(strsuc.o) - .data 0x00000000006e0100 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(trybump.o) - .data 0x00000000006e0100 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(upbb.o) - .data 0x00000000006e0100 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(upc.o) - .data 0x00000000006e0100 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(usrtpl.o) - .data 0x00000000006e0100 0x3a /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(capit.o) - *fill* 0x00000000006e013a 0x2 - .data 0x00000000006e013c 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(chrtrna.o) - .data 0x00000000006e013c 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(chrtrn.o) - .data 0x00000000006e013c 0xa /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cktaba.o) - *fill* 0x00000000006e0146 0x2 - .data 0x00000000006e0148 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cnved4.o) - .data 0x00000000006e0148 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(digit.o) - .data 0x00000000006e0148 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(elemdx.o) - .data 0x00000000006e0148 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(getlens.o) - .data 0x00000000006e0148 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(gets1loc.o) - .data 0x00000000006e0148 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(i4dy.o) - .data 0x00000000006e0148 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(idn30.o) - .data 0x00000000006e0148 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(igetdate.o) - .data 0x00000000006e0148 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(istdesc.o) - .data 0x00000000006e0148 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(iupb.o) - .data 0x00000000006e0148 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(iupbs01.o) - .data 0x00000000006e0148 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(jstchr.o) - .data 0x00000000006e0148 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(jstnum.o) - .data 0x00000000006e0148 0x0 
/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lstjpb.o) - .data 0x00000000006e0148 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(makestab.o) - .data 0x00000000006e0148 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(mvb.o) - *fill* 0x00000000006e0148 0x18 - .data 0x00000000006e0160 0x46 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemock.o) - *fill* 0x00000000006e01a6 0x2 - .data 0x00000000006e01a8 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtab.o) - .data 0x00000000006e01a8 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtbax.o) - .data 0x00000000006e01a8 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nenuaa.o) - .data 0x00000000006e01a8 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nenubd.o) - .data 0x00000000006e01a8 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(numbck.o) - .data 0x00000000006e01a8 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(numtab.o) - .data 0x00000000006e01a8 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbt.o) - .data 0x00000000006e01a8 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(parstr.o) - .data 0x00000000006e01a8 0xc /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(parusr.o) - *fill* 0x00000000006e01b4 0xc - .data 0x00000000006e01c0 0x60 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(parutg.o) - .data 0x00000000006e0220 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rcstpl.o) - .data 0x00000000006e0220 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdmsgb.o) - .data 0x00000000006e0220 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(restd.o) - .data 0x00000000006e0220 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rsvfvm.o) - .data 0x00000000006e0220 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(strcln.o) - .data 0x00000000006e0220 0x4 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(tabsub.o) - .data 0x00000000006e0224 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(uptdd.o) - .data 0x00000000006e0224 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(wrdesc.o) - .data 0x00000000006e0224 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cadn30.o) - .data 0x00000000006e0224 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(chekstab.o) - .data 0x00000000006e0224 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(inctab.o) - .data 0x00000000006e0224 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtbb.o) - .data 0x00000000006e0224 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtbd.o) - 
.data 0x00000000006e0224 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(numtbd.o) - .data 0x00000000006e0224 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(tabent.o) - .data 0x00000000006e0224 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(valx.o) - .data 0x00000000006e0224 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rjust.o) - *fill* 0x00000000006e0224 0x4 - .data 0x00000000006e0228 0x10 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_init.o) - 0x00000000006e0228 for__segv_default_msg - 0x00000000006e0230 for__l_current_arg - .data 0x00000000006e0238 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_io_util.o) - .data 0x00000000006e0238 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_open.o) - .data 0x00000000006e0238 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_preconnected_units_init.o) - *fill* 0x00000000006e0238 0x8 - .data 0x00000000006e0240 0x140 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_reentrancy.o) - 0x00000000006e0240 for__static_threadstor_private - .data 0x00000000006e0380 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_secnds.o) - .data 0x00000000006e0380 0x80 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_stop.o) - .data 0x00000000006e0400 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_vm.o) - .data 0x00000000006e0400 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wint_fmt.o) - .data 0x00000000006e0400 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wseq_fmt.o) - .data 0x00000000006e0400 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wseq_lis.o) - .data 0x00000000006e0400 0x4 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_aio.o) - .data 0x00000000006e0404 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_open_proc.o) - .data 0x00000000006e0404 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_aio_wrap.o) - .data 0x00000000006e0404 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_text_to_int.o) - .data 0x00000000006e0404 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_vax_f.o) - .data 0x00000000006e0404 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_vax_d.o) - .data 0x00000000006e0404 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_vax_g.o) - .data 0x00000000006e0404 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cray.o) - .data 0x00000000006e0404 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_ibm_short.o) - .data 0x00000000006e0404 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_ibm_long.o) - .data 0x00000000006e0404 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_ieee_double.o) - .data 0x00000000006e0404 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_ieee_single.o) - .data 0x00000000006e0404 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_close.o) - .data 0x00000000006e0404 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_close_proc.o) - 
.data 0x00000000006e0404 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_default_io_sizes_env_init.o) - .data 0x00000000006e0404 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_desc_item.o) - *fill* 0x00000000006e0404 0x1c - .data 0x00000000006e0420 0x1e80 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_diags_intel.o) - .data 0x00000000006e22a0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_exit.o) - .data 0x00000000006e22a0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_exit_handler.o) - .data 0x00000000006e22a0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_fmt_comp.o) - .data 0x00000000006e22a0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_fmt_val.o) - .data 0x00000000006e22a0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_get.o) - .data 0x00000000006e22a0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_intrp_fmt.o) - .data 0x00000000006e22a0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_ldir_wfs.o) - .data 0x00000000006e22a0 0xc /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_lub_mgt.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_need_lf.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_put.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wseq.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(tbk_traceback.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt__globals.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_int_to_text.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_data_to_text.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_log_to_text.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_text_to_data.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_text_to_log.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cvtas_t.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cvtas_s.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cvtas_x.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_a_to_s.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_a_to_t.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_s_to_a.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_t_to_a.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_nan_s.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_nan_t.o) - .data 0x00000000006e22ac 0x0 
/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_a_to_x.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_x_to_a.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_nan_x.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_globals.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_pow_ten_53.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_pow_ten_64.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_pow_ten_128.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(fetestexcept.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lroundf_stub.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lround_stub.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lround_ct.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lroundf_ct.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lroundf_gen.o) - .data 0x00000000006e22ac 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lround_gen.o) - *fill* 0x00000000006e22ac 0x14 - .data 0x00000000006e22c0 0x3c0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(libm_error.o) - 0x00000000006e2660 __libm_pmatherrf - 0x00000000006e2668 __libm_pmatherr - 0x00000000006e2670 __libm_pmatherrl - 0x00000000006e267c _LIB_VERSIONIMF - .data 0x00000000006e2680 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(matherrf.o) - .data 0x00000000006e2680 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(matherrl.o) - .data 0x00000000006e2680 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(matherr.o) - .data 0x00000000006e2680 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(ints2q.o) - .data 0x00000000006e2680 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(qcomp.o) - .data 0x00000000006e2680 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fp2q.o) - .data 0x00000000006e2680 0x28 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(q2fp.o) - .data 0x00000000006e26a8 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(tbk_display.o) - .data 0x00000000006e26a8 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(tbk_backtrace.o) - .data 0x00000000006e26a8 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(cpu_feature_disp.o) - .data 0x00000000006e26a8 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fastmemcpy.o) - .data 0x00000000006e26a8 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fastmemmove.o) - .data 0x00000000006e26a8 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fastmemset.o) - *fill* 0x00000000006e26a8 0x18 - .data 0x00000000006e26c0 0x160 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(new_proc_init.o) - .data 0x00000000006e2820 0x28 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(ia32_addsubq.o) - .data 0x00000000006e2848 0x30 
/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(ia32_divq.o) - .data 0x00000000006e2878 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strcpy.o) - .data 0x00000000006e2878 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strncpy.o) - .data 0x00000000006e2878 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strlen.o) - .data 0x00000000006e2878 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strchr.o) - .data 0x00000000006e2878 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strncmp.o) - .data 0x00000000006e2878 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strcat.o) - .data 0x00000000006e2878 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strncat.o) - .data 0x00000000006e2878 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fast_memcpy_pp.o) - .data 0x00000000006e2878 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fast_memset_pp.o) - .data 0x00000000006e2878 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(intel_ssse3_memcpy.o) - .data 0x00000000006e2878 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(intel_ssse3_rep_memcpy.o) - .data 0x00000000006e2878 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(intel_ssse3_memmove.o) - .data 0x00000000006e2878 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(intel_ssse3_rep_memmove.o) - .data 0x00000000006e2878 0x8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(irc_msg_support.o) - .data 0x00000000006e2880 0x820 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fast_mem_ops.o) - 0x00000000006e3080 __libirc_largest_cache_size - 0x00000000006e3084 __libirc_largest_cache_size_half - 0x00000000006e3088 __libirc_data_cache_size - 0x00000000006e308c __libirc_data_cache_size_half - .data 0x00000000006e30a0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(proc_init_utils.o) - .data 0x00000000006e30a0 0x0 /usr/lib64/libc_nonshared.a(elf-init.oS) - .data 0x00000000006e30a0 0x0 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2//libgcc.a(_powidf2.o) - .data 0x00000000006e30a0 0x0 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2/crtend.o - .data 0x00000000006e30a0 0x0 /usr/lib/../lib64/crtn.o - -.tm_clone_table - 0x00000000006e30a0 0x0 - .tm_clone_table - 0x00000000006e30a0 0x0 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2/crtbegin.o - .tm_clone_table - 0x00000000006e30a0 0x0 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2/crtend.o - -.data1 - *(.data1) - 0x00000000006e30a0 _edata = . - 0x00000000006e30a0 PROVIDE (edata, .) - 0x00000000006e30a0 . = . - 0x00000000006e30a0 __bss_start = . 
- -.bss 0x00000000006e30c0 0x2ebcf68 - *(.dynbss) - .dynbss 0x00000000006e30c0 0x18 /usr/lib/../lib64/crt1.o - 0x00000000006e30c0 stdin@@GLIBC_2.2.5 - 0x00000000006e30c8 stderr@@GLIBC_2.2.5 - 0x00000000006e30d0 stdout@@GLIBC_2.2.5 - *(.bss .bss.* .gnu.linkonce.b.*) - .bss 0x00000000006e30d8 0x0 /usr/lib/../lib64/crt1.o - .bss 0x00000000006e30d8 0x0 /usr/lib/../lib64/crti.o - .bss 0x00000000006e30d8 0x10 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2/crtbegin.o - .bss 0x00000000006e30e8 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/for_main.o - *fill* 0x00000000006e30e8 0x18 - .bss 0x00000000006e3100 0x91360 rdbfmsua.o - .bss 0x0000000000774460 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flclos.o) - .bss 0x0000000000774460 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flflun.o) - .bss 0x0000000000774460 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltbop.o) - .bss 0x0000000000774460 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltdat.o) - .bss 0x0000000000774460 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltinq.o) - .bss 0x0000000000774460 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(stldsp.o) - .bss 0x0000000000774460 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(stlstr.o) - .bss 0x0000000000774460 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(strmbl.o) - .bss 0x0000000000774460 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(strmst.o) - .bss 0x0000000000774460 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(tbrstn.o) - .bss 0x0000000000774460 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flbksp.o) - .bss 0x0000000000774460 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flinqr.o) - .bss 0x0000000000774460 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flpath.o) - .bss 0x0000000000774460 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flsopn.o) - .bss 0x0000000000774460 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(ssenvr.o) - .bss 0x0000000000774460 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(ssgsym.o) - .bss 0x0000000000774460 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(stlcuc.o) - .bss 0x0000000000774460 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(stuclc.o) - .bss 0x0000000000774460 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(tbastn.o) - .bss 0x0000000000774460 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flglun.o) - .bss 0x0000000000774460 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libbridge.a(dcbsrh.o) - .bss 0x0000000000774460 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ireadns.o) - .bss 0x0000000000774460 0x4 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbf.o) - 
*fill* 0x0000000000774464 0x1c - .bss 0x0000000000774480 0xc350 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(posapn.o) - *fill* 0x00000000007807d0 0x30 - .bss 0x0000000000780800 0xc350 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(posapx.o) - .bss 0x000000000078cb50 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdmsgw.o) - .bss 0x000000000078cb50 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readdx.o) - .bss 0x000000000078cb50 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readns.o) - .bss 0x000000000078cb50 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readsb.o) - .bss 0x000000000078cb50 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(status.o) - .bss 0x000000000078cb50 0x8 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbint.o) - .bss 0x000000000078cb58 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbrw.o) - .bss 0x000000000078cb58 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(upb.o) - .bss 0x000000000078cb58 0x4 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(wrdlen.o) - *fill* 0x000000000078cb5c 0x24 - .bss 0x000000000078cb80 0xc350 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(writdx.o) - .bss 0x0000000000798ed0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(wtstat.o) - .bss 0x0000000000798ed0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(adn30.o) - .bss 0x0000000000798ed0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bfrini.o) - .bss 0x0000000000798ed0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bort2.o) - .bss 0x0000000000798ed0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bort_exit.o) - .bss 0x0000000000798ed0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bort.o) - .bss 0x0000000000798ed0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(conwin.o) - .bss 0x0000000000798ed0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cpbfdx.o) - .bss 0x0000000000798ed0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(drstpl.o) - .bss 0x0000000000798ed0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(dxinit.o) - .bss 0x0000000000798ed0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(dxmini.o) - .bss 0x0000000000798ed0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(getwin.o) - .bss 0x0000000000798ed0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ibfms.o) - .bss 0x0000000000798ed0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ichkstr.o) - .bss 0x0000000000798ed0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ifxy.o) 
- .bss 0x0000000000798ed0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(invcon.o) - .bss 0x0000000000798ed0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(invwin.o) - .bss 0x0000000000798ed0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ipkm.o) - .bss 0x0000000000798ed0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(irev.o) - .bss 0x0000000000798ed0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(iupm.o) - .bss 0x0000000000798ed0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lmsg.o) - .bss 0x0000000000798ed0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lstrpc.o) - .bss 0x0000000000798ed0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lstrps.o) - *fill* 0x0000000000798ed0 0x30 - .bss 0x0000000000798f00 0xc350 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(msgwrt.o) - .bss 0x00000000007a5250 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(newwin.o) - .bss 0x00000000007a5250 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nmwrd.o) - .bss 0x00000000007a5250 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nxtwin.o) - .bss 0x00000000007a5250 0x4 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ovrbs1.o) - .bss 0x00000000007a5254 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(padmsg.o) - .bss 0x00000000007a5254 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkb.o) - .bss 0x00000000007a5254 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkbs1.o) - .bss 0x00000000007a5254 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkc.o) - .bss 0x00000000007a5254 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pktdd.o) - .bss 0x00000000007a5254 0x4 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkvs01.o) - .bss 0x00000000007a5258 0x4 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkvs1.o) - *fill* 0x00000000007a525c 0x24 - .bss 0x00000000007a5280 0xc350 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdbfdx.o) - .bss 0x00000000007b15d0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdcmps.o) - *fill* 0x00000000007b15d0 0x30 - .bss 0x00000000007b1600 0x13880 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdtree.o) - .bss 0x00000000007c4e80 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdusdx.o) - .bss 0x00000000007c4e80 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readmg.o) - .bss 0x00000000007c4e80 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(seqsdx.o) - .bss 0x00000000007c4e80 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(stndrd.o) - .bss 
0x00000000007c4e80 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(string.o) - .bss 0x00000000007c4e80 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(strnum.o) - .bss 0x00000000007c4e80 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(strsuc.o) - .bss 0x00000000007c4e80 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(trybump.o) - .bss 0x00000000007c4e80 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(upbb.o) - .bss 0x00000000007c4e80 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(upc.o) - .bss 0x00000000007c4e80 0x3a980 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(usrtpl.o) - .bss 0x00000000007ff800 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(capit.o) - .bss 0x00000000007ff800 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(chrtrna.o) - .bss 0x00000000007ff800 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(chrtrn.o) - .bss 0x00000000007ff800 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cktaba.o) - .bss 0x00000000007ff800 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cnved4.o) - .bss 0x00000000007ff800 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(digit.o) - .bss 0x00000000007ff800 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(elemdx.o) - .bss 0x00000000007ff800 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(getlens.o) - .bss 0x00000000007ff800 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(gets1loc.o) - .bss 0x00000000007ff800 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(i4dy.o) - .bss 0x00000000007ff800 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(idn30.o) - .bss 0x00000000007ff800 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(igetdate.o) - .bss 0x00000000007ff800 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(istdesc.o) - .bss 0x00000000007ff800 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(iupb.o) - .bss 0x00000000007ff800 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(iupbs01.o) - .bss 0x00000000007ff800 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(jstchr.o) - .bss 0x00000000007ff800 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(jstnum.o) - .bss 0x00000000007ff800 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lstjpb.o) - .bss 0x00000000007ff800 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(makestab.o) - .bss 0x00000000007ff800 0x30d40 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(mvb.o) - .bss 0x0000000000830540 0x0 
/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemock.o) - .bss 0x0000000000830540 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtab.o) - .bss 0x0000000000830540 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtbax.o) - .bss 0x0000000000830540 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nenuaa.o) - .bss 0x0000000000830540 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nenubd.o) - .bss 0x0000000000830540 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(numbck.o) - .bss 0x0000000000830540 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(numtab.o) - .bss 0x0000000000830540 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbt.o) - .bss 0x0000000000830540 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(parstr.o) - .bss 0x0000000000830540 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(parusr.o) - .bss 0x0000000000830540 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(parutg.o) - .bss 0x0000000000830540 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rcstpl.o) - .bss 0x0000000000830540 0x186a0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdmsgb.o) - .bss 0x0000000000848be0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(restd.o) - .bss 0x0000000000848be0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rsvfvm.o) - .bss 0x0000000000848be0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(strcln.o) - .bss 0x0000000000848be0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(tabsub.o) - .bss 0x0000000000848be0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(uptdd.o) - .bss 0x0000000000848be0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(wrdesc.o) - .bss 0x0000000000848be0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cadn30.o) - .bss 0x0000000000848be0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(chekstab.o) - .bss 0x0000000000848be0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(inctab.o) - .bss 0x0000000000848be0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtbb.o) - .bss 0x0000000000848be0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtbd.o) - .bss 0x0000000000848be0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(numtbd.o) - .bss 0x0000000000848be0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(tabent.o) - .bss 0x0000000000848be0 0x0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(valx.o) - .bss 0x0000000000848be0 0x0 
/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rjust.o) - .bss 0x0000000000848be0 0x48 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_init.o) - 0x0000000000848bf0 for__l_excpt_info - 0x0000000000848bfc for__l_fpe_mask - 0x0000000000848c00 for__l_undcnt - 0x0000000000848c04 for__l_ovfcnt - 0x0000000000848c08 for__l_div0cnt - 0x0000000000848c0c for__l_invcnt - 0x0000000000848c10 for__l_inecnt - 0x0000000000848c14 for__l_fmtrecl - 0x0000000000848c18 for__l_ufmtrecl - 0x0000000000848c1c for__l_blocksize - 0x0000000000848c20 for__l_buffercount - .bss 0x0000000000848c28 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_io_util.o) - *fill* 0x0000000000848c28 0x18 - .bss 0x0000000000848c40 0x440 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_open.o) - 0x0000000000849060 for__l_exit_hand_decl - .bss 0x0000000000849080 0x15e0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_preconnected_units_init.o) - .bss 0x000000000084a660 0x18 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_reentrancy.o) - 0x000000000084a670 for__reentrancy_mode - 0x000000000084a674 for__reentrancy_initialized - .bss 0x000000000084a678 0x4 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_secnds.o) - .bss 0x000000000084a67c 0x8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_stop.o) - .bss 0x000000000084a684 0x10 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_vm.o) - .bss 0x000000000084a694 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wint_fmt.o) - .bss 0x000000000084a694 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wseq_fmt.o) - .bss 0x000000000084a694 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wseq_lis.o) - *fill* 0x000000000084a694 0x4 - .bss 0x000000000084a698 0xd8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_aio.o) - 0x000000000084a740 for__aio_global_mutex - .bss 0x000000000084a770 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_open_proc.o) - .bss 0x000000000084a770 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_aio_wrap.o) - .bss 0x000000000084a770 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_text_to_int.o) - .bss 0x000000000084a770 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_vax_f.o) - .bss 0x000000000084a770 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_vax_d.o) - .bss 0x000000000084a770 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_vax_g.o) - .bss 0x000000000084a770 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cray.o) - .bss 0x000000000084a770 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_ibm_short.o) - .bss 0x000000000084a770 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_ibm_long.o) - .bss 0x000000000084a770 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_ieee_double.o) - .bss 0x000000000084a770 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_ieee_single.o) - .bss 0x000000000084a770 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_close.o) - .bss 0x000000000084a770 0x0 
/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_close_proc.o) - .bss 0x000000000084a770 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_default_io_sizes_env_init.o) - .bss 0x000000000084a770 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_desc_item.o) - *fill* 0x000000000084a770 0x10 - .bss 0x000000000084a780 0x260 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_diags_intel.o) - 0x000000000084a9a0 for__user_iomsg_buf - 0x000000000084a9a8 for__user_iomsg_len - .bss 0x000000000084a9e0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_exit.o) - .bss 0x000000000084a9e0 0x4 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_exit_handler.o) - 0x000000000084a9e0 for__l_exit_termination - .bss 0x000000000084a9e4 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_fmt_comp.o) - .bss 0x000000000084a9e4 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_fmt_val.o) - .bss 0x000000000084a9e4 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_get.o) - .bss 0x000000000084a9e4 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_intrp_fmt.o) - .bss 0x000000000084a9e4 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_ldir_wfs.o) - *fill* 0x000000000084a9e4 0x1c - .bss 0x000000000084aa00 0x2760 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_lub_mgt.o) - 0x000000000084aa20 for__lub_table - .bss 0x000000000084d160 0x20a0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_need_lf.o) - 0x000000000084d160 for__file_info_hash_table - .bss 0x000000000084f200 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_put.o) - .bss 0x000000000084f200 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_wseq.o) - .bss 0x000000000084f200 0x4 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(tbk_traceback.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt__globals.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_int_to_text.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_data_to_text.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_log_to_text.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_text_to_data.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_text_to_log.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cvtas_t.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cvtas_s.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvt_cvtas_x.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_a_to_s.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_a_to_t.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_s_to_a.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_t_to_a.o) - 
.bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_nan_s.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_nan_t.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_a_to_x.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_x_to_a.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_nan_x.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_globals.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_pow_ten_53.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_pow_ten_64.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(cvtas_pow_ten_128.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(fetestexcept.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lroundf_stub.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lround_stub.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lround_ct.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lroundf_ct.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lroundf_gen.o) - .bss 0x000000000084f204 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lround_gen.o) - *fill* 0x000000000084f204 0x4 - .bss 0x000000000084f208 0x8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(libm_error.o) - .bss 0x000000000084f210 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(matherrf.o) - .bss 0x000000000084f210 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(matherrl.o) - .bss 0x000000000084f210 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(matherr.o) - .bss 0x000000000084f210 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(ints2q.o) - .bss 0x000000000084f210 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(qcomp.o) - .bss 0x000000000084f210 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fp2q.o) - .bss 0x000000000084f210 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(q2fp.o) - .bss 0x000000000084f210 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(tbk_display.o) - *fill* 0x000000000084f210 0x10 - .bss 0x000000000084f220 0x180 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(tbk_backtrace.o) - 0x000000000084f2c0 tbk__jmp_env - .bss 0x000000000084f3a0 0x10 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(cpu_feature_disp.o) - 0x000000000084f3a0 __intel_cpu_feature_indicator - 0x000000000084f3a8 __intel_cpu_feature_indicator_x - .bss 0x000000000084f3b0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fastmemcpy.o) - .bss 0x000000000084f3b0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fastmemmove.o) - .bss 0x000000000084f3b0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fastmemset.o) - .bss 0x000000000084f3b0 0x0 
/opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(new_proc_init.o) - .bss 0x000000000084f3b0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(ia32_addsubq.o) - .bss 0x000000000084f3b0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(ia32_divq.o) - .bss 0x000000000084f3b0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strcpy.o) - .bss 0x000000000084f3b0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strncpy.o) - .bss 0x000000000084f3b0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strlen.o) - .bss 0x000000000084f3b0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strchr.o) - .bss 0x000000000084f3b0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strncmp.o) - .bss 0x000000000084f3b0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strcat.o) - .bss 0x000000000084f3b0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(sse2_strncat.o) - .bss 0x000000000084f3b0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fast_memcpy_pp.o) - .bss 0x000000000084f3b0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fast_memset_pp.o) - .bss 0x000000000084f3b0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(intel_ssse3_memcpy.o) - .bss 0x000000000084f3b0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(intel_ssse3_rep_memcpy.o) - .bss 0x000000000084f3b0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(intel_ssse3_memmove.o) - .bss 0x000000000084f3b0 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(intel_ssse3_rep_memmove.o) - *fill* 0x000000000084f3b0 0x10 - .bss 0x000000000084f3c0 0x420 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(irc_msg_support.o) - .bss 0x000000000084f7e0 0x60 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fast_mem_ops.o) - 0x000000000084f824 __libirc_mem_ops_method - 0x000000000084f828 __libirc_largest_cachelinesize - .bss 0x000000000084f840 0x0 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(proc_init_utils.o) - .bss 0x000000000084f840 0x0 /usr/lib64/libc_nonshared.a(elf-init.oS) - .bss 0x000000000084f840 0x0 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2//libgcc.a(_powidf2.o) - .bss 0x000000000084f840 0x0 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2/crtend.o - .bss 0x000000000084f840 0x0 /usr/lib/../lib64/crtn.o - *(COMMON) - COMMON 0x000000000084f840 0x1c4 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flflun.o) - 0x000000000084f840 gmbdta_ - *fill* 0x000000000084fa04 0x3c - COMMON 0x000000000084fa40 0x484 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbf.o) - 0x000000000084fa40 stbfr_ - 0x000000000084fb40 nulbfr_ - 0x000000000084fbc0 msgfmt_ - 0x000000000084fc40 msgcwd_ - 0x000000000084fec0 quiet_ - *fill* 0x000000000084fec4 0x1c - COMMON 0x000000000084fee0 0x2c /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdmsgw.o) - 0x000000000084fee0 hrdwrd_ - *fill* 0x000000000084ff0c 0x34 - COMMON 0x000000000084ff40 0x13d628 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readns.o) - 0x000000000084ff40 tables_ - *fill* 0x000000000098d568 0x18 - COMMON 0x000000000098d580 0x192e80 
/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readsb.o) - 0x000000000098d580 bitbuf_ - 0x0000000000b20380 unptyp_ - COMMON 0x0000000000b20400 0x753150 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbint.o) - 0x0000000000b20400 usrint_ - 0x0000000001273480 usrstr_ - *fill* 0x0000000001273550 0x30 - COMMON 0x0000000001273580 0x804 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(wrdlen.o) - 0x0000000001273580 charac_ - *fill* 0x0000000001273d84 0x3c - COMMON 0x0000000001273dc0 0xbbe88c /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(writdx.o) - 0x0000000001273dc0 dxtab_ - 0x00000000012740c0 tababd_ - *fill* 0x0000000001e3264c 0x34 - COMMON 0x0000000001e32680 0x188d4 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bfrini.o) - 0x0000000001e32680 maxcmp_ - 0x0000000001e326a0 msgstd_ - 0x0000000001e326c0 reptab_ - 0x0000000001e32740 bufrmg_ - 0x0000000001e3eaa0 msgcmp_ - 0x0000000001e3eac0 acmode_ - 0x0000000001e3eb00 bufrsr_ - 0x0000000001e4af00 dateln_ - 0x0000000001e4af20 mrgcom_ - 0x0000000001e4af40 padesc_ - *fill* 0x0000000001e4af54 0x2c - COMMON 0x0000000001e4af80 0xfc /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(msgwrt.o) - 0x0000000001e4af80 s01cm_ - 0x0000000001e4b000 sect01_ - *fill* 0x0000000001e4b07c 0x4 - COMMON 0x0000000001e4b080 0x27100 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdtree.o) - 0x0000000001e4b080 usrbit_ - COMMON 0x0000000001e72180 0x4a3c0 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(string.o) - 0x0000000001e72180 stcach_ - 0x0000000001eba600 stords_ - COMMON 0x0000000001ebc540 0x4 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(parutg.o) - 0x0000000001ebc540 utgprm_ - *fill* 0x0000000001ebc544 0x3c - COMMON 0x0000000001ebc580 0x16e3600 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rcstpl.o) - 0x0000000001ebc580 usrtmp_ - COMMON 0x000000000359fb80 0x10 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(tabsub.o) - 0x000000000359fb80 tabccc_ - COMMON 0x000000000359fb90 0xc /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_init.o) - 0x000000000359fb90 for__a_argv - 0x000000000359fb98 for__l_argc - *fill* 0x000000000359fb9c 0x4 - COMMON 0x000000000359fba0 0x480 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_aio.o) - 0x000000000359fba0 thread_count_mutex - 0x000000000359fbc8 threads_in_flight_mutex - 0x000000000359fbf0 for__pthread_mutex_unlock_ptr - 0x000000000359fbf8 for__pthread_mutex_init_ptr - 0x000000000359fc00 for__pthread_mutex_lock_ptr - 0x000000000359fc20 for__aio_lub_table - COMMON 0x00000000035a0020 0x8 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libifcore.a(for_diags_intel.o) - 0x00000000035a0020 message_catalog - 0x00000000035a0028 . = ALIGN ((. != 0x0)?0x8:0x1) - -.lbss - *(.dynlbss) - *(.lbss .lbss.* .gnu.linkonce.lb.*) - *(LARGE_COMMON) - 0x00000000035a0028 . = ALIGN (0x8) - -.lrodata - *(.lrodata .lrodata.* .gnu.linkonce.lr.*) - -.ldata 0x00000000039a0028 0x0 - *(.ldata .ldata.* .gnu.linkonce.l.*) - 0x00000000039a0028 . = ALIGN ((. != 0x0)?0x8:0x1) - 0x00000000039a0028 . = ALIGN (0x8) - 0x00000000039a0028 _end = . - 0x00000000039a0028 PROVIDE (end, .) 
- 0x00000000039a0028 . = DATA_SEGMENT_END (.) - -.stab - *(.stab) - -.stabstr - *(.stabstr) - -.stab.excl - *(.stab.excl) - -.stab.exclstr - *(.stab.exclstr) - -.stab.index - *(.stab.index) - -.stab.indexstr - *(.stab.indexstr) - -.comment 0x0000000000000000 0x73 - *(.comment) - .comment 0x0000000000000000 0x39 /usr/lib/../lib64/crt1.o - 0x3a (size before relaxing) - .comment 0x0000000000000000 0x3a /usr/lib/../lib64/crti.o - .comment 0x0000000000000039 0x26 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2/crtbegin.o - 0x27 (size before relaxing) - .comment 0x000000000000005f 0x14 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/for_main.o - .comment 0x0000000000000000 0x14 rdbfmsua.o - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flclos.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flflun.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltbop.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltdat.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(fltinq.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(stldsp.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(stlstr.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(strmbl.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(strmst.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(tbrstn.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flbksp.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flinqr.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flpath.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flsopn.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(ssenvr.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(ssgsym.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(stlcuc.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(stuclc.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(tbastn.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libgemlib.a(flglun.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libbridge.a(dcbsrh.o) - .comment 0x0000000000000000 0x27 
/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ireadns.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbf.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(posapn.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(posapx.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdmsgw.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readdx.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readns.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readsb.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(status.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbint.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ufbrw.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(upb.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(wrdlen.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(writdx.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(wtstat.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(adn30.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bfrini.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bort2.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bort_exit.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(bort.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(conwin.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cpbfdx.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(drstpl.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(dxinit.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(dxmini.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(getwin.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ibfms.o) - 
.comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ichkstr.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ifxy.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(invcon.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(invwin.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ipkm.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(irev.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(iupm.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lmsg.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lstrpc.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lstrps.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(msgwrt.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(newwin.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nmwrd.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nxtwin.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(ovrbs1.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(padmsg.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkb.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkbs1.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkc.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pktdd.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkvs01.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(pkvs1.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdbfdx.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdcmps.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdtree.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdusdx.o) - .comment 0x0000000000000000 0x27 
/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(readmg.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(seqsdx.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(stndrd.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(string.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(strnum.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(strsuc.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(trybump.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(upbb.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(upc.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(usrtpl.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(capit.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(chrtrna.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(chrtrn.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cktaba.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cnved4.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(digit.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(elemdx.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(getlens.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(gets1loc.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(i4dy.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(idn30.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(igetdate.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(istdesc.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(iupb.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(iupbs01.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(jstchr.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(jstnum.o) - 
.comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(lstjpb.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(makestab.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(mvb.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemock.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtab.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtbax.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nenuaa.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nenubd.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(numbck.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(numtab.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(openbt.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(parstr.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(parusr.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(parutg.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rcstpl.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rdmsgb.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(restd.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rsvfvm.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(strcln.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(tabsub.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(uptdd.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(wrdesc.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(cadn30.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(chekstab.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(inctab.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtbb.o) - .comment 0x0000000000000000 0x27 
/gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(nemtbd.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(numtbd.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(tabent.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(valx.o) - .comment 0x0000000000000000 0x27 /gpfs/hps/nco/ops/nwprod/gempak.v6.32.0/nawips/os/linux3.0.101_x86_64/lib/libncepBUFR.a(rjust.o) - .comment 0x0000000000000000 0x14 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lroundf_stub.o) - .comment 0x0000000000000000 0x14 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libimf.a(lround_stub.o) - .comment 0x0000000000000000 0x14 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fastmemcpy.o) - .comment 0x0000000000000000 0x14 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fastmemmove.o) - .comment 0x0000000000000000 0x14 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(fastmemset.o) - .comment 0x0000000000000000 0x14 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(ia32_addsubq.o) - .comment 0x0000000000000000 0x14 /opt/intel/composer_xe_2015.3.187/compiler/lib/intel64/libirc.a(ia32_divq.o) - .comment 0x0000000000000000 0x27 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2//libgcc.a(_powidf2.o) - .comment 0x0000000000000000 0x27 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2/crtend.o - .comment 0x0000000000000000 0x3a /usr/lib/../lib64/crtn.o - -.debug - *(.debug) - -.line - *(.line) - -.debug_srcinfo - *(.debug_srcinfo) - -.debug_sfnames - *(.debug_sfnames) - -.debug_aranges 0x0000000000000000 0x90 - *(.debug_aranges) - .debug_aranges - 0x0000000000000000 0x30 /usr/lib/../lib64/crt1.o - .debug_aranges - 0x0000000000000030 0x30 /usr/lib64/libc_nonshared.a(elf-init.oS) - .debug_aranges - 0x0000000000000060 0x30 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2//libgcc.a(_powidf2.o) - -.debug_pubnames - 0x0000000000000000 0x5f - *(.debug_pubnames) - .debug_pubnames - 0x0000000000000000 0x25 /usr/lib/../lib64/crt1.o - .debug_pubnames - 0x0000000000000025 0x3a /usr/lib64/libc_nonshared.a(elf-init.oS) - -.debug_info 0x0000000000000000 0x58e - *(.debug_info .gnu.linkonce.wi.*) - .debug_info 0x0000000000000000 0x102 /usr/lib/../lib64/crt1.o - .debug_info 0x0000000000000102 0x130 /usr/lib64/libc_nonshared.a(elf-init.oS) - .debug_info 0x0000000000000232 0x35c /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2//libgcc.a(_powidf2.o) - -.debug_abbrev 0x0000000000000000 0x22d - *(.debug_abbrev) - .debug_abbrev 0x0000000000000000 0x5f /usr/lib/../lib64/crt1.o - .debug_abbrev 0x000000000000005f 0xd4 /usr/lib64/libc_nonshared.a(elf-init.oS) - .debug_abbrev 0x0000000000000133 0xfa /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2//libgcc.a(_powidf2.o) - -.debug_line 0x0000000000000000 0x22c - *(.debug_line) - .debug_line 0x0000000000000000 0x88 /usr/lib/../lib64/crt1.o - .debug_line 0x0000000000000088 0x96 /usr/lib64/libc_nonshared.a(elf-init.oS) - .debug_line 0x000000000000011e 0x10e /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2//libgcc.a(_powidf2.o) - -.debug_frame 0x0000000000000000 0x58 - *(.debug_frame) - .debug_frame 0x0000000000000000 0x58 /usr/lib64/libc_nonshared.a(elf-init.oS) - -.debug_str 0x0000000000000000 0x428 - *(.debug_str) - .debug_str 0x0000000000000000 0x90 
/usr/lib/../lib64/crt1.o - 0xd0 (size before relaxing) - .debug_str 0x0000000000000090 0x6a /usr/lib64/libc_nonshared.a(elf-init.oS) - 0xe0 (size before relaxing) - .debug_str 0x00000000000000fa 0x32e /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2//libgcc.a(_powidf2.o) - 0x3ba (size before relaxing) - -.debug_loc 0x0000000000000000 0x1b7 - *(.debug_loc) - .debug_loc 0x0000000000000000 0xfe /usr/lib64/libc_nonshared.a(elf-init.oS) - .debug_loc 0x00000000000000fe 0xb9 /opt/gcc/4.9.2/snos/lib/gcc/x86_64-suse-linux/4.9.2//libgcc.a(_powidf2.o) - -.debug_macinfo - *(.debug_macinfo) - -.debug_weaknames - *(.debug_weaknames) - -.debug_funcnames - *(.debug_funcnames) - -.debug_typenames - *(.debug_typenames) - -.debug_varnames - *(.debug_varnames) - -.debug_pubtypes - *(.debug_pubtypes) - -.debug_ranges 0x0000000000000000 0x50 - *(.debug_ranges) - .debug_ranges 0x0000000000000000 0x50 /usr/lib64/libc_nonshared.a(elf-init.oS) - -.debug_macro - *(.debug_macro) - -.gnu.attributes - *(.gnu.attributes) - -/DISCARD/ - *(.note.GNU-stack) - *(.gnu_debuglink) - *(.gnu.lto_*) -OUTPUT(rdbfmsua elf64-x86-64) diff --git a/util/sorc/rdbfmsua.fd/README b/util/sorc/rdbfmsua.fd/README deleted file mode 100755 index 4128761bcf..0000000000 --- a/util/sorc/rdbfmsua.fd/README +++ /dev/null @@ -1,2 +0,0 @@ -added libgem.a and changed libbufr_4_32 to 64-bit. -also changed -m32 -m64. diff --git a/util/sorc/rdbfmsua.fd/README.new b/util/sorc/rdbfmsua.fd/README.new deleted file mode 100755 index f72a61f38a..0000000000 --- a/util/sorc/rdbfmsua.fd/README.new +++ /dev/null @@ -1,10 +0,0 @@ -added libgem.a and changed libbufr_4_32 to 64-bit. -also changed -m32 -m64. - -# JY - 02/09/2016 -Run the following command before run the "make" - . /nwprod/gempak/.gempak - -# Boi - 09/10/2016 -on CRAY -module load gempak/6.32.0 diff --git a/util/sorc/rdbfmsua.fd/compile_rdbfmsua_wcoss.sh b/util/sorc/rdbfmsua.fd/compile_rdbfmsua_wcoss.sh deleted file mode 100755 index 63d0ccdb2a..0000000000 --- a/util/sorc/rdbfmsua.fd/compile_rdbfmsua_wcoss.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/sh - -###################################################################### -# -# Build executable : GFS utilities -# -###################################################################### - -LMOD_EXACT_MATCH=no -source ../../../sorc/machine-setup.sh > /dev/null 2>&1 -cwd=$(pwd) - -if [ "$target" = "wcoss_dell_p3" ] || [ "$target" = "wcoss_cray" ] || [ "$target" = "hera" ] ; then - echo " " - echo " You are on WCOSS: $target " - echo " " -elif [ "$target" = "wcoss" ] ; then - echo " " - echo " " - echo " You are on WCOSS: $target " - echo " You do not need to build GFS utilities for GFS V15.0.0 " - echo " " - echo " " - exit -else - echo " " - echo " Your machine is $target is not recognized as a WCOSS machine." - echo " The script $0 can not continue. Aborting!" 
- echo " " - exit -fi -echo " " - -# Load required modules -source ../../modulefiles/gfs_util.${target} -module list - -set -x - -mkdir -p ../../exec -make -f makefile.$target -make -f makefile.$target clean -mv rdbfmsua ../../exec diff --git a/util/sorc/rdbfmsua.fd/makefile b/util/sorc/rdbfmsua.fd/makefile deleted file mode 100755 index 69d183f394..0000000000 --- a/util/sorc/rdbfmsua.fd/makefile +++ /dev/null @@ -1,84 +0,0 @@ -SHELL=/bin/sh -# -# This makefile was produced by /usr/bin/fmgen at 11:21:07 AM on 10/28/94 -# If it is invoked by the command line -# make -f makefile -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable named a.out. -# -# If it is invoked by the command line -# make -f makefile a.out.prof -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable which profiles -# named a.out.prof. -# -# To remove all the objects but leave the executables use the command line -# make -f makefile clean -# -# To remove everything but the source files use the command line -# make -f makefile clobber -# -# To remove the source files created by /usr/bin/fmgen and this makefile -# use the command line -# make -f makefile void -# -# The parameters SRCS and OBJS should not need to be changed. If, however, -# you need to add a new module add the name of the source module to the -# SRCS parameter and add the name of the resulting object file to the OBJS -# parameter. The new modules are not limited to fortran, but may be C, YACC, -# LEX, or CAL. An explicit rule will need to be added for PASCAL modules. -# -OBJS= rdbfmsua.o - - -# Tunable parameters -# -# FC Name of the fortran compiling system to use -# LDFLAGS Flags to the loader -# LIBS List of libraries -# CMD Name of the executable -# -FC = ifort -# FFLAGS = -O3 -q32 -I${GEMINC} -I${NAWIPS}/os/${NA_OS}/include -# FFLAGS = -I${GEMINC} -I${NAWIPS}/os/${NA_OS}/include -FFLAGS = -I${GEMINC} -I${OS_INC} -# LDFLAGS = -O3 -q32 -s -# LDFLAGS = -Wl,-Map,MAPFILE - -# BRIDGE=/gpfs/dell1/nco/ops/nwpara/gempak.v7.3.1/nawips/os/linux3.10.0_x86_64/lib/libbridge.a -BRIDGE=${GEMOLB}/libbridge.a - -LIBS = ${DECOD_UT_LIB} ${BUFR_LIB4} \ - -L${GEMOLB} -lgemlib -lappl -lsyslib -lcgemlib -lgfortran ${BRIDGE} - -# -L${GEMOLB} -lgemlib -lappl -lsyslib -lcgemlib -lgfortran ${BRIDGE} -# -L/nwprod/gempak/nawips1/os/linux2.6.32_x86_64/lib -lgemlib -lappl -lsyslib -lcgemlib -lbridge -lncepBUFR \ -# -lgfortran - -CMD = rdbfmsua - -# To perform the default compilation, use the first line -# To compile with flowtracing turned on, use the second line -# To compile giving profile additonal information, use the third line -# CFLAGS= -O3 -q32 - -# Lines from here on down should not need to be changed. They are the -# actual rules which make uses to build a.out. -# - -$(CMD): $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(LIBS) - - -# The following rule reads the required NAWIPS definitions and then recursively -# runs this same makefile with a new target in the spawned shell. 
-# - -clean: - -rm -f ${OBJS} - -clobber: clean - -rm -f ${CMD} - -void: clobber - -rm -f ${SRCS} makefile diff --git a/util/sorc/rdbfmsua.fd/makefile.hera b/util/sorc/rdbfmsua.fd/makefile.hera deleted file mode 100755 index a1359e6cb8..0000000000 --- a/util/sorc/rdbfmsua.fd/makefile.hera +++ /dev/null @@ -1,88 +0,0 @@ -SHELL=/bin/sh -# -# This makefile was produced by /usr/bin/fmgen at 11:21:07 AM on 10/28/94 -# If it is invoked by the command line -# make -f makefile -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable named a.out. -# -# If it is invoked by the command line -# make -f makefile a.out.prof -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable which profiles -# named a.out.prof. -# -# To remove all the objects but leave the executables use the command line -# make -f makefile clean -# -# To remove everything but the source files use the command line -# make -f makefile clobber -# -# To remove the source files created by /usr/bin/fmgen and this makefile -# use the command line -# make -f makefile void -# -# The parameters SRCS and OBJS should not need to be changed. If, however, -# you need to add a new module add the name of the source module to the -# SRCS parameter and add the name of the resulting object file to the OBJS -# parameter. The new modules are not limited to fortran, but may be C, YACC, -# LEX, or CAL. An explicit rule will need to be added for PASCAL modules. -# -OBJS= rdbfmsua.o - - -# Tunable parameters -# -# FC Name of the fortran compiling system to use -# LDFLAGS Flags to the loader -# LIBS List of libraries -# CMD Name of the executable -# -FC = ifort -# FFLAGS = -O3 -q32 -I${GEMINC} -I${NAWIPS}/os/${NA_OS}/include -# FFLAGS = -I${GEMINC} -I${NAWIPS}/os/${NA_OS}/include -FFLAGS = -I${GEMINC} -I${OS_INC} -# LDFLAGS = -O3 -q32 -s -# LDFLAGS = -Wl,-Map,MAPFILE - -# BRIDGE=/gpfs/dell1/nco/ops/nwpara/gempak.v7.3.1/nawips/os/linux3.10.0_x86_64/lib/libbridge.a -BRIDGE=${GEMOLB}/bridge.a -GFORTRAN=/apps/gcc/6.2.0/lib64 - -LIBS = ${DECOD_UT_LIB} ${BUFR_LIB4} \ - ${GEMLIB}/gemlib.a ${GEMLIB}/appl.a ${GEMLIB}/syslib.a ${GEMLIB}/cgemlib.a -L${GFORTRAN} -lgfortran ${BRIDGE} - -# LIBS = ${DECOD_UT_LIB} ${BUFR_LIB4} \ -# -L${GEMOLB} -lgemlib -lappl -lsyslib -lcgemlib -lgfortran ${BRIDGE} - -# -L${GEMOLB} -lgemlib -lappl -lsyslib -lcgemlib -lgfortran ${BRIDGE} -# -L/nwprod/gempak/nawips1/os/linux2.6.32_x86_64/lib -lgemlib -lappl -lsyslib -lcgemlib -lbridge -lncepBUFR \ -# -lgfortran - -CMD = rdbfmsua - -# To perform the default compilation, use the first line -# To compile with flowtracing turned on, use the second line -# To compile giving profile additonal information, use the third line -# CFLAGS= -O3 -q32 - -# Lines from here on down should not need to be changed. They are the -# actual rules which make uses to build a.out. -# - -$(CMD): $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(LIBS) - - -# The following rule reads the required NAWIPS definitions and then recursively -# runs this same makefile with a new target in the spawned shell. 
-# - -clean: - -rm -f ${OBJS} - -clobber: clean - -rm -f ${CMD} - -void: clobber - -rm -f ${SRCS} makefile diff --git a/util/sorc/rdbfmsua.fd/makefile.wcoss_cray b/util/sorc/rdbfmsua.fd/makefile.wcoss_cray deleted file mode 100755 index 69d183f394..0000000000 --- a/util/sorc/rdbfmsua.fd/makefile.wcoss_cray +++ /dev/null @@ -1,84 +0,0 @@ -SHELL=/bin/sh -# -# This makefile was produced by /usr/bin/fmgen at 11:21:07 AM on 10/28/94 -# If it is invoked by the command line -# make -f makefile -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable named a.out. -# -# If it is invoked by the command line -# make -f makefile a.out.prof -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable which profiles -# named a.out.prof. -# -# To remove all the objects but leave the executables use the command line -# make -f makefile clean -# -# To remove everything but the source files use the command line -# make -f makefile clobber -# -# To remove the source files created by /usr/bin/fmgen and this makefile -# use the command line -# make -f makefile void -# -# The parameters SRCS and OBJS should not need to be changed. If, however, -# you need to add a new module add the name of the source module to the -# SRCS parameter and add the name of the resulting object file to the OBJS -# parameter. The new modules are not limited to fortran, but may be C, YACC, -# LEX, or CAL. An explicit rule will need to be added for PASCAL modules. -# -OBJS= rdbfmsua.o - - -# Tunable parameters -# -# FC Name of the fortran compiling system to use -# LDFLAGS Flags to the loader -# LIBS List of libraries -# CMD Name of the executable -# -FC = ifort -# FFLAGS = -O3 -q32 -I${GEMINC} -I${NAWIPS}/os/${NA_OS}/include -# FFLAGS = -I${GEMINC} -I${NAWIPS}/os/${NA_OS}/include -FFLAGS = -I${GEMINC} -I${OS_INC} -# LDFLAGS = -O3 -q32 -s -# LDFLAGS = -Wl,-Map,MAPFILE - -# BRIDGE=/gpfs/dell1/nco/ops/nwpara/gempak.v7.3.1/nawips/os/linux3.10.0_x86_64/lib/libbridge.a -BRIDGE=${GEMOLB}/libbridge.a - -LIBS = ${DECOD_UT_LIB} ${BUFR_LIB4} \ - -L${GEMOLB} -lgemlib -lappl -lsyslib -lcgemlib -lgfortran ${BRIDGE} - -# -L${GEMOLB} -lgemlib -lappl -lsyslib -lcgemlib -lgfortran ${BRIDGE} -# -L/nwprod/gempak/nawips1/os/linux2.6.32_x86_64/lib -lgemlib -lappl -lsyslib -lcgemlib -lbridge -lncepBUFR \ -# -lgfortran - -CMD = rdbfmsua - -# To perform the default compilation, use the first line -# To compile with flowtracing turned on, use the second line -# To compile giving profile additonal information, use the third line -# CFLAGS= -O3 -q32 - -# Lines from here on down should not need to be changed. They are the -# actual rules which make uses to build a.out. -# - -$(CMD): $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(LIBS) - - -# The following rule reads the required NAWIPS definitions and then recursively -# runs this same makefile with a new target in the spawned shell. 
-# - -clean: - -rm -f ${OBJS} - -clobber: clean - -rm -f ${CMD} - -void: clobber - -rm -f ${SRCS} makefile diff --git a/util/sorc/rdbfmsua.fd/makefile.wcoss_dell_p3 b/util/sorc/rdbfmsua.fd/makefile.wcoss_dell_p3 deleted file mode 100755 index 69d183f394..0000000000 --- a/util/sorc/rdbfmsua.fd/makefile.wcoss_dell_p3 +++ /dev/null @@ -1,84 +0,0 @@ -SHELL=/bin/sh -# -# This makefile was produced by /usr/bin/fmgen at 11:21:07 AM on 10/28/94 -# If it is invoked by the command line -# make -f makefile -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable named a.out. -# -# If it is invoked by the command line -# make -f makefile a.out.prof -# it will compile the fortran modules indicated by SRCS into the object -# modules indicated by OBJS and produce an executable which profiles -# named a.out.prof. -# -# To remove all the objects but leave the executables use the command line -# make -f makefile clean -# -# To remove everything but the source files use the command line -# make -f makefile clobber -# -# To remove the source files created by /usr/bin/fmgen and this makefile -# use the command line -# make -f makefile void -# -# The parameters SRCS and OBJS should not need to be changed. If, however, -# you need to add a new module add the name of the source module to the -# SRCS parameter and add the name of the resulting object file to the OBJS -# parameter. The new modules are not limited to fortran, but may be C, YACC, -# LEX, or CAL. An explicit rule will need to be added for PASCAL modules. -# -OBJS= rdbfmsua.o - - -# Tunable parameters -# -# FC Name of the fortran compiling system to use -# LDFLAGS Flags to the loader -# LIBS List of libraries -# CMD Name of the executable -# -FC = ifort -# FFLAGS = -O3 -q32 -I${GEMINC} -I${NAWIPS}/os/${NA_OS}/include -# FFLAGS = -I${GEMINC} -I${NAWIPS}/os/${NA_OS}/include -FFLAGS = -I${GEMINC} -I${OS_INC} -# LDFLAGS = -O3 -q32 -s -# LDFLAGS = -Wl,-Map,MAPFILE - -# BRIDGE=/gpfs/dell1/nco/ops/nwpara/gempak.v7.3.1/nawips/os/linux3.10.0_x86_64/lib/libbridge.a -BRIDGE=${GEMOLB}/libbridge.a - -LIBS = ${DECOD_UT_LIB} ${BUFR_LIB4} \ - -L${GEMOLB} -lgemlib -lappl -lsyslib -lcgemlib -lgfortran ${BRIDGE} - -# -L${GEMOLB} -lgemlib -lappl -lsyslib -lcgemlib -lgfortran ${BRIDGE} -# -L/nwprod/gempak/nawips1/os/linux2.6.32_x86_64/lib -lgemlib -lappl -lsyslib -lcgemlib -lbridge -lncepBUFR \ -# -lgfortran - -CMD = rdbfmsua - -# To perform the default compilation, use the first line -# To compile with flowtracing turned on, use the second line -# To compile giving profile additonal information, use the third line -# CFLAGS= -O3 -q32 - -# Lines from here on down should not need to be changed. They are the -# actual rules which make uses to build a.out. -# - -$(CMD): $(OBJS) - $(FC) $(LDFLAGS) -o $(@) $(OBJS) $(LIBS) - - -# The following rule reads the required NAWIPS definitions and then recursively -# runs this same makefile with a new target in the spawned shell. -# - -clean: - -rm -f ${OBJS} - -clobber: clean - -rm -f ${CMD} - -void: clobber - -rm -f ${SRCS} makefile diff --git a/util/sorc/rdbfmsua.fd/rdbfmsua.f b/util/sorc/rdbfmsua.fd/rdbfmsua.f deleted file mode 100755 index c2d5088920..0000000000 --- a/util/sorc/rdbfmsua.fd/rdbfmsua.f +++ /dev/null @@ -1,398 +0,0 @@ - PROGRAM RDBFUA -C$$$ MAIN PROGRAM DOCUMENTATION BLOCK -C -C MAIN PROGRAM: RDBFUA -C PRGMMR: J. 
ATOR ORG: NP12 DATE: 2007-08-13 -C -C ABSTRACT: Upper Air Plotted Data for levels 1000MB; 925MB; 850MB; 700MB; -C 500MB; 400MB; 300MB; 250MB; 200MB; 150MB, and 100MB for the -C following regions: 1)United States; 2)Canada; 3)Alaska; and, -C the 4)Mexico and Caribbean. Note that Alaska includes eastern -C Russia. Also adding South America, Africa, and the Pacific. -C -C PROGRAM HISTORY LOG: -C -C 2007-08-13 J. ATOR -- ORIGINAL AUTHOR -C 2007-08-20 C. Magee -- Added block 25 (eastern Russia) -C 2007-09-20 S. Lilly -- Changing to read blks 60 thru 91. -C 2007-09-20 C. Magee -- Added code to read upper air and metar stn tables -C 2007-09-25 S. Lilly -- Added logic to write statements in order to put STID, -C STNM and TIME on the same line. -C 2007-09-27 C. Magee -- Change output for stntbl.out. Use st_rmbl to remove -C leading blank from reportid if internal write was -C used to convert integer WMO block/stn number to -C char report id. -C 2012-01-24 J. Cahoon -- Modified from original RDBFUA to include -C significant and standard together in output -C 2012-02-15 B. Mabe -- Changed Program name and output file to reflect -C change to output for sig and man data -C 2016-10-18 B. Vuong -- Removed hardwire '/nwprod/dictionaries/' in CALL FL_TBOP -C 2020-01-15 B. Vuong -- Increased dimensional array r8lvl(6,200) -C -C USAGE: -C INPUT FILES: -C UNIT 40 - adpupa dumpfile (contains data from BUFR tank b002/xx001) -C -C sonde.land.tbl -C metar.tbl -C -C OUTPUT FILES: -C UNIT 51 - rdbfmsua.out - contains ASCII upper air data for the desired -C stations. -C UNIT 52 - stnmstbl.out - contains ASCII station table info for use by -C html generator. -C -C SUBPROGRAMS CALLED: -C UNIQUE: -C LIBRARY: BUFRLIB - OPENBF UFBINT -C GEMLIB - FL_TBOP ST_RMBL TB_RSTN -C BRIDGE - DC_BSRH -C -C EXIT STATES: -C COND = 0 - SUCCESSFUL RUN -C -C REMARKS: -C -C ATTRIBUTES: -C LANGUAGE: FORTRAN 90 -C MACHINE : IBM-SP -C -C$$$ - INCLUDE 'GEMPRM.PRM' - INCLUDE 'BRIDGE.PRM' -C*---------------------------------------------------------------------- -C* Set the name of the output file. -C*---------------------------------------------------------------------- - - CHARACTER*(*) FLO, STNO - - PARAMETER ( FLO = 'rdbfmsua.out' ) - PARAMETER ( STNO = 'sonde.idsms.tbl' ) - - REAL*8 BFMSNG - PARAMETER ( BFMSNG = 10.0E10 ) - - PARAMETER ( GPMSNG = -9999.0 ) - PARAMETER ( MAXSTN = 10000 ) - - REAL*8 r8hdr ( 9, 1 ), r8lvl ( 6, 200 ), r8arr( 1, 1 ) - REAL*8 r8tmp ( 6, 100 ), r8out ( 6, 300 ),swpbuf - REAL*8 r8tmptot ( 6, 300 ) - - CHARACTER*8 cmgtag, reportid - CHARACTER stnnam*32, tbchrs*20, state*2, tabcon*2 - CHARACTER ldcoun( LLSTFL )*2, mtcoun ( MAXSTN )*2 - CHARACTER ldstid ( LLSTFL )*8, mtstid ( MAXSTN )*8 - INTEGER ldstnm ( LLSTFL ), mtstnm ( MAXSTN ), ispri - INTEGER itabnum - REAL slat, slon, selv - LOGICAL nomatch, needHeader - -C*---------------------------------------------------------------------- -C* Open and read the sonde land station table. -C*---------------------------------------------------------------------- - CALL FL_TBOP ( 'sonde.land.tbl', - + 'stns', iunltb, iertop ) - IF ( iertop .ne. 0 ) THEN - print*,' error opening sonde land station table' - END IF - - ii = 1 - ierrst = 0 - DO WHILE ( ( ii .le. LLSTFL ) .and. ( ierrst .eq. 0 ) ) - CALL TB_RSTN ( iunltb, ldstid (ii), stnnam, ldstnm (ii), - + state, ldcoun (ii), slat, slon, - + selv, ispri, tbchrs, ierrst ) - ii = ii + 1 - END DO - IF ( ierrst .eq. 
-1 ) THEN - numua = ii - 1 - END IF -C*---------------------------------------------------------------------- -C* Close the sonde land station table file. -C*---------------------------------------------------------------------- - CALL FL_CLOS ( iunltb, iercls ) -C*---------------------------------------------------------------------- -C* Open and read the metar station table. -C*---------------------------------------------------------------------- - CALL FL_TBOP ( 'metar_stnm.tbl', - + 'stns', iunmtb, iertop ) - IF ( iertop .ne. 0 ) THEN - print*,' error opening metar station table' - END IF - - jj = 1 - ierrst = 0 - DO WHILE ( ( jj .le. MAXSTN ) .and. ( ierrst .eq. 0 ) ) - CALL TB_RSTN ( iunmtb, mtstid (jj), stnnam, mtstnm (jj), - + state, mtcoun(jj), slat, slon, - + selv, ispri, tbchrs, ierrst ) - jj = jj + 1 - END DO - IF ( ierrst .eq. -1 ) THEN - nummet = jj - 1 - END IF -C*---------------------------------------------------------------------- -C* Close the metar station table file. -C*---------------------------------------------------------------------- - CALL FL_CLOS ( iunmtb, iercls ) -C*---------------------------------------------------------------------- -C* Open and initialize the output files. -C*---------------------------------------------------------------------- - - OPEN ( UNIT = 51, FILE = FLO ) - WRITE ( 51, FMT = '(A)' ) 'PARM=PRES;HGHT;TMPK;DWPK;DRCT;SPED' - OPEN ( UNIT = 52, FILE = STNO) - -C*---------------------------------------------------------------------- -C* Open the BUFR file. -C*---------------------------------------------------------------------- - - CALL OPENBF ( 40, 'IN', 40 ) - -C*---------------------------------------------------------------------- -C* Read a BUFR subset from the BUFR file. -C*---------------------------------------------------------------------- - - DO WHILE ( IREADNS ( 40, cmgtag, imgdt ) .eq. 0 ) - - IF ( cmgtag .eq. 'NC002001' ) THEN - -C*---------------------------------------------------------------------- -C* Unpack the header information from this subset. -C*---------------------------------------------------------------------- - - CALL UFBINT ( 40, r8hdr, 9, 1, nlev, - + 'WMOB WMOS CLAT CLON SELV YEAR MNTH DAYS HOUR' ) - - IF ( ( ( r8hdr(1,1) .ge. 60 ) .and. - + ( r8hdr(1,1) .le. 91 ) ) .or. - + ( r8hdr(1,1) .eq. 25 ) ) THEN - -C*---------------------------------------------------------------------- -C* Unpack the level information from this subset. -C* and replicate for VISG =2,4,and 32 -C*---------------------------------------------------------------------- - levelit = 0 - needHeader = .true. - nlevtot = 0 - DO WHILE ( levelit .le. 2 ) - IF ( levelit .eq. 0 ) THEN - CALL UFBINT ( 40, r8lvl, 6, 50, nlev, - + 'VSIG=2 PRLC GP10 TMDB TMDP WDIR WSPD' ) - ELSE IF ( levelit .eq. 1 ) THEN - CALL UFBINT ( 40, r8lvl, 6, 50, nlev, - + 'VSIG=4 PRLC GP10 TMDB TMDP WDIR WSPD' ) - ELSE IF ( levelit .eq. 2 ) THEN - CALL UFBINT ( 40, r8lvl, 6, 50, nlev, - + 'VSIG=32 PRLC GP10 TMDB TMDP WDIR WSPD' ) - END IF - IF ( nlev .gt. 0 ) THEN -C*---------------------------------------------------------------------- -C* Find the corresponding 3 or 4 character ID -C* in the sonde land station table. Store into -C* reportid only if non-blank. -C*---------------------------------------------------------------------- - iblkstn = NINT( r8hdr(1,1)*1000 + r8hdr(2,1) ) - nomatch = .true. - CALL DC_BSRH ( iblkstn, ldstnm, numua, - + ii, iersrh ) - IF ( iersrh .ge. 
0 ) THEN - reportid = ldstid(ii) - tabcon = ldcoun(ii) - itabnum = ldstnm(ii) - IF ( ldstid (ii) .ne. ' ') THEN - nomatch = .false. - END IF - END IF -C*---------------------------------------------------------------------- -C* Either no match in sonde land table or tdstid -C* was found but ldstid was blank, so check metar -C* table for match and non-blank char id. -C*---------------------------------------------------------------------- - IF ( nomatch ) THEN - mblkstn = INT( iblkstn * 10 ) - CALL DC_BSRH ( mblkstn, mtstnm, nummet, - + jj, iersrh ) - IF ( iersrh .ge. 0 ) THEN - reportid = mtstid(jj) - tabcon = mtcoun(jj) - itabnum = mtstnm(jj) - nomatch = .false. - END IF - END IF -C*---------------------------------------------------------------------- -C* If no header, build it -C*---------------------------------------------------------------------- - IF ( needHeader ) THEN -C*---------------------------------------------------------------------- -C* Write the data to the output file. -C*---------------------------------------------------------------------- - IF ( reportid .ne. ' ' ) THEN -C*---------------------------------------------------------------------- -C* 3- or 4-char ID found. -C*---------------------------------------------------------------------- - WRITE ( 51, - + FMT = '(/,A,A5,3X,A,I2,I3.3,3x,A,3I2.2,A,2I2.2)' ) - + 'STID=', reportid(1:5), - + 'STNM=', INT(r8hdr(1,1)), INT(r8hdr(2,1)), - + 'TIME=', MOD(NINT(r8hdr(6,1)),100), - + NINT(r8hdr(7,1)), NINT(r8hdr(8,1)), - + '/', NINT(r8hdr(9,1)), 0 - WRITE ( 51, - + FMT = '(2(A,F7.2,1X),A,F7.1)' ) - + 'SLAT=', r8hdr(3,1), - + 'SLON=', r8hdr(4,1), - + 'SELV=', r8hdr(5,1) - ELSE -C*---------------------------------------------------------------------- -C* write WMO block/station instead -C*---------------------------------------------------------------------- - WRITE ( 51, - + FMT = '(/,A,I2,I3.3,3X,A,I2,I3.3,3x,A,3I2.2,A,2I2.2)' ) - + 'STID=', INT(r8hdr(1,1)), INT(r8hdr(2,1)), - + 'STNM=', INT(r8hdr(1,1)), INT(r8hdr(2,1)), - + 'TIME=', MOD(NINT(r8hdr(6,1)),100), - + NINT(r8hdr(7,1)), NINT(r8hdr(8,1)), - + '/', NINT(r8hdr(9,1)), 0 - WRITE ( 51, - + FMT = '(2(A,F7.2,1X),A,F7.1)' ) - + 'SLAT=', r8hdr(3,1), - + 'SLON=', r8hdr(4,1), - + 'SELV=', r8hdr(5,1) - END IF - - - WRITE ( 51, FMT = '(/,6(A8,1X))' ) - + 'PRES', 'HGHT', 'TMPK', 'DWPK', 'DRCT', 'SPED' - needHeader = .false. - END IF - DO jj = 1, nlev - -C*---------------------------------------------------------------------- -C* Convert pressure to millibars. -C*---------------------------------------------------------------------- - - IF ( r8lvl(1,jj) .lt. BFMSNG ) THEN - r8lvl(1,jj) = r8lvl (1,jj) / 100.0 - ELSE - r8lvl(1,jj) = GPMSNG - END IF - -C*---------------------------------------------------------------------- -C* Convert geopotential to height in meters. -C*---------------------------------------------------------------------- - - IF ( r8lvl(2,jj) .lt. BFMSNG ) THEN - r8lvl (2,jj) = r8lvl (2,jj) / 9.8 - ELSE - r8lvl (2,jj) = GPMSNG - END IF - - DO ii = 3, 6 - IF ( r8lvl(ii,jj) .ge. BFMSNG ) THEN - r8lvl (ii,jj) = GPMSNG - END IF - END DO - END DO -C*---------------------------------------------------------------------- -C* itterate through levels and add to total array -C* ignore -9999 and 0 pressure levels -C*---------------------------------------------------------------------- - IF ( nlevtot .eq. 0 ) THEN - nlevtot = 1 - END IF - DO jj = 1,nlev - IF ( r8lvl(1,jj) .gt. 
99 ) THEN - DO ii = 1,6 - r8tmptot(ii,nlevtot) = r8lvl(ii,jj) - END DO - nlevtot = nlevtot + 1 - END IF - END DO - nlevtot = nlevtot - 1 - END IF - levelit = levelit + 1 - END DO -C*--------------------------------------------------------------------- -C* bubble sort so output starts at lowest level of the -C* atmosphere (usu. 1000mb), only if there are available -C* levels -C*--------------------------------------------------------------------- - IF (nlevtot .gt. 0) THEN - istop = nlevtot - 1 - iswflg = 1 - DO WHILE ( ( iswflg .ne. 0 ) .and. - + ( istop .ge. 1 ) ) - iswflg = 0 -C - DO j = 1, istop - IF ( r8tmptot(1,j) .lt. r8tmptot(1,j+1) ) THEN - iswflg = 1 - DO i = 1,6 - swpbuf = r8tmptot (i,j) - r8tmptot (i,j) = r8tmptot (i,j+1) - r8tmptot (i,j+1) = swpbuf - END DO - END IF - END DO - istop = istop-1 - END DO -C*--------------------------------------------------------------------- -C* check for exact or partial dupes and only write -C* one line for each level to output file. -C*--------------------------------------------------------------------- - DO jj = 1,nlevtot - DO ii = 1,6 - r8out(ii,jj) = r8tmptot(ii,jj) - END DO - END DO - - kk = 1 - DO jj = 1,nlevtot-1 - IF ( r8out(1,kk) .eq. r8tmptot(1,jj+1) ) THEN - r8out(1,kk) = r8tmptot(1,jj) - DO ii = 2,6 - IF ( r8out(ii,kk) .lt. r8tmptot(ii,jj+1)) - + THEN - r8out(ii,kk) = r8tmptot(ii,jj+1) - END IF - END DO - ELSE - kk = kk + 1 - r8out(1,kk) = r8tmptot(1,jj+1) - r8out(2,kk) = r8tmptot(2,jj+1) - r8out(3,kk) = r8tmptot(3,jj+1) - r8out(4,kk) = r8tmptot(4,jj+1) - r8out(5,kk) = r8tmptot(5,jj+1) - r8out(6,kk) = r8tmptot(6,jj+1) - END IF - END DO -C*---------------------------------------------------------------------- -C* write pres, hght, temp, dew point, wind dir, -C* and wind speed to output file. -C*---------------------------------------------------------------------- - DO jj = 1,kk - WRITE ( 51, FMT = '(6(F8.2,1X))' ) - + ( r8out (ii,jj), ii = 1,6 ) - END DO -C*---------------------------------------------------------------------- -C* Write info for the current station to new table. -C* Includes reportid, lat, lon, country, and blk/ -C* stn. -C*---------------------------------------------------------------------- - IF ( reportid .eq. ' ') THEN - WRITE ( reportid(1:6),FMT='(I6)') itabnum - CALL ST_RMBL ( reportid,reportid,len,iret ) - END IF - WRITE ( 52, FMT = '(A6,F7.2,1X,F7.2, - + 1X,A2,1x,I6)' ) - + reportid(1:6),r8hdr(3,1),r8hdr(4,1), - + tabcon,itabnum - END IF - END IF - END IF - END DO - - STOP - END diff --git a/util/sorc/rdbfmsua.fd/rdbfmsua.f_org b/util/sorc/rdbfmsua.fd/rdbfmsua.f_org deleted file mode 100755 index 343c985fcb..0000000000 --- a/util/sorc/rdbfmsua.fd/rdbfmsua.f_org +++ /dev/null @@ -1,397 +0,0 @@ - PROGRAM RDBFUA -C$$$ MAIN PROGRAM DOCUMENTATION BLOCK -C -C MAIN PROGRAM: RDBFUA -C PRGMMR: J. ATOR ORG: NP12 DATE: 2007-08-13 -C -C ABSTRACT: Upper Air Plotted Data for levels 1000MB; 925MB; 850MB; 700MB; -C 500MB; 400MB; 300MB; 250MB; 200MB; 150MB, and 100MB for the -C following regions: 1)United States; 2)Canada; 3)Alaska; and, -C the 4)Mexico and Caribbean. Note that Alaska includes eastern -C Russia. Also adding South America, Africa, and the Pacific. -C -C PROGRAM HISTORY LOG: -C -C 2007-08-13 J. ATOR -- ORIGINAL AUTHOR -C 2007-08-20 C. Magee -- Added block 25 (eastern Russia) -C 2007-09-20 S. Lilly -- Changing to read blks 60 thru 91. -C 2007-09-20 C. Magee -- Added code to read upper air and metar stn tables -C 2007-09-25 S. 
Lilly -- Added logic to write statements in order to put STID, -C STNM and TIME on the same line. -C 2007-09-27 C. Magee -- Change output for stntbl.out. Use st_rmbl to remove -C leading blank from reportid if internal write was -C used to convert integer WMO block/stn number to -C char report id. -C 2012-01-24 J. Cahoon -- Modified from original RDBFUA to include -C significant and standard together in output -C 2012-02-15 B. Mabe -- Changed Program name and output file to reflect -C change to output for sig and man data -C 2016-10-18 B. Vuong -- Removed hardwire '/nwprod/dictionaries/' in CALL FL_TBOP -C -C USAGE: -C INPUT FILES: -C UNIT 40 - adpupa dumpfile (contains data from BUFR tank b002/xx001) -C -C sonde.land.tbl -C metar.tbl -C -C OUTPUT FILES: -C UNIT 51 - rdbfmsua.out - contains ASCII upper air data for the desired -C stations. -C UNIT 52 - stnmstbl.out - contains ASCII station table info for use by -C html generator. -C -C SUBPROGRAMS CALLED: -C UNIQUE: -C LIBRARY: BUFRLIB - OPENBF UFBINT -C GEMLIB - FL_TBOP ST_RMBL TB_RSTN -C BRIDGE - DC_BSRH -C -C EXIT STATES: -C COND = 0 - SUCCESSFUL RUN -C -C REMARKS: -C -C ATTRIBUTES: -C LANGUAGE: FORTRAN 90 -C MACHINE : IBM-SP -C -C$$$ - INCLUDE 'GEMPRM.PRM' - INCLUDE 'BRIDGE.PRM' -C*---------------------------------------------------------------------- -C* Set the name of the output file. -C*---------------------------------------------------------------------- - - CHARACTER*(*) FLO, STNO - - PARAMETER ( FLO = 'rdbfmsua.out' ) - PARAMETER ( STNO = 'sonde.idsms.tbl' ) - - REAL*8 BFMSNG - PARAMETER ( BFMSNG = 10.0E10 ) - - PARAMETER ( GPMSNG = -9999.0 ) - PARAMETER ( MAXSTN = 10000 ) - - REAL*8 r8hdr ( 9, 1 ), r8lvl ( 6, 100 ), r8arr( 1, 1 ) - REAL*8 r8tmp ( 6, 100 ), r8out ( 6, 300 ),swpbuf - REAL*8 r8tmptot ( 6, 300 ) - - CHARACTER*8 cmgtag, reportid - CHARACTER stnnam*32, tbchrs*20, state*2, tabcon*2 - CHARACTER ldcoun( LLSTFL )*2, mtcoun ( MAXSTN )*2 - CHARACTER ldstid ( LLSTFL )*8, mtstid ( MAXSTN )*8 - INTEGER ldstnm ( LLSTFL ), mtstnm ( MAXSTN ), ispri - INTEGER itabnum - REAL slat, slon, selv - LOGICAL nomatch, needHeader - -C*---------------------------------------------------------------------- -C* Open and read the sonde land station table. -C*---------------------------------------------------------------------- - CALL FL_TBOP ( 'sonde.land.tbl', - + 'stns', iunltb, iertop ) - IF ( iertop .ne. 0 ) THEN - print*,' error opening sonde land station table' - END IF - - ii = 1 - ierrst = 0 - DO WHILE ( ( ii .le. LLSTFL ) .and. ( ierrst .eq. 0 ) ) - CALL TB_RSTN ( iunltb, ldstid (ii), stnnam, ldstnm (ii), - + state, ldcoun (ii), slat, slon, - + selv, ispri, tbchrs, ierrst ) - ii = ii + 1 - END DO - IF ( ierrst .eq. -1 ) THEN - numua = ii - 1 - END IF -C*---------------------------------------------------------------------- -C* Close the sonde land station table file. -C*---------------------------------------------------------------------- - CALL FL_CLOS ( iunltb, iercls ) -C*---------------------------------------------------------------------- -C* Open and read the metar station table. -C*---------------------------------------------------------------------- - CALL FL_TBOP ( 'metar_stnm.tbl', - + 'stns', iunmtb, iertop ) - IF ( iertop .ne. 0 ) THEN - print*,' error opening metar station table' - END IF - - jj = 1 - ierrst = 0 - DO WHILE ( ( jj .le. MAXSTN ) .and. ( ierrst .eq. 
0 ) ) - CALL TB_RSTN ( iunmtb, mtstid (jj), stnnam, mtstnm (jj), - + state, mtcoun(jj), slat, slon, - + selv, ispri, tbchrs, ierrst ) - jj = jj + 1 - END DO - IF ( ierrst .eq. -1 ) THEN - nummet = jj - 1 - END IF -C*---------------------------------------------------------------------- -C* Close the metar station table file. -C*---------------------------------------------------------------------- - CALL FL_CLOS ( iunmtb, iercls ) -C*---------------------------------------------------------------------- -C* Open and initialize the output files. -C*---------------------------------------------------------------------- - - OPEN ( UNIT = 51, FILE = FLO ) - WRITE ( 51, FMT = '(A)' ) 'PARM=PRES;HGHT;TMPK;DWPK;DRCT;SPED' - OPEN ( UNIT = 52, FILE = STNO) - -C*---------------------------------------------------------------------- -C* Open the BUFR file. -C*---------------------------------------------------------------------- - - CALL OPENBF ( 40, 'IN', 40 ) - -C*---------------------------------------------------------------------- -C* Read a BUFR subset from the BUFR file. -C*---------------------------------------------------------------------- - - DO WHILE ( IREADNS ( 40, cmgtag, imgdt ) .eq. 0 ) - - IF ( cmgtag .eq. 'NC002001' ) THEN - -C*---------------------------------------------------------------------- -C* Unpack the header information from this subset. -C*---------------------------------------------------------------------- - - CALL UFBINT ( 40, r8hdr, 9, 1, nlev, - + 'WMOB WMOS CLAT CLON SELV YEAR MNTH DAYS HOUR' ) - - IF ( ( ( r8hdr(1,1) .ge. 60 ) .and. - + ( r8hdr(1,1) .le. 91 ) ) .or. - + ( r8hdr(1,1) .eq. 25 ) ) THEN - -C*---------------------------------------------------------------------- -C* Unpack the level information from this subset. -C* and replicate for VISG =2,4,and 32 -C*---------------------------------------------------------------------- - levelit = 0 - needHeader = .true. - nlevtot = 0 - DO WHILE ( levelit .le. 2 ) - IF ( levelit .eq. 0 ) THEN - CALL UFBINT ( 40, r8lvl, 6, 50, nlev, - + 'VSIG=2 PRLC GP10 TMDB TMDP WDIR WSPD' ) - ELSE IF ( levelit .eq. 1 ) THEN - CALL UFBINT ( 40, r8lvl, 6, 50, nlev, - + 'VSIG=4 PRLC GP10 TMDB TMDP WDIR WSPD' ) - ELSE IF ( levelit .eq. 2 ) THEN - CALL UFBINT ( 40, r8lvl, 6, 50, nlev, - + 'VSIG=32 PRLC GP10 TMDB TMDP WDIR WSPD' ) - END IF - IF ( nlev .gt. 0 ) THEN -C*---------------------------------------------------------------------- -C* Find the corresponding 3 or 4 character ID -C* in the sonde land station table. Store into -C* reportid only if non-blank. -C*---------------------------------------------------------------------- - iblkstn = NINT( r8hdr(1,1)*1000 + r8hdr(2,1) ) - nomatch = .true. - CALL DC_BSRH ( iblkstn, ldstnm, numua, - + ii, iersrh ) - IF ( iersrh .ge. 0 ) THEN - reportid = ldstid(ii) - tabcon = ldcoun(ii) - itabnum = ldstnm(ii) - IF ( ldstid (ii) .ne. ' ') THEN - nomatch = .false. - END IF - END IF -C*---------------------------------------------------------------------- -C* Either no match in sonde land table or tdstid -C* was found but ldstid was blank, so check metar -C* table for match and non-blank char id. -C*---------------------------------------------------------------------- - IF ( nomatch ) THEN - mblkstn = INT( iblkstn * 10 ) - CALL DC_BSRH ( mblkstn, mtstnm, nummet, - + jj, iersrh ) - IF ( iersrh .ge. 0 ) THEN - reportid = mtstid(jj) - tabcon = mtcoun(jj) - itabnum = mtstnm(jj) - nomatch = .false. 
- END IF - END IF -C*---------------------------------------------------------------------- -C* If no header, build it -C*---------------------------------------------------------------------- - IF ( needHeader ) THEN -C*---------------------------------------------------------------------- -C* Write the data to the output file. -C*---------------------------------------------------------------------- - IF ( reportid .ne. ' ' ) THEN -C*---------------------------------------------------------------------- -C* 3- or 4-char ID found. -C*---------------------------------------------------------------------- - WRITE ( 51, - + FMT = '(/,A,A5,3X,A,I2,I3.3,3x,A,3I2.2,A,2I2.2)' ) - + 'STID=', reportid(1:5), - + 'STNM=', INT(r8hdr(1,1)), INT(r8hdr(2,1)), - + 'TIME=', MOD(NINT(r8hdr(6,1)),100), - + NINT(r8hdr(7,1)), NINT(r8hdr(8,1)), - + '/', NINT(r8hdr(9,1)), 0 - WRITE ( 51, - + FMT = '(2(A,F7.2,1X),A,F7.1)' ) - + 'SLAT=', r8hdr(3,1), - + 'SLON=', r8hdr(4,1), - + 'SELV=', r8hdr(5,1) - ELSE -C*---------------------------------------------------------------------- -C* write WMO block/station instead -C*---------------------------------------------------------------------- - WRITE ( 51, - + FMT = '(/,A,I2,I3.3,3X,A,I2,I3.3,3x,A,3I2.2,A,2I2.2)' ) - + 'STID=', INT(r8hdr(1,1)), INT(r8hdr(2,1)), - + 'STNM=', INT(r8hdr(1,1)), INT(r8hdr(2,1)), - + 'TIME=', MOD(NINT(r8hdr(6,1)),100), - + NINT(r8hdr(7,1)), NINT(r8hdr(8,1)), - + '/', NINT(r8hdr(9,1)), 0 - WRITE ( 51, - + FMT = '(2(A,F7.2,1X),A,F7.1)' ) - + 'SLAT=', r8hdr(3,1), - + 'SLON=', r8hdr(4,1), - + 'SELV=', r8hdr(5,1) - END IF - - - WRITE ( 51, FMT = '(/,6(A8,1X))' ) - + 'PRES', 'HGHT', 'TMPK', 'DWPK', 'DRCT', 'SPED' - needHeader = .false. - END IF - DO jj = 1, nlev - -C*---------------------------------------------------------------------- -C* Convert pressure to millibars. -C*---------------------------------------------------------------------- - - IF ( r8lvl(1,jj) .lt. BFMSNG ) THEN - r8lvl(1,jj) = r8lvl (1,jj) / 100.0 - ELSE - r8lvl(1,jj) = GPMSNG - END IF - -C*---------------------------------------------------------------------- -C* Convert geopotential to height in meters. -C*---------------------------------------------------------------------- - - IF ( r8lvl(2,jj) .lt. BFMSNG ) THEN - r8lvl (2,jj) = r8lvl (2,jj) / 9.8 - ELSE - r8lvl (2,jj) = GPMSNG - END IF - - DO ii = 3, 6 - IF ( r8lvl(ii,jj) .ge. BFMSNG ) THEN - r8lvl (ii,jj) = GPMSNG - END IF - END DO - END DO -C*---------------------------------------------------------------------- -C* itterate through levels and add to total array -C* ignore -9999 and 0 pressure levels -C*---------------------------------------------------------------------- - IF ( nlevtot .eq. 0 ) THEN - nlevtot = 1 - END IF - DO jj = 1,nlev - IF ( r8lvl(1,jj) .gt. 99 ) THEN - DO ii = 1,6 - r8tmptot(ii,nlevtot) = r8lvl(ii,jj) - END DO - nlevtot = nlevtot + 1 - END IF - END DO - nlevtot = nlevtot - 1 - END IF - levelit = levelit + 1 - END DO -C*--------------------------------------------------------------------- -C* bubble sort so output starts at lowest level of the -C* atmosphere (usu. 1000mb), only if there are available -C* levels -C*--------------------------------------------------------------------- - IF (nlevtot .gt. 0) THEN - istop = nlevtot - 1 - iswflg = 1 - DO WHILE ( ( iswflg .ne. 0 ) .and. - + ( istop .ge. 1 ) ) - iswflg = 0 -C - DO j = 1, istop - IF ( r8tmptot(1,j) .lt. 
r8tmptot(1,j+1) ) THEN - iswflg = 1 - DO i = 1,6 - swpbuf = r8tmptot (i,j) - r8tmptot (i,j) = r8tmptot (i,j+1) - r8tmptot (i,j+1) = swpbuf - END DO - END IF - END DO - istop = istop-1 - END DO -C*--------------------------------------------------------------------- -C* check for exact or partial dupes and only write -C* one line for each level to output file. -C*--------------------------------------------------------------------- - DO jj = 1,nlevtot - DO ii = 1,6 - r8out(ii,jj) = r8tmptot(ii,jj) - END DO - END DO - - kk = 1 - DO jj = 1,nlevtot-1 - IF ( r8out(1,kk) .eq. r8tmptot(1,jj+1) ) THEN - r8out(1,kk) = r8tmptot(1,jj) - DO ii = 2,6 - IF ( r8out(ii,kk) .lt. r8tmptot(ii,jj+1)) - + THEN - r8out(ii,kk) = r8tmptot(ii,jj+1) - END IF - END DO - ELSE - kk = kk + 1 - r8out(1,kk) = r8tmptot(1,jj+1) - r8out(2,kk) = r8tmptot(2,jj+1) - r8out(3,kk) = r8tmptot(3,jj+1) - r8out(4,kk) = r8tmptot(4,jj+1) - r8out(5,kk) = r8tmptot(5,jj+1) - r8out(6,kk) = r8tmptot(6,jj+1) - END IF - END DO -C*---------------------------------------------------------------------- -C* write pres, hght, temp, dew point, wind dir, -C* and wind speed to output file. -C*---------------------------------------------------------------------- - DO jj = 1,kk - WRITE ( 51, FMT = '(6(F8.2,1X))' ) - + ( r8out (ii,jj), ii = 1,6 ) - END DO -C*---------------------------------------------------------------------- -C* Write info for the current station to new table. -C* Includes reportid, lat, lon, country, and blk/ -C* stn. -C*---------------------------------------------------------------------- - IF ( reportid .eq. ' ') THEN - WRITE ( reportid(1:6),FMT='(I6)') itabnum - CALL ST_RMBL ( reportid,reportid,len,iret ) - END IF - WRITE ( 52, FMT = '(A6,F7.2,1X,F7.2, - + 1X,A2,1x,I6)' ) - + reportid(1:6),r8hdr(3,1),r8hdr(4,1), - + tabcon,itabnum - END IF - END IF - END IF - END DO - - STOP - END diff --git a/util/sorc/webtitle.fd/README b/util/sorc/webtitle.fd/README deleted file mode 100755 index 4f26e568a6..0000000000 --- a/util/sorc/webtitle.fd/README +++ /dev/null @@ -1,9 +0,0 @@ -FF 11/09/12 -no essl library -intel's mkl blas/others are supposed to be compatible: http://en.wikipedia.org/wiki/Basic_Linear_Algebra_Subprograms - -other concerns: - -makefile:39: warning: overriding commands for target `webtitle' -makefile:34: warning: ignoring old commands for target `webtitle' - diff --git a/util/sorc/webtitle.fd/compile_webtitle_wcoss.sh b/util/sorc/webtitle.fd/compile_webtitle_wcoss.sh deleted file mode 100755 index 40cdc22f40..0000000000 --- a/util/sorc/webtitle.fd/compile_webtitle_wcoss.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/sh - -###################################################################### -# -# Build executable : GFS utilities -# -###################################################################### - -LMOD_EXACT_MATCH=no -source ../../../sorc/machine-setup.sh > /dev/null 2>&1 -cwd=$(pwd) - -if [ "$target" = "wcoss_dell_p3" ] || [ "$target" = "wcoss_cray" ] || [ "$target" = "hera" ] ; then - echo " " - echo " You are on WCOSS: $target " - echo " " -elif [ "$target" = "wcoss" ] ; then - echo " " - echo " " - echo " You are on WCOSS: $target " - echo " You do not need to build GFS utilities for GFS V15.0.0 " - echo " " - echo " " - exit -else - echo " " - echo " Your machine is $target is not recognized as a WCOSS machine." - echo " The script $0 can not continue. Aborting!" 
- echo " " - exit -fi -echo " " - -# Load required modules -source ../../modulefiles/gfs_util.${target} -module list - -set -x - -mkdir -p ../../exec -make -mv webtitle ../../exec -make clean diff --git a/util/sorc/webtitle.fd/makefile b/util/sorc/webtitle.fd/makefile deleted file mode 100755 index bcad6f8f9f..0000000000 --- a/util/sorc/webtitle.fd/makefile +++ /dev/null @@ -1,37 +0,0 @@ -# Modified BSM for WCOSS build 1/30/2013 -SHELL=/bin/sh - -SRCS= webtitle.f -OBJS= webtitle.o -# Tunable parameters -# -# FC Name of the fortran compiling system to use -# LDFLAGS Flags to the loader -# LIBS List of libraries -# CMD Name of the executable -# PROFLIB Library needed for profiling -# -FC = ifort - -LIBS= ${W3NCO_LIB4} - -CMD = webtitle -FFLAGS = -#FFLAGS = -debug - -# Lines from here on down should not need to be changed. They are the -# actual rules which make uses to build a.out. -# -all: $(CMD) - -$(CMD): $(OBJS) - $(FC) $(FFLAGS) -o $(@) $(OBJS) $(LIBS) - -clean: - -rm -f $(OBJS) - -clobber: clean - -rm -f $(CMD) - -void: clobber - -rm -f $(SRCS) makefile diff --git a/util/sorc/webtitle.fd/webtitle.f b/util/sorc/webtitle.fd/webtitle.f deleted file mode 100755 index b4bfdfa0b0..0000000000 --- a/util/sorc/webtitle.fd/webtitle.f +++ /dev/null @@ -1,147 +0,0 @@ -C$$$ MAIN PROGRAM DOCUMENTATION BLOCK -C . . . . -C MAIN PROGRAM: WEBTITLE -C PRGMMR: SAGER ORG: NP12 DATE: 2003-10-02 -C -C ABSTRACT: READS A FILE CONTAINING THE CURRENT DATE AND THE FORECAST -C HOUR AND WRITES A FILE CONTAINING A TITLE CONTAINING A REFORMATED -C DATE. THIS FILE IS USED TO CREATE A NEW FORMATED TITLE FOR THE -C NCEP MODEL GRAPHICS WEBSITE -C -C PROGRAM HISTORY LOG: -C -C 03-10-02 L. SAGER ORIGINAL VERSION -C 01-30-13 B. MABE Updated for WCOSS system. Remove Equiv and -C char to integer implied casts -C USAGE: -C INPUT FILES: -C FT05 - CURRENT DATE AND FORECAST HOUR -C -C OUTPUT FILES: -C FT55 - UPDATED TITLE CONTAINING REFORMATTED -C DATE -C -C SUBPROGRAMS CALLED: -C UNIQUE: - -C LIBRARY: - W3AI15 W3FS15 W3DOXDAT -C COMMON - -C -C EXIT STATES: -C COND = 0 - SUCCESSFUL RUN -C -C REMARKS: -C -C ATTRIBUTES: -C LANGUAGE: FORTRAN 90 -C MACHINE: IBM -C -C$$$ -C - INTEGER idat(8) - CHARACTER*4 cout(10) - CHARACTER*3 days(7) - CHARACTER*14 block - CHARACTER*40 line1 - CHARACTER*40 line2 - CHARACTER*4 tb1(2) - CHARACTER*2 tb2(3) - BYTE bsmdate(4) - BYTE retdate(4) - - DATA idat /8*0/ - DATA days /'SUN','MON','TUE','WED','THU','FRI','SAT'/ - - DATA line1 /'09/01/2003 12UTC 24HR FCST VALID TUE 09'/ - - DATA line2 /'/02/2003 12UTC NCEP/NWS/NOAA'/ - - CALL W3TAGB('WEBTITLE',2001,0275,0076,'NP12') -C -C Start by reading in the date/time -C - READ(5,102) block - 102 FORMAT(a14) - READ(block,100) tb1(1), tb1(2), tb2(1), tb2(2), tb2(3) - 100 FORMAT(2a4,4a2) - - read(tb1(1),*) jtau - read(tb1(2),*) iyear - iwork = iyear - 2000 - bsmdate(1)=iwork - read(tb2(1),*) bsmdate(2) - read(tb2(2),*) bsmdate(3) - read(tb2(3),*) bsmdate(4) - -C USAGE: CALL W3FS15 (IDATE, JTAU, NDATE) -C INPUT ARGUMENT LIST: -C IDATE - PACKED BINARY DATE/TIME AS FOLLOWS: -C BYTE 1 IS YEAR OF CENTURY 00-99 -C BYTE 2 IS MONTH 01-12 -C BYTE 3 IS DAY OF MONTH 01-31 -C BYTE 4 IS HOUR 00-23 -C SUBROUTINE TAKES ADVANTAGE OF FORTRAN ADDRESS -C PASSING, IDATE AND NDATE MAY BE -C A CHARACTER*1 ARRAY OF FOUR, THE LEFT 32 -C BITS OF 64 BIT INTEGER WORD. AN OFFICE NOTE 85 -C LABEL CAN BE STORED IN -C 4 INTEGER WORDS. -C IF INTEGER THE 2ND WORD IS USED. OUTPUT -C IS STORED IN LEFT 32 BITS. 
FOR A OFFICE NOTE 84 -C LABEL THE 7TH WORD IS IN THE 4TH CRAY 64 BIT -C INTEGER, THE LEFT 32 BITS. -C JTAU - INTEGER NUMBER OF HOURS TO UPDATE (IF POSITIVE) -C OR BACKDATE (IF NEGATIVE) -C -C OUTPUT ARGUMENT LIST: -C NDATE - NEW DATE/TIME WORD RETURNED IN THE -C SAME FORMAT AS 'IDATE'. 'NDATE' AND 'IDATE' MAY -C BE THE SAME VARIABLE. - - CALL w3fs15(bsmdate,jtau,retdate) -C -C... w3doxdat returns the day of the week -C -C INPUT VARIABLES: -C IDAT INTEGER (8) NCEP ABSOLUTE DATE AND TIME -C (YEAR, MONTH, DAY, TIME ZONE, -C HOUR, MINUTE, SECOND, MILLISECOND) -C -C OUTPUT VARIABLES: -C JDOW INTEGER DAY OF WEEK (1-7, WHERE 1 IS SUNDAY) -C JDOY INTEGER DAY OF YEAR (1-366, WHERE 1 IS JANUARY 1) -C JDAY INTEGER JULIAN DAY (DAY NUMBER FROM JAN. 1,4713 B.C.) -C - idat(1) = iyear - idat(2) = retdate(2) - idat(3) = retdate(3) - idat(5) = retdate(4) - - CALL w3doxdat(idat,jdow,jdoy,jday) - -C -C Convert the valid date back to character -C - - CALL w3ai15(idat,cout,10,2,' ') - - - line1(1:2) = block(9:10) - line1(4:5) = block(11:12) - line1(9:10) = block(7:8) - line1(12:13) = block(13:14) - line1(18:20) = block(2:4) - line1(35:37) = days(jdow) - line1(39:40) = cout(2)(1:2) - - line2(2:3) = cout(3)(1:2) - line2(7:8) = cout(1)(1:2) - line2(10:11) = cout(5)(1:2) - - - - write(55,105) line1,line2 - 105 FORMAT(2a40) - - CALL W3TAGE('WEBTITLE') - STOP - END diff --git a/util/ush/finddate.sh b/util/ush/finddate.sh deleted file mode 100755 index 1805c2103c..0000000000 --- a/util/ush/finddate.sh +++ /dev/null @@ -1,163 +0,0 @@ -# finddate.sh -# author: Luke Lin phone: 457-5047 24 June 1998 -# abstract: This script looks in ether forward or backward in time to -# generate either a variable containing sequential date/time stamps -# for a period up to a month or just the date/time stamp occurring -# at the end of such a period. -# Time stamp is in the form yyyyddmm. The script should be good for many -# years. Leap years are accounted for. Years go 1998, 1999, 2000, 2001, -# 2002, 2003, .... -# etc. -# -# usage: examples assume todays date is 19990929. -# To generate a sequence looking 10 days forward then execute: -# list=$(sh /nwprod/util/scripts/finddate.sh 19990929 s+10) -# To generate just the date/time 10 days from now then execute: -# list=$(sh /nwprod/util/scripts/finddate.sh 19990929 d+10) -# To generate a sequence looking 10 days backward then execute: -# list=$(sh /nwprod/util/scripts/finddate.sh 19990929 s-10) -# To generate just the date/time 10 days ago then execute: -# list=$(sh /nwprod/util/scripts/finddate.sh 19990929 d-10) -# list will contain 10 time stamps starting with 19990929. Time stamps -# are separated by blanks. -# -# This script will work for periods up to a month. The number indicating -# the period in question should be two digits. For single digits 1-9 -# use 01, 02, 03, etc. 
-set +x -unset pdstr -today=$1 -var=$2 -yy=$(echo $today | cut -c1-4 ) -mm=$(echo $today | cut -c5-6 ) -dd=$(echo $today | cut -c7-8 ) -nxtyy=$yy -pyy=$yy -what=$(echo $var | cut -c1-1) -up=$(echo $var | cut -c2-2) -num=$(echo $var | cut -c3-4) -mod=$(expr \( $yy / 4 \) \* 4 - $yy ) -leap=0 -if test "$mod" -eq 0 -then -leap=1 -fi -case $mm in -01) mday=31 - pday=31 - pmon=12 - pyy=$(expr $yy - 1) - if test $pyy -lt '0' - then - pyy='1999' - fi - nxtmon=02;; -02) mday=$(expr "$leap" + 28 ) - pday=31 - pmon=01 - nxtmon=03;; -03) mday=31 - pday=$(expr "$leap" + 28 ) - pmon=02 - nxtmon=04;; -04) mday=30 - pday=31 - pmon=03 - nxtmon=05;; -05) mday=31 - pday=30 - pmon=04 - nxtmon=06;; -06) mday=30 - pday=31 - pmon=05 - nxtmon=07;; -07) mday=31 - pday=30 - pmon=06 - nxtmon=08;; -08) mday=31 - pday=31 - pmon=07 - nxtmon=09;; -09) mday=30 - pday=31 - pmon=08 - nxtmon=10;; -10) mday=31 - pday=30 - pmon=09 - nxtmon=11;; -11) mday=30 - pday=31 - pmon=10 - nxtmon=12;; -12) mday=31 - pday=30 - pmon=11 - nxtmon=01 - nxtyy=$(expr $yy + 1 ) - if test $yy -eq 1999 - then - nxtyy=2000 - fi ;; -*) echo mon=$mon is illegal - exit 99 ;; -esac - -if test $dd -gt $mday -then - echo "day=$dd is illegal. In month=$mon there are only $mday days." - exit 16 -fi - -i=1 -n=0 -while test $i -le $num -do - if test "$up" = '+' - then - ddn=$(expr $dd + $i) - mmn=$mm - yyn=$yy - if test $ddn -gt $mday - then - n=$(expr $n + 1) - ddn=$n - mmn=$nxtmon - yyn=$nxtyy - fi - if test $ddn -lt 10 - then - ddn="0$ddn" - fi - elif test "$up" = '-' - then - ddn=$(expr $dd - $i) - mmn=$mm - yyn=$yy - if test $ddn -le '0' - then - n=$(expr $pday + $ddn) - ddn=$n - mmn=$pmon - yyn=$pyy - fi - if test $ddn -lt 10 - then - ddn="0$ddn" - fi - else - echo '+ or - are allowed for 2nd variable in argument.' - echo "You tried $up, this is illegal." - exit 16 - fi - i=$(expr $i + 1 ) - if test "$what" = 's' - then - pdstr=$pdstr"$yyn$mmn$ddn " - else - pdstr=$yyn$mmn$ddn - fi -done -echo $pdstr diff --git a/util/ush/make_NTC_file.pl b/util/ush/make_NTC_file.pl deleted file mode 100755 index 478bd6a288..0000000000 --- a/util/ush/make_NTC_file.pl +++ /dev/null @@ -1,119 +0,0 @@ -#!/usr/bin/perl -# -#------------------------------------------------------ -# -# This is make_NTC_file.pl -# It attaches the appropriate headers to the input file -# and copies it to a unique name for input to NTC. -# -# The following lines are prepended to the file: -# 1. A Bulletin Flag Field Seperator -# 2. A WMO header line -# 3. An optional subheader, e.g. DIFAX1064 -# -# Input wmoheader Originator datetime path -# where: -# wmoheader - WMO id to use in WMO header. -# subheader - "NONE" if none. -# Originator - Originator to use in WMO header -# datetime - date/time to use in WMO header, yyyymmddhh -# path - name input file -# output_path - name of output file -# -# Author: Paula Freeman based on script by Larry Sager -# -#------------------------------------------------------ - -$NArgs = @ARGV; - -if ($NArgs < 6) { - usage (); - exit; -} - -# -# Get input -# - -$WMOHeader=shift; -$Origin=shift; -$YYYYMMDDHH=shift; -$SubHeader=shift; -$Filename=shift; -$OutputFilename=shift; - -print "Filename is $Filename\n"; -print "Output Filename is $OutputFilename\n"; -$YYYYMMDDHH =~ /\d{4}(\d{2})(\d{4})/; -$MMDDHH = $1 . $2; -$DDHHMM = $2 . 
"00"; -print "WMOHeader = $WMOHeader\n"; -print "SubHeader = $SubHeader\n"; -print "Origin = $Origin\n"; - - -if ( ($WMOHeader eq "") || ($Origin eq "") || ($YYYYMMDDHH eq "") || ($Filename eq "") || ($OutputFilename eq "") || ($SubHeader eq "") ) { - usage (); - exit; -} - -# -# Create the file for TOC -# - - make_toc (); -# -# - - -sub usage () { - print "Usage: $0 \n"; -} - -sub make_toc { - -# -# Attach WMO header and subheader (if not "NONE"). -# Get the bytecount of file to insert into the Bulletin Flag Field Seperator. -# Add in length of WMO header, plus two carriage returns and line feed. -# If Subheader specified, count that in also, plus line a feed. -# - - $Header = "$WMOHeader $Origin $DDHHMM"; - $ByteCount = $(wc -c $Filename | cut -c1-8); - $ByteCount= $ByteCount + length($Header) + 3; - if ($SubHeader =~ /NONE/) { - print "No Subheader\n"; - } else { - if ($SubHeader =~ /IMAG/){ - $ByteCount = $ByteCount + length($SubHeader); - } else { - $ByteCount = $ByteCount + length($SubHeader) + 3; - } - } - $BulletinFlagFieldSep = sprintf( "****%10.10d****", $ByteCount); - - open(OUTFILE, ">$OutputFilename") or die "Cannot open $OutputFilename for output."; - print OUTFILE "$BulletinFlagFieldSep\n"; - print OUTFILE "$Header\r\r\n"; - if ($SubHeader =~ /NONE/) { - print "No Subheader\n"; - } else { - if ($SubHeader =~ /IMAG/){ - print OUTFILE "$SubHeader"; - } else { - print OUTFILE "$SubHeader\r\r\n"; - } - } - open (INFILE, $Filename) or die "Cannot open $Filename"; - - while ($rec=) { - print OUTFILE $rec; - } - - close INFILE; - close OUTFILE; - - print "$Filename -> $OutputFilename\n"; -} - diff --git a/util/ush/make_ntc_bull.pl b/util/ush/make_ntc_bull.pl deleted file mode 100755 index c6ca287ead..0000000000 --- a/util/ush/make_ntc_bull.pl +++ /dev/null @@ -1,250 +0,0 @@ -#!/usr/bin/perl -# -#------------------------------------------------------ -# -# This is make_ntc_bull.pl -# It attaches the appropriate headers to the input file -# and copies it to a unique name for input to NTC. -# -# A Bulletin Flag Field Separator is prepended to the -# text bulletin. This TOC header contains the total -# number of bytes in the product not counting the -# bulletin flag field separator. -# -# Input: -# File identifier - Output name identier. -# subheader - "NONE" if none. -# Originator - Not used currently -# datetime - Not used currently -# filename - input file name -# output_path - name of output file -# -# Author: Larry Sager based on a script by Paula Freeman -# -# 31 Oct 05 -- new script -# -#------------------------------------------------------ - -if ($ENV{job}) { $job=$ENV{job}; } -if ($ENV{SENDCOM}) { $SENDCOM=$ENV{SENDCOM}; } -if ($ENV{SENDDBN}) { $SENDDBN=$ENV{SENDDBN}; } -$NArgs = @ARGV; - -if ($NArgs < 6) { - usage (); - exit; -} - -# -# Get input -# - -$NAME=shift; -$WMOname=shift; -$ORIGname=shift; -$DATEname=shift; -$Filename=shift; -$OutputFilename=shift; -print " Input : $Filename"; -print " Output: $OutputFilename"; - - -if ( ($Filename eq "") || ($OutputFilename eq "") ) { - usage (); - exit; -} - -# -# Create the file for TOC -# - if ( $NAME eq "plot" ) { - make_tocplot (); - } - elsif ($NAME eq "redb" ) { - make_tocredb (); - } - else { - make_tocbull (); - } -# -# - - -sub usage () { - print "Usage: $0 \n"; -} - -sub make_tocbull { - -# -# Attach WMO header -# Get the bytecount of file to insert into the Bulletin Flag Field Seperator. -# - - $ix = 0; - $under = "_"; - open (INFILE, $Filename) or die "Cannot open $Filename"; - - while ($cho=) { - $rec = $rec . 
$cho; - } - $cho = $rec; - $cho =~ s/\n//g; - $cho =~ s/<<@@/\r\r\n/g; - $cho =~ s/<<@/\r\r\n/g; - $cho =~ s/<//g; - $cho =~ s/\^//g; - $cho =~ s/\$//g; - $cho =~ s/\|/+/g; - $value = 40; - $Outp="$OutputFilename"; - open(OUTFILE, ">$Outp") or die "Cannot open $OutputFilename for output."; - while ($ix == 0) { - $cho = substr($cho,$value); - $value = 38; - $cho =~ s/'1/\&\&/; - $cho =~ s/'0/\&\&/; -# print "cho is $cho"; - ($cho2,$cho) = split(/\&\&/,$cho); - ($cho2,$cho3) = split(/\%/,$cho2); -# print "cho2 is $cho2"; - $ByteCount = length($cho2); - print " length is $ByteCount "; - $BulletinFlagFieldSep = sprintf( "****%10.10d****", $ByteCount); - if ($SENDCOM eq "YES") { - if ($ByteCount > 50 ) { - print OUTFILE "$BulletinFlagFieldSep\n"; - print OUTFILE $cho2; - } - else { - $ix = 1; - } - } - } - close OUTFILE; - if ($SENDDBN eq "YES" ) { -# Modified 20051205 by wx11rp to ensure the current production machine is used. -# $dbn_alert="/gpfs/w/nco/dbnet/bin/dbn_alert"; - $dbn_alert=$ENV{'DBNROOT'} . "/bin/dbn_alert"; - $type="GRIB_LOW"; - $job2=$job; - $subtype=$ORIGname; - $file_path=$Outp; - @command = ($dbn_alert, $type, $subtype, $job2, $file_path); - if (system (@command) != 0) { - print "Error alerting: @command \n"; - } - } - - close INFILE; - close OUTFILE; - - print "$Filename -> $OutputFilename\n"; -} - -sub make_tocplot { - -# -# Attach WMO header -# Get the bytecount of file to insert into the Bulletin Flag Field Seperator. -# - - $ix = 0; - $under = "_"; - open (INFILE, $Filename) or die "Cannot open $Filename"; - - while ($cho=) { - $rec = $rec . $cho; - } - $cho = $rec; -# $Outp="$OutputFilename$under$job"; - $Outp="$OutputFilename"; - open(OUTFILE, ">$Outp") or die "Cannot open $OutputFilename for output."; - while ($ix == 0) { - $cho =~ s/\$\$/\&\&/; - ($cho2,$cho) = split(/\&\&/,$cho); -# $cho2 =~ s/@/ /g; -# $cho2 = $cho2 . " "; - $ByteCount = length($cho2); - print " length is $ByteCount "; - $BulletinFlagFieldSep = sprintf( "****%10.10d****", $ByteCount); - if ($SENDCOM eq "YES") { - if ($ByteCount > 50 ) { - print OUTFILE "$BulletinFlagFieldSep\n"; - print OUTFILE $cho2; - } - else { - $ix = 1; - } - } - } - close OUTFILE; - if ($SENDDBN eq "YES" ) { -# 20051205 Modified by wx11rp to allow the script to run on any manchine labeled as the production machine -# $dbn_alert="/gpfs/w/nco/dbnet/bin/dbn_alert"; - $dbn_alert=$ENV{'DBNROOT'} . "/bin/dbn_alert"; - $type="GRIB_LOW"; - $subtype=$DATEname; - $job2=$job; - $file_path=$Outp; - @command = ($dbn_alert, $type, $subtype, $job2, $file_path); - if (system (@command) != 0) { - print "Error alerting: @command \n"; - } - } - - close INFILE; - close OUTFILE; - - print "$Filename -> $OutputFilename\n"; -} -sub make_tocredb { - -# -# Prepare the Redbook graphic for transmission to TOC by removing the AWIPS -# header and creating an NTC header. Get the Bytecount of the file to -# insert into the Bulletin Flag Field Seperator. -# - - $ix = 0; - $under = "_"; - open (INFILE, $Filename) or die "Cannot open $Filename"; - - while ($cho=) { - $rec = $rec . 
$cho; - } - $cho = $rec; - $Outp="$OutputFilename"; - open(OUTFILE, ">$Outp") or die "Cannot open $OutputFilename for output."; - $cho = substr($cho,24); - $ByteCount = length($cho); - print " length is $ByteCount "; - $BulletinFlagFieldSep = sprintf( "****%10.10d****", $ByteCount); - if ($SENDCOM eq "YES") { - if ($ByteCount > 50 ) { - print OUTFILE "$BulletinFlagFieldSep\n"; - print OUTFILE $cho; - - } - } - close OUTFILE; - if ($SENDDBN eq "YES" ) { -# 20051205 Modified by wx11rp to allow the script to run on any manchine labeled as the production machine -# $dbn_alert="/gpfs/w/nco/dbnet/bin/dbn_alert"; - $dbn_alert=$ENV{'DBNROOT'} . "/bin/dbn_alert"; - $type="GRIB_LOW"; - $subtype=$DATEname; - $job2=$job; - $file_path=$Outp; - @command = ($dbn_alert, $type, $subtype, $job2, $file_path); - if (system (@command) != 0) { - print "Error alerting: @command \n"; - } - } - - close INFILE; - close OUTFILE; - - print "$Filename -> $OutputFilename\n"; -} diff --git a/util/ush/make_tif.sh b/util/ush/make_tif.sh deleted file mode 100755 index 2609d1d797..0000000000 --- a/util/ush/make_tif.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/bin/sh - -cd $DATA -# -# Use Image Magick to convert the GIF to TIF -# format -# -# module show imagemagick-intel-sandybridge/6.8.3 on CRAY -# export PATH=$PATH:/usrx/local/prod/imagemagick/6.8.3/intel/sandybridge/bin:. -# export LIBPATH="$LIBPATH":/usrx/local/prod/imagemagick/6.8.3/intel/sandybridge/lib -# export DELEGATE_PATH=/usrx/local/prod/imagemagick/6.8.3/intel/sandybridge/share/ImageMagick-6 - -# module show imagemagick/6.9.9-25 on DELL - export PATH=$PATH:/usrx/local/dev/packages/ImageMagick/6.9.9-25/bin:. - export LIBPATH="$LIBPATH":/usrx/local/dev/packages/ImageMagick/6.9.9-25/lib - export DELEGATE_PATH=/usrx/local/dev/packages/ImageMagick/6.9.9-25/share/ImageMagick-6 - - outname=out.tif - - convert gif:$input fax:$outname - -# -# Add the ntc heading: -# - -WMO=QTUA11 -ORIG=KWBC -PDYHH=${PDY}${cyc} - -if [ $HEADER = "YES" ] -then - INPATH=$DATA/$outname - SUB=DFAX1064 -# make_NTC_file.pl $WMO $ORIG $PDYHH $SUB $INPATH $OUTPATH - $UTILgfs/ush/make_NTC_file.pl $WMO $ORIG $PDYHH $SUB $INPATH $OUTPATH -# -# Send the graphic to TOC - - cp $OUTPATH ${COMOUTwmo}/gfs_500_hgt_tmp_nh_anl_${cyc}.tif - if [ $SENDDBN = YES ]; then - - $DBNROOT/bin/dbn_alert GRIB_LOW ${NET} ${job} ${COMOUTwmo}/gfs_500_hgt_tmp_nh_anl_${cyc}.tif - fi -fi - diff --git a/util/ush/month_name.sh b/util/ush/month_name.sh deleted file mode 100755 index a31a82e8a2..0000000000 --- a/util/ush/month_name.sh +++ /dev/null @@ -1,112 +0,0 @@ -#!/bin/ksh - -#################################################################### -# -# SCRIPT: month_name.sh -# -# This script returns the name/abreviation of a month -# in a small text file, month_name.txt. It also echos the -# name/abreviation to stdout. The form of the returned -# name/abreviation is specified by the script arguments. 
-# -# USAGE: ./month_name.sh < month > < monthspec> -# -# EXAMPLE: ./month_name.sh 5 MON -# -# month spec contents of month_name.txt -# ----------- ------ ---------------------------- -# -# 6/06 Mon Jun -# 8/08 Month August -# 9/09 MON SEP -# 11 MONTH NOVEMBER -# -# -# Note: Variables may be assigned the value of the returned name -# by either of the following methods: -# -# MM=$(cat month_name.txt) after executing month_name.sh -# - OR - -# MM=$(month_name.sh 5 MON) (for example) -# -# -# -# HISTORY: 07/08/2005 - Original script -# -# -#################################################################### - - - typeset -Z2 month_num - - - month_num=$1 - month_spec=$2 - - case ${month_num} in - - 01) Mon=Jan - Month=January ;; - - 02) Mon=Feb - Month=February ;; - - 03) Mon=Mar - Month=March ;; - - 04) Mon=Apr - Month=April ;; - - 05) Mon=May - Month=May ;; - - 06) Mon=Jun - Month=June ;; - - 07) Mon=Jul - Month=July ;; - - 08) Mon=Aug - Month=August ;; - - 09) Mon=Sep - Month=September ;; - - 10) Mon=Oct - Month=October ;; - - 11) Mon=Nov - Month=November ;; - - 12) Mon=Dec - Month=December ;; - - esac - - - if [ ${month_spec} = Mon ]; then - - echo ${Mon} - echo ${Mon} > month_name.txt - - elif [ ${month_spec} = Month ]; then - - echo ${Month} - echo ${Month} > month_name.txt - - elif [ ${month_spec} = MON ]; then - - MON=$(echo ${Mon} | tr [a-z] [A-Z]) - echo ${MON} - echo ${MON} > month_name.txt - - elif [ ${month_spec} = MONTH ]; then - - MONTH=$(echo ${Month} | tr [a-z] [A-Z]) - echo ${MONTH} - echo ${MONTH} > month_name.txt - - fi - - - diff --git a/versions/fix.ver b/versions/fix.ver new file mode 100644 index 0000000000..775155e70e --- /dev/null +++ b/versions/fix.ver @@ -0,0 +1,22 @@ +#!/bin/bash +# Fix file subfolder versions + +export aer_ver=20220805 +export am_ver=20220805 +export chem_ver=20220805 +export cice_ver=20220805 +export cpl_ver=20220805 +export datm_ver=20220805 +export gdas_crtm_ver=20220805 +export gdas_fv3jedi_ver=20220805 +export gdas_gsibec_ver=20221031 +export glwu_ver=20220805 +export gsi_ver=20230112 +export lut_ver=20220805 +export mom6_ver=20220805 +export orog_ver=20220805 +export reg2grb2_ver=20220805 +export sfc_climo_ver=20220805 +export ugwd_ver=20220805 +export verif_ver=20220805 +export wave_ver=20230426 diff --git a/workflow/README_ecflow.md b/workflow/README_ecflow.md new file mode 100644 index 0000000000..5dd73617ed --- /dev/null +++ b/workflow/README_ecflow.md @@ -0,0 +1,960 @@ +# Workflow Setup Utility: + +## Introduction + +This utility is designed to be an automated ecFlow and Rocoto generation application, +used to create the folder structures and scripts needed to execute the workflows +for either application. As of June 2022, this application only works for ecFlow. + +### How ecFlow Setup Works + +For ecFlow creation, the application takes a YAML file as input, pulls in any +environment variables that are specified in the YAML, pulls in any environment +variables that are set in the shell, then using the ecFlow +API, a definition file is created. While the application creates the definition file +it also uses the path defined as `ECFgfs`, which will be elaborated on later in this +guide, and creates the folders and scripts that match the definition file, setting the +`ECFHome` variable in the definition file to match the `ECFgfs` parameter. + +Please refer to the [setup the YAML](#configuring-the-yaml-file) section for instructions +on how to setup the YAML file for what you want. 
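For orientation, all of the pieces described in this guide (externs, suites, families, and tasks) live together in one YAML input file. The snippet below is a hypothetical, minimal layout stitched together from the examples later in this guide; the exact entries depend on your experiment, and each block is explained in its own section below:

```YAML
---
# Hypothetical minimal input; every key below is covered in a later section.
externs:
  - "/prod18/enkfgdas/post"
suites:
  prod00:
    nodes:
      gfs:
        tasks:
          gfs_forecast:
```

Running the generator against a file like this produces a `prod00` suite definition and creates the matching folder and script layout under the `ECFgfs` location.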
+ +## Setup for using the utility with ecFlow + +This utility uses Python3.6 and later. It will not work with any version of Python before +Python3.6. + +### Pre-Requisites + +In order to run the application, the following Python3 modules need to be available: +* ecflow +* numpy +* PyYAML + +These modules should be available on Hera and Orion. + +Additionally, this utility makes use of modules within the ecflow_setup folder, so you +need to ensure that your `PYTHONPATH` or whatever module pathing you are using +allows you to import modules from other parts of the application folders. By +default this should generally work without setting additional parameters, but it +is possible that a custom parameter may be needed. + +### Experiment Setup + +This application requires the use of a config.base file. With Rocoto and other applications +the config.base file was set up with the experiment scripts, so this application +presumes that the `config.base` file is set up and working. It does NOT import any other +configuration files from that same directory, though it would not be hard to modify the code +to pull in other parameters as well. The command line argument `--expdir` can be used +to identify the folder that contains the `config.base` file. The file will be read in and +the ush/rocoto/workflow_utils.py script will be used to populate any environment +variables that are needed. + +### Required Environment Variables + +If not set up within the script, the following environment variables are required +either in the shell or within the config.base file: +* Account +* Queue +* machine +* RUN_ENVIR + +These parameters are populated as 'edits' within the ecFlow definition file for +any of the suites that are created. + +An additional environment variable that is needed is: +* ECFgfs +This parameter is used in a number of different locations within the application. +First, it will set the destination for populating the scripts and directories +needed to execute the definition plan within that folder. Second, it will set the +`ECF_HOME` and `ECF_INCLUDE` parameters to that value. Third, if no scriptrepo +parameter is set, it will look in the `$ECFgfs/scripts` folder for the scripts +associated with the tasks or templates defined within the YAML file. + +If not already in the environment, it is suggested to add the following to the config.base file: +`export ECFgfs=$HOMEgfs/ecf` + +* **NOTE**: Older versions of the `config.base` file may not contain this export, so it +will be important to add it, as the application does rely on some pieces of information +from that folder. +* **NOTE**: In the examples provided below and their output, the `HOMEgfs` parameter +is set to `/usr1/knevins/global-workflow`, so adjust the corresponding references in +your own setup to match the output that you are looking to accomplish. + +## Configuring the YAML file + +The utility works primarily off of the YAML file used to define the suites, +families, and tasks. You will need to define the pieces within the file using +YAML syntax and the reserved words identified below. + +### Using Environment Variables in the YAML + +This application was built to use environment variables provided either through an export in the +shell environment or by specifying a value in the config.base file. To use an environment value, +the YAML file has a reserved word prefix `env.`.
The code functions by parsing the YAML file into +a dictionary then doing a recursive search over that dictionary to determine if the `env.` prefix +is used anywhere, either a value or key. When a node uses that syntax, the application will search first +the current shell environment variables for a match, if none exists, then it will search +the `config.base` file for any configurations that may have been exported from there. Finally, it will +then replace the string `env.PARAMETER` with the value from the shell or `config.base` file in the +dictionary that was imported. The original YAML file will remain unchanged. + +**NOTE:** The environment variable cannot be used in conjunction with a string so trying to use +`env.ECFgfs/include` will return only the value for `ECFgfs`, it will not append any strings or +values to the beginning or end of the value. + +Example: +Entering `env.FHMAX_GFS` as a value for a node will use the value that was +specified in the `config.base` file for the `FHMAX_GFS` export. This will be reflected in the final +definition file. It will not be updated in the original YAML file, that will remain as +`env.FHMAX_GFS`. + + +### Script Repository + +The workflow generator will create the folders and scripts necessary to run the suite +that is defined within the YAML file. It will create the items at the ECFgfs folder location. +However, the generator needs to know where to look for the scripts to copy them to the +final destination spot. This is done using the scriptrepo variable. The scriptrepo +can be specified either by an environment variable or by setting +a `scriptrepo: /path/to/scripts` variable in the YAML file at the top level. By default the +generator will look in `$ECFgfs/scripts` for the scripts. + +The scripts in the script repo do not need to be in any particular folders, they just need to +be unique names. The generator will recursively look in all folders for a script that matches +the task that was set up in the YAML file. For tasks that are loops or lists, a template +parameter can be used. Please refer to the [Setting up the tasks](#setting-up-the-tasks) +section of this guide. + +### A Basic YAML File + +The YAML file follows the standard YAML syntax structure. It is suggested to use the `---` +triple hyphen line to start the file, followed by the start of the syntax. Some YAML editors +will allow you to specify the indentation between the lines but the common practice +is to set the following when creating your YAML file: +* Use `---` to start the file +* Use two spaces for indentation on sub-items +* A colon followed by a space, `: `, is an indicator of a mapping. +* A space followed by a pound sign ` #` is an indicator of a comment. + +Running with just a suite declared in the YAML file such as: +```YAML +--- +suites: + prod00: +``` +Will return a definition file that has the following content with the default suite edits as +well as the `ECF_HOME` and `ECF_INCLUDE` parameters being set to the `ECFgfs` parameter set +in the config.base file. +```bash +#5.8.1 +suite prod00 + edit ECF_HOME '/usr1/knevins/global-workflow/ecf' + edit ECF_INCLUDE '/usr1/knevins/global-workflow/ecf' + edit ACCOUNT 'fv3-cpu' + edit QUEUE 'PARTITION_BATCH' + edit MACHINE 'HERA' + edit RUN_ENVIR 'emc' +endsuite +# enddef +``` + +### Setting up externs + +To add externs, add the `externs:` label to the base level of the yaml file, +then add each extern as a yaml list object. Please note that the configuration of externs +as options for tasks has not yet been enabled. 
Those would have to be manually configured +after creating the definition file with the generator. +* Example + +```YAML +--- +externs: + - "/prod18/enkfgdas/post" + - "/prod18/enkfgfs/post" +suites: + prod00: +``` +* Result: +```bash +#5.8.1 +extern /prod18/enkfgdas/post +extern /prod18/enkfgfs/post +suite prod00 + edit ECF_HOME '/usr1/knevins/global-workflow/ecf' + edit ECF_INCLUDE '/usr1/knevins/global-workflow/ecf' + edit ACCOUNT 'fv3-cpu' + edit QUEUE 'PARTITION_BATCH' + edit MACHINE 'HERA' + edit RUN_ENVIR 'emc' +endsuite +# enddef +``` + +### Setting up a suite + +To add items to a suite, first add a `suites:` line to the YAML file at the top level. Once +the `suites:` line has been added, as a sub object underneath, add whatever suites you would +like to have as dictionary objects. + +Suites can be either strings that define the single suite or it can be a list object defined +by using the `[ ]` syntax, such as `prod[00,06]`, which would create two suites, one called +`prod00` and one called `prod06`. + +A suite can be defined more than once. For example, if you want two suites, `prod00` and `prod06` +which contain almost completely exact entities but the `prod06` has one extra task, you +can define a suite for `prod[00,06]` and put all of the tasks for both in that section and then +below that define a `prod06` suite and add in the specific task you want. + +The generator goes through the suites serially, taking whatever is defined first in the +file and then go through the rest. This means that overrides are possible, so the order +in which the file is set up does matter. + +* Example: +This will produce a definition file with two suites, prod00 and prod06. +```YAML +suites: + prod[00,06]: + prod00: +``` +* Result: +```bash +#5.8.1 +suite prod00 + edit ECF_HOME '/usr1/knevins/global-workflow/ecf' + edit ECF_INCLUDE '/usr1/knevins/global-workflow/ecf' + edit ACCOUNT 'fv3-cpu' + edit QUEUE 'PARTITION_BATCH' + edit MACHINE 'HERA' + edit RUN_ENVIR 'emc' +endsuite +suite prod06 + edit ECF_HOME '/usr1/knevins/global-workflow/ecf' + edit ECF_INCLUDE '/usr1/knevins/global-workflow/ecf' + edit ACCOUNT 'fv3-cpu' + edit QUEUE 'PARTITION_BATCH' + edit MACHINE 'HERA' + edit RUN_ENVIR 'emc' +endsuite +# enddef +``` + +### Setting Up Families and Tasks + +Once a suite line has been added, families need to be added under a `nodes:` header. +First add the `nodes:` dictionary line under the suite name, then create the hierarchical +structure for the families. +Families can be dictionary objects under other families. In the example below, the suites +`prod00` and `prod06` will have the family `gfs`. +Then only the `prod00` suite will have the family `gdas` added to it. + +Once the family structure has been setup, add in a `tasks` dictionary under the +family to which you want to add that task. In the example below, both the `prod00` +and `prod06` suites will have the `gfs` family with the `gfs_forecast` task but the +`prod00` suite will also have the `gdas` family with the `jgdas_forecast` task. + +**Note**: The families can have a hierarchy but if there is nothing in the family +then the definition file considers it an empty one and does not add it to the overall +definition file. As long as there is a node definition within the family, such as a +task, repeat, other family, or trigger, it will show up in the definition file. 
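+
+For instance, a family declared with nothing underneath it (the name `empty_family` here is just a hypothetical
+placeholder) would be silently dropped from the generated definition file:
+```YAML
+suites:
+  prod00:
+    nodes:
+      gfs:
+        tasks:
+          jgfs_forecast:
+      empty_family:  # no tasks, families, repeats, or triggers underneath, so it will not appear in the .def file
+```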
+ +* Example +```YAML +suites: + prod[00,06]: + nodes: + gfs: + tasks: + jgfs_forecast: + prod00: + nodes: + gdas: + tasks: + jgdas_forecast: +``` +* Result: +```bash +#5.8.1 +suite prod00 + edit ECF_HOME '/usr1/knevins/global-workflow/ecf' + edit ECF_INCLUDE '/usr1/knevins/global-workflow/ecf' + edit ACCOUNT 'fv3-cpu' + edit QUEUE 'PARTITION_BATCH' + edit MACHINE 'HERA' + edit RUN_ENVIR 'emc' + family gfs + task jgfs_forecast + endfamily + family gdas + task jgdas_forecast + endfamily +endsuite +suite prod06 + edit ECF_HOME '/usr1/knevins/global-workflow/ecf' + edit ECF_INCLUDE '/usr1/knevins/global-workflow/ecf' + edit ACCOUNT 'fv3-cpu' + edit QUEUE 'PARTITION_BATCH' + edit MACHINE 'HERA' + edit RUN_ENVIR 'emc' + family gfs + task jgfs_forecast + endfamily +endsuite +# enddef +``` + +### Adding edits + +Edits can be added to either families, tasks or suites by putting an `edits:` +dictionary tag and then listing the edits below. The format for edits will be +the edit variable on the left and then the value on the right. + +So in this example below, lets consider that we want the `RUN` value to be +`GFS` for both the `prod00` and `prod06` suite but we wnat the `CYC` value +to be `00` for the `prod00` suite and `06` for the `prod06` suite. So in +that case we would use the individual declaration for the suites for the +`CYC` value only and then the listed suites declaration for the rest. +* Example + +```YAML +suites: + prod00: + edits: + CYC: '00' + prod06: + edits: + CYC: '06' + prod[00,06]: + nodes: + gfs: + edits: + RUN: 'GFS' + tasks: + jgfs_forecast: +``` +* Result +```bash +#5.8.1 +suite prod00 + edit ECF_HOME '/usr1/knevins/global-workflow/ecf' + edit ECF_INCLUDE '/usr1/knevins/global-workflow/ecf' + edit ACCOUNT 'fv3-cpu' + edit QUEUE 'PARTITION_BATCH' + edit MACHINE 'HERA' + edit RUN_ENVIR 'emc' + edit CYC '00' + family gfs + edit RUN 'GFS' + task jgfs_forecast + endfamily +endsuite +suite prod06 + edit ECF_HOME '/usr1/knevins/global-workflow/ecf' + edit ECF_INCLUDE '/usr1/knevins/global-workflow/ecf' + edit ACCOUNT 'fv3-cpu' + edit QUEUE 'PARTITION_BATCH' + edit MACHINE 'HERA' + edit RUN_ENVIR 'emc' + edit CYC '06' + family gfs + edit RUN 'GFS' + task jgfs_forecast + endfamily +endsuite +# enddef +``` + +#### Task Setup Extras + +Tasks are added in as a dictionary under the `tasks:` header. So if you want to add +multiple tasks to a family, do not add them in list syntax, add them as hashes to the dictionary. 
+
+* Example
+```YAML
+suites:
+  prod[00,06]:
+    nodes:
+      gfs:
+        atmos:
+          analysis:
+            tasks:
+              jgfs_atmos_analysis:
+              jgfs_atmos_analysis_calc:
+```
+* Result
+```bash
+#5.8.1
+suite prod00
+  edit ECF_HOME '/usr1/knevins/global-workflow/ecf'
+  edit ECF_INCLUDE '/usr1/knevins/global-workflow/ecf'
+  edit ACCOUNT 'fv3-cpu'
+  edit QUEUE 'PARTITION_BATCH'
+  edit MACHINE 'HERA'
+  edit RUN_ENVIR 'emc'
+  family gfs
+    family atmos
+      family analysis
+        task jgfs_atmos_analysis
+        task jgfs_atmos_analysis_calc
+      endfamily
+    endfamily
+  endfamily
+endsuite
+suite prod06
+  edit ECF_HOME '/usr1/knevins/global-workflow/ecf'
+  edit ECF_INCLUDE '/usr1/knevins/global-workflow/ecf'
+  edit ACCOUNT 'fv3-cpu'
+  edit QUEUE 'PARTITION_BATCH'
+  edit MACHINE 'HERA'
+  edit RUN_ENVIR 'emc'
+  family gfs
+    family atmos
+      family analysis
+        task jgfs_atmos_analysis
+        task jgfs_atmos_analysis_calc
+      endfamily
+    endfamily
+  endfamily
+endsuite
+# enddef
+```
+
+#### Task Script Repository and Templates
+
+When adding tasks, it is possible that you may want to run a task for every forecast hour in a large range
+but not want to copy and paste the same script for every forecast hour. With the generator application, you
+can specify a `template:` parameter. After defining the [script repo](#script-repository) parameter, the
+application will search the defined directory for the template script. It will then copy the template script to
+the destination folder for the suite with an adjusted name.
+
+In the example below, you can see the range used for the `jgfs_atmos_post_f` forecast hour task with a template.
+Please refer to the [ranges and lists](#ranges-and-lists) section of this document for information on how to set
+up a range; for the purposes of the example below, we are focusing on the template. What is relevant here is
+that we want 4 instances of the `jgfs_atmos_post_f` forecast hour script to be in place and use the same
+`jgfs_atmos_post_master` script for the template.
+
+In addition to the resulting definition file, the folder that was created for the `prod00` suite is shown below. The
+`prod00` folder is located at `$HOMEecf`, which in the case below you can see is defined as
+`/usr1/knevins/global-workflow/ecf`, and it contains four instances of the `jgfs_atmos_post_master`
+script, each renamed to match the `task` name in the definition file.
+
+**NOTE:** A special template value is `skip`. If you use `template: skip` in a task, the generator will know that
+the script is in fact not in the script repository and it will not attempt to copy or create it, but it will still
+add it to the definition file. This is useful in conjunction with the [defstatus](#defstatus) parameter so the
+suite will skip already done tasks and there won't be a representation of it in the final directory
+(a short sketch of this usage follows below).
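+
+As a minimal, hypothetical sketch of that usage (the node names simply mirror the Defstatus example later in this
+guide), a skipped task can be declared like this:
+```YAML
+suites:
+  prod00:
+    nodes:
+      obsproc:
+        defstatus: complete
+        tasks:
+          jobsproc_gfs_atmos_dump:
+            template: skip  # no script is copied, but the task still appears in the definition file
+```
+
+The more common case, a templated range of tasks, is shown next.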
+ +* Example +```YAML +suites: + prod00: + nodes: + gfs: + atmos: + post: + tasks: + jgfs_atmos_post_f( 4 ): + template: jgfs_atmos_post_master +``` + +* Result +```bash +#5.8.1 +suite prod00 + edit ECF_HOME '/usr1/knevins/global-workflow/ecf' + edit ECF_INCLUDE '/usr1/knevins/global-workflow/ecf' + edit ACCOUNT 'fv3-cpu' + edit QUEUE 'PARTITION_BATCH' + edit MACHINE 'HERA' + edit RUN_ENVIR 'emc' + family gfs + family atmos + family post + task jgfs_atmos_post_f000 + task jgfs_atmos_post_f001 + task jgfs_atmos_post_f002 + task jgfs_atmos_post_f003 + endfamily + endfamily + endfamily +endsuite +# enddef +``` + +* Resulting Folder Setup +```bash +$ tree prod00 +prod00 +└── gfs + └── atmos + └── post + ├── jgfs_atmos_post_f000.ecf + ├── jgfs_atmos_post_f001.ecf + ├── jgfs_atmos_post_f002.ecf + └── jgfs_atmos_post_f003.ecf + +3 directories, 4 files +``` + +### Setting Up Triggers, Events, and Defstatus + +#### Events +To add an event, you first need to add the `events:` dictionary heading underneath the node to which it needs to be +added. Then underneath that `events:` heading, as a list object, add the list of events that you want have attached. + +**NOTE:** Events can be ranges or list objects, please see the section below on creating lists or ranges. + +**NOTE:** Events must be added in a list. This is not the same structure as adding tasks, which are dictionary objects, +the events list is an actual list so please make sure to add a hyphen, `-`, in front of every event that you wish to +add. + +* Example +```YAML +suites: + prod00: + nodes: + gfs: + atmos: + analysis: + tasks: + jgfs_atmos_analysis: + events: + - release_fcst +``` + +* Result +```bash +#5.8.1 +suite prod00 + edit ECF_HOME '/usr1/knevins/global-workflow/ecf' + edit ECF_INCLUDE '/usr1/knevins/global-workflow/ecf' + edit ACCOUNT 'fv3-cpu' + edit QUEUE 'PARTITION_BATCH' + edit MACHINE 'HERA' + edit RUN_ENVIR 'emc' + family gfs + family atmos + family analysis + task jgfs_atmos_analysis + event release_fcst + endfamily + endfamily + endfamily +endsuite +# enddef +``` + +#### Triggers + +To add a trigger, add a `triggers:` dictionary heading underneath the task or family. The triggers need to be a list +item with the identifier for what you want the trigger to look for. So for a task, it would be `- task: task_name` or +for a family it would be `- family: family_name` + +**NOTE:** It was mentioned above but an important distinction from tasks is that triggers need to be in list format. +The reason for this is due to triggers being either families or tasks, and that is determined by the dictionary +label for the list item. + +**NOTE:** By default, multiple triggers are added to a node with __AND__ + +Triggers can also have the following items associated with it: +* `event:` + * This is listed as part of the list item but in it's own `event:` header. The `event:` must exist within the suite + or it will be rejected. + * Events can be lists or ranges. +* `state:` + * This will identify the state of the task or family in the trigger. States are generally `active`, `complete`, or + `queued`. +* `suite:` + * If the trigger is for a task within another suite, add the suite identifier to the trigger and it will look up + the full path to the trigger and add it. **NOTE:** The suite reference must exist within the definition file, this + will not work for `externs`. +* `operand:` + * By default multiple triggers are added to the node with __AND__ as their connection. 
However, in the event that
+  the triggers need to be added with an __OR__ statement, add the `operand: OR` key to the trigger.
+
+* Example
+```YAML
+suites:
+  prod00:
+    nodes:
+      gfs:
+        tasks:
+          jgfs_forecast:
+            triggers:
+            - task: jgfs_atmos_analysis
+              event: release_fcst
+            - task: jgfs_wave_prep
+              state: complete
+        atmos:
+          analysis:
+            tasks:
+              jgfs_atmos_analysis:
+                events:
+                - release_fcst
+        wave:
+          prep:
+            tasks:
+              jgfs_wave_prep:
+```
+
+* Result
+```bash
+#5.8.1
+suite prod00
+  edit ECF_HOME '/usr1/knevins/global-workflow/ecf'
+  edit ECF_INCLUDE '/usr1/knevins/global-workflow/ecf'
+  edit ACCOUNT 'fv3-cpu'
+  edit QUEUE 'PARTITION_BATCH'
+  edit MACHINE 'HERA'
+  edit RUN_ENVIR 'emc'
+  family gfs
+    family atmos
+      family analysis
+        task jgfs_atmos_analysis
+          event release_fcst
+      endfamily
+    endfamily
+    family wave
+      family prep
+        task jgfs_wave_prep
+      endfamily
+    endfamily
+    task jgfs_forecast
+      trigger /prod00/gfs/atmos/analysis/jgfs_atmos_analysis:release_fcst
+      trigger -a /prod00/gfs/wave/prep/jgfs_wave_prep == complete
+  endfamily
+endsuite
+# enddef
+```
+
+#### Defstatus
+
+At the time of this README, the use case for defstatus was to be able to add nodes to a definition file and have
+them marked as complete, so that the ecflow run knows that the script is there but acknowledges it as done without
+having to do anything. This is useful when running development tasks that rely on an operational task, but the
+operational task is already done and nothing else needs to be executed.
+
+To add a defstatus to a task or family, add a `defstatus:` parameter underneath the node. This is not a dictionary
+but a key/value pair, so the item will look like `defstatus: value`.
+
+**NOTE:** A defstatus can be added to a family or a task object. Both are acceptable formats.
+
+**NOTE:** When a defstatus is defined for a parent object, all child nodes under the object inherit it, so in the
+example below, all families and tasks are considered complete and, since the `template: skip` value is there for the
+task, the script generator will not attempt to look for it in the script repo.
+
+* Example
+```YAML
+suites:
+  prod00:
+    nodes:
+      obsproc:
+        defstatus: complete
+        v1.0:
+          gfs:
+            atmos:
+              dump:
+                tasks:
+                  jobsproc_gfs_atmos_dump:
+                    template: skip
+                    events:
+                    - "release_sfcprep"
+```
+
+* Result
+```bash
+#5.8.1
+suite prod00
+  edit ECF_HOME '/usr1/knevins/global-workflow/ecf'
+  edit ECF_INCLUDE '/usr1/knevins/global-workflow/ecf'
+  edit ACCOUNT 'fv3-cpu'
+  edit QUEUE 'PARTITION_BATCH'
+  edit MACHINE 'HERA'
+  edit RUN_ENVIR 'emc'
+  family obsproc
+    defstatus complete
+    family v1.0
+      family gfs
+        family atmos
+          family dump
+            task jobsproc_gfs_atmos_dump
+              event release_sfcprep
+          endfamily
+        endfamily
+      endfamily
+    endfamily
+  endfamily
+endsuite
+# enddef
+```
+
+### Repeats
+
+Repeats are in a standalone section because of the nature of how ecflow handles repeating tasks. Ecflow has multiple
+methods for handling repeating tasks, but they lack a lot of the specificity that one would hope for. Trying to express
+something as simple as running every 6 hours for the next three days is a rather complex setup. With that, after adding
+a repeat, please double-check the setup to make sure that the code has produced the repeat type that you are looking to
+accomplish.
+
+Repeats are declared with the `repeat:` key, and the value has a specific syntax as follows:
+  `YYYYMMDD(HH)?
to YYYYMMDD(HH)? (by DD:HH:MM)?`
+where the items in the `()?` are optional.
+
+The first value is the start time, specified as year, month, day, with an optional hour value. The second value
+is the end date in year, month, day format, also with an optional hour. The third is the increment time in
+day, hour, and minute format; the day is optional in the third value as well. It can be read as: start at the first
+value, repeat until the second value is reached, and increment by the third value. If no third value is specified,
+the increment is 1 hour.
+
+The value `2022032400 to 2022042400` is valid, as is the value `2022032400 to 2022042400 by 18:00`.
+
+* If the repeat string has the start and end dates on the same day, just a `time` string with a `date` option will
+be used.
+* If the repeat string has the start and end on different days but within a 24 hour window, there will be a start
+date with a repeat and a time string added to the definition file.
+* If the repeat spans multiple days, it requires a combination of time, date and crons in the definition file.
+
+To elaborate on the example below of `2022032400 to 2022042400 by 18:00`: that will be read as starting at 00Z on
+March 24th 2022, running every 18 hours until April 24th 2022. This will be reflected in the definition file with a
+`date` value of March 24th, `24.3.2022`, to start, a `time` value of `00:00` indicating the start, a relative `time`
+value of `+18:00` to indicate that after running and waiting 18 hours, run again, and a `repeat` value
+to indicate that this needs to happen 42 times to get to April 24th.
+
+* Example
+```YAML
+suites:
+  prod00:
+    nodes:
+      gfs:
+        repeat: "2022032400 to 2022042400 by 18:00"
+```
+
+* Result
+```bash
+#5.8.1
+suite prod00
+  edit ECF_HOME '/usr1/knevins/global-workflow/ecf'
+  edit ECF_INCLUDE '/usr1/knevins/global-workflow/ecf'
+  edit ACCOUNT 'fv3-cpu'
+  edit QUEUE 'PARTITION_BATCH'
+  edit MACHINE 'HERA'
+  edit RUN_ENVIR 'emc'
+  family gfs
+    repeat integer RUN 1 42
+    time 00:00
+    time +18:00
+    date 24.3.2022
+  endfamily
+endsuite
+# enddef
+```
+
+## Ranges and Lists
+
+If you need to have multiple forecast hours, or a similar node object with just a few characters of difference, the
+concept of ranges and lists will be very useful. Families, tasks, or even triggers and events can
+have ranges or lists associated with them to shorten the creation of the definition YAML. The goal is to have one
+line that can create multiple suites, families, tasks, or even events.
+
+A range is a basic counting structure that follows the [Python range](https://docs.python.org/3.3/library/stdtypes.html?highlight=range#range) class object format. It is specified in one of the following three formats:
+* `( $MAX_VALUE )`
+* `( $START_VALUE, $MAX_VALUE )`
+* `( $START_VALUE, $MAX_VALUE, $STEP )`
+
+As you can see from the examples, if only one value is specified then it is used as the max value; if two, then a
+start and end; and three includes an increment. It uses default values of 0 for the start value and 1 for the increment
+if nothing else is specified.
+
+### Range Hierarchy
+
+The code also uses a hierarchy structure so that range values can be passed down to child node objects while also
+allowing them to be modified slightly. To use a parent counter, use the same notation as the list or range but do not put any values in
+the notation.
So if there is a range of `(4)` for a parent node and the child node has the notation `( )` in it, then when
+the parent node uses the value `1`, so will the child node. An example of this would be that if a parent node has a
+string value like `jgfs_atmos_post_f( 4 )`, there will be 4 objects created in the definition file:
+`jgfs_atmos_post_f000`, `jgfs_atmos_post_f001`, `jgfs_atmos_post_f002`, `jgfs_atmos_post_f003`.
+Then if that task has an edit that reads `FHR: 'f( )'`, the node `jgfs_atmos_post_f001` will have an edit that
+reads `FHR: f001`, and so on.
+
+You can also modify the increment or start values without specifying a maximum value. In the same example from
+above, if `jgfs_atmos_post_f( 4 )` is the node definition but you wanted the edit value to start at 1 instead of
+0, then using `FHRGRP: '( 1, )'`, which uses 1 as the start value but as you can see has no max value, will set the
+value of the edit in node `jgfs_atmos_post_f001` to `FHRGRP: 002`. Something similar can also be done for the
+increment value, so if the edit was specified as `FHRGRP: '( ,,6 )'`, the value for the edit in node
+`jgfs_atmos_post_f001` would be set to `FHRGRP: 006` because it would increment by 6 but still use the same
+parent counter for the base since no start or max value was specified.
+
+* Example
+```YAML
+suites:
+  prod00:
+    nodes:
+      gfs:
+        atmos:
+          post:
+            tasks:
+              jgfs_atmos_post_manager:
+                events:
+                - "release_postanl"
+                - "release_post( 4 )"
+              jgfs_atmos_post_f( 4 ):
+                template: jgfs_atmos_post_master
+                triggers:
+                - task: jgfs_atmos_post_manager
+                  event: release_post( )
+                edits:
+                  FHRGRP: '( 1, )'
+                  FHRLST: 'f( )'
+                  FHR: 'f( )'
+                  HR: '( )'
+```
+
+* Result
+```bash
+#5.8.1
+suite prod00
+  edit ECF_HOME '/usr1/knevins/global-workflow/ecf'
+  edit ECF_INCLUDE '/usr1/knevins/global-workflow/ecf'
+  edit ACCOUNT 'fv3-cpu'
+  edit QUEUE 'PARTITION_BATCH'
+  edit MACHINE 'HERA'
+  edit RUN_ENVIR 'emc'
+  family gfs
+    family atmos
+      family post
+        task jgfs_atmos_post_manager
+          event release_postanl
+          event release_post000
+          event release_post001
+          event release_post002
+          event release_post003
+        task jgfs_atmos_post_f000
+          trigger /prod00/gfs/atmos/post/jgfs_atmos_post_manager:release_post000
+          edit FHRGRP '001'
+          edit FHRLST 'f000'
+          edit FHR 'f000'
+          edit HR '000'
+        task jgfs_atmos_post_f001
+          trigger /prod00/gfs/atmos/post/jgfs_atmos_post_manager:release_post001
+          edit FHRGRP '002'
+          edit FHRLST 'f001'
+          edit FHR 'f001'
+          edit HR '001'
+        task jgfs_atmos_post_f002
+          trigger /prod00/gfs/atmos/post/jgfs_atmos_post_manager:release_post002
+          edit FHRGRP '003'
+          edit FHRLST 'f002'
+          edit FHR 'f002'
+          edit HR '002'
+        task jgfs_atmos_post_f003
+          trigger /prod00/gfs/atmos/post/jgfs_atmos_post_manager:release_post003
+          edit FHRGRP '004'
+          edit FHRLST 'f003'
+          edit FHR 'f003'
+          edit HR '003'
+      endfamily
+    endfamily
+  endfamily
+endsuite
+# enddef
+```
+
+Lists are similar to the ranges but use the `[ ]` bracket syntax. Items in the list can be of any type and will run
+the same way as ranges. The list can be either within a string or in native YAML list format, and both should be
+interpreted by the generator properly.
+
+**NOTE:** Lists will also match ranges if they are equal in length. So if you have a range of four and a list of
+four, when the first element of the range is used, the first element of the list is also used.
+
+**NOTE:** Lists do not inherit the parent values directly. They read the position but, given the flexibility needed,
+it does not pass down the actual values.
The code could be updated to do that easily if it turns out to be a +future need but due to potential conflicts, it was not set that way in this version. + +* Example +```YAML +suites: + prod00: + nodes: + gfs: + atmos: + post: + tasks: + jgfs_atmos_post_manager: + events: + - "release_postanl" + - "release_post( 4 )" + jgfs_atmos_post_f[000,001,002,003]: + template: jgfs_atmos_post_master + triggers: + - task: jgfs_atmos_post_manager + event: release_post[000,001,002,003] + edits: + FHRGRP: [ 'a', 'b', 'c', 'd' ] + HR: [1,2,3,4] +``` + +* Result +```bash +#5.8.1 +suite prod00 + edit ECF_HOME '/usr1/knevins/global-workflow/ecf' + edit ECF_INCLUDE '/usr1/knevins/global-workflow/ecf' + edit ACCOUNT 'fv3-cpu' + edit QUEUE 'PARTITION_BATCH' + edit MACHINE 'HERA' + edit RUN_ENVIR 'emc' + family gfs + family atmos + family post + task jgfs_atmos_post_manager + event release_postanl + event release_post000 + event release_post001 + event release_post002 + event release_post003 + task jgfs_atmos_post_f000 + trigger /prod00/gfs/atmos/post/jgfs_atmos_post_manager:release_post000 + edit FHRGRP 'a' + edit HR '001' + task jgfs_atmos_post_f001 + trigger /prod00/gfs/atmos/post/jgfs_atmos_post_manager:release_post001 + edit FHRGRP 'b' + edit HR '002' + task jgfs_atmos_post_f002 + trigger /prod00/gfs/atmos/post/jgfs_atmos_post_manager:release_post002 + edit FHRGRP 'c' + edit HR '003' + task jgfs_atmos_post_f003 + trigger /prod00/gfs/atmos/post/jgfs_atmos_post_manager:release_post003 + edit FHRGRP 'd' + edit HR '004' + endfamily + endfamily + endfamily +endsuite +# enddef + +``` + +# Running the Utility + +## Run the utility +1. Change into the workflow directory: +` cd global-workflow/workflow_generator` +1. Run the utility +``` +python3 setup_workflow.py --expdir ../parm/config +``` + +### Command Line Options +* --ecflow-config + * Path to the YAML configuration file to use to generate the definition and folder/scripts. +* --expdir + * Path to the experiment directory. Must contain config.base. +* --savedir + * Path to where the definition file will be saved. Defaults to current directory. 
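+
+For example, a full invocation that points at a specific YAML configuration and writes the definition file
+somewhere else might look like the following (the YAML file name and save path here are just illustrative):
+```
+cd global-workflow/workflow_generator
+python3 setup_workflow.py --ecflow-config my_ecflow_config.yml --expdir ../parm/config --savedir /path/to/save/defs
+```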
diff --git a/workflow/__init.py__ b/workflow/__init.py__ new file mode 100644 index 0000000000..e69de29bb2 diff --git a/workflow/applications.py b/workflow/applications.py new file mode 100644 index 0000000000..ba4a905c62 --- /dev/null +++ b/workflow/applications.py @@ -0,0 +1,510 @@ +#!/usr/bin/env python3 + +from typing import Dict, Any +from datetime import timedelta +from hosts import Host +from pygw.configuration import Configuration + +__all__ = ['AppConfig'] + + +def get_gfs_interval(gfs_cyc: int) -> str: + """ + return interval in hours based on gfs_cyc + """ + + gfs_internal_map = {'0': None, '1': '24:00:00', '2': '12:00:00', '4': '06:00:00'} + + try: + return gfs_internal_map[str(gfs_cyc)] + except KeyError: + raise KeyError(f'Invalid gfs_cyc = {gfs_cyc}') + + +def get_gfs_cyc_dates(base: Dict[str, Any]) -> Dict[str, Any]: + """ + Generate GFS dates from experiment dates and gfs_cyc choice + """ + + base_out = base.copy() + + gfs_cyc = base['gfs_cyc'] + sdate = base['SDATE'] + edate = base['EDATE'] + base_out['INTERVAL'] = '06:00:00' # Cycled interval is 6 hours + + interval_gfs = get_gfs_interval(gfs_cyc) + + # Set GFS cycling dates + hrinc = 0 + hrdet = 0 + if gfs_cyc == 0: + return base_out + elif gfs_cyc == 1: + hrinc = 24 - sdate.hour + hrdet = edate.hour + elif gfs_cyc == 2: + if sdate.hour in [0, 12]: + hrinc = 12 + elif sdate.hour in [6, 18]: + hrinc = 6 + if edate.hour in [6, 18]: + hrdet = 6 + elif gfs_cyc == 4: + hrinc = 6 + sdate_gfs = sdate + timedelta(hours=hrinc) + edate_gfs = edate - timedelta(hours=hrdet) + if sdate_gfs > edate: + print('W A R N I N G!') + print('Starting date for GFS cycles is after Ending date of experiment') + print(f'SDATE = {sdate.strftime("%Y%m%d%H")}, EDATE = {edate.strftime("%Y%m%d%H")}') + print(f'SDATE_GFS = {sdate_gfs.strftime("%Y%m%d%H")}, EDATE_GFS = {edate_gfs.strftime("%Y%m%d%H")}') + gfs_cyc = 0 + + base_out['gfs_cyc'] = gfs_cyc + base_out['SDATE_GFS'] = sdate_gfs + base_out['EDATE_GFS'] = edate_gfs + base_out['INTERVAL_GFS'] = interval_gfs + + fhmax_gfs = {} + for hh in ['00', '06', '12', '18']: + fhmax_gfs[hh] = base.get(f'FHMAX_GFS_{hh}', base.get('FHMAX_GFS_00', 120)) + base_out['FHMAX_GFS'] = fhmax_gfs + + return base_out + + +class AppConfig: + + VALID_MODES = ['cycled', 'forecast-only'] + + def __init__(self, conf: Configuration) -> None: + + self.scheduler = Host().scheduler + + _base = conf.parse_config('config.base') + + self.mode = _base['MODE'] + + if self.mode not in self.VALID_MODES: + raise NotImplementedError(f'{self.mode} is not a valid application mode.\n' + + 'Valid application modes are:\n' + + f'{", ".join(self.VALID_MODES)}') + + self.model_app = _base.get('APP', 'ATM') + self.do_hybvar = _base.get('DOHYBVAR', False) + self.do_atm = _base.get('DO_ATM', True) + self.do_wave = _base.get('DO_WAVE', False) + self.do_wave_bnd = _base.get('DOBNDPNT_WAVE', False) + self.do_ocean = _base.get('DO_OCN', False) + self.do_ice = _base.get('DO_ICE', False) + self.do_aero = _base.get('DO_AERO', False) + self.do_bufrsnd = _base.get('DO_BUFRSND', False) + self.do_gempak = _base.get('DO_GEMPAK', False) + self.do_awips = _base.get('DO_AWIPS', False) + self.do_wafs = _base.get('WAFSF', False) + self.do_vrfy = _base.get('DO_VRFY', True) + self.do_fit2obs = _base.get('DO_FIT2OBS', True) + self.do_metp = _base.get('DO_METP', False) + self.do_jediatmvar = _base.get('DO_JEDIATMVAR', False) + self.do_jediatmens = _base.get('DO_JEDIATMENS', False) + self.do_jediocnvar = _base.get('DO_JEDIOCNVAR', False) + self.do_jedilandda = 
_base.get('DO_JEDILANDDA', False) + self.do_mergensst = _base.get('DO_MERGENSST', False) + + self.do_hpssarch = _base.get('HPSSARCH', False) + + self.wave_cdumps = None + if self.do_wave: + wave_cdump = _base.get('WAVE_CDUMP', 'BOTH').lower() + if wave_cdump in ['both']: + self.wave_cdumps = ['gfs', 'gdas'] + elif wave_cdump in ['gfs', 'gdas']: + self.wave_cdumps = [wave_cdump] + + self.lobsdiag_forenkf = False + self.eupd_cdumps = None + if self.do_hybvar: + self.lobsdiag_forenkf = _base.get('lobsdiag_forenkf', False) + eupd_cdump = _base.get('EUPD_CYC', 'gdas').lower() + if eupd_cdump in ['both']: + self.eupd_cdumps = ['gfs', 'gdas'] + elif eupd_cdump in ['gfs', 'gdas']: + self.eupd_cdumps = [eupd_cdump] + + # Get a list of all possible config_files that would be part of the application + self.configs_names = self._get_app_configs() + + # Source the config_files for the jobs in the application + self.configs = self._source_configs(conf) + + # Update the base config dictionary based on application + upd_base_map = {'cycled': self._cycled_upd_base, + 'forecast-only': self._forecast_only_upd_base} + try: + self.configs['base'] = upd_base_map[self.mode](self.configs['base']) + except KeyError: + raise NotImplementedError(f'{self.mode} is not a valid application mode.\n' + + 'Valid application modes are:\n' + + f'{", ".join(self.VALID_MODES)}') + + # Save base in the internal state since it is often needed + self._base = self.configs['base'] + + # Get more configuration options into the class attributes + self.gfs_cyc = self._base.get('gfs_cyc') + + # Finally get task names for the application + self.task_names = self.get_task_names() + + def _get_app_configs(self): + + configs_map = {'cycled': self._cycled_configs, + 'forecast-only': self._forecast_only_configs} + try: + configs_names = configs_map[self.mode] + except KeyError: + raise NotImplementedError(f'{self.mode} is not a valid application mode.\n' + + 'Valid application modes are:\n' + + f'{", ".join(self.VALID_MODES)}') + + return configs_names + + @property + def _cycled_configs(self): + """ + Returns the config_files that are involved in the cycled app + """ + + configs = ['prep'] + + if self.do_jediatmvar: + configs += ['atmanlinit', 'atmanlrun', 'atmanlfinal'] + else: + configs += ['anal', 'analdiag'] + + if self.do_jediocnvar: + configs += ['ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalchkpt', 'ocnanalpost', 'ocnanalvrfy'] + + if self.do_ocean: + configs += ['ocnpost'] + + configs += ['sfcanl', 'analcalc', 'fcst', 'post', 'vrfy', 'fit2obs', 'arch'] + + if self.do_hybvar: + if self.do_jediatmens: + configs += ['atmensanlinit', 'atmensanlrun', 'atmensanlfinal'] + else: + configs += ['eobs', 'eomg', 'ediag', 'eupd'] + configs += ['ecen', 'esfc', 'efcs', 'echgres', 'epos', 'earc'] + + if self.do_metp: + configs += ['metp'] + + if self.do_gempak: + configs += ['gempak'] + + if self.do_bufrsnd: + configs += ['postsnd'] + + if self.do_awips: + configs += ['awips'] + + if self.do_wave: + configs += ['waveinit', 'waveprep', 'wavepostsbs', 'wavepostpnt'] + if self.do_wave_bnd: + configs += ['wavepostbndpnt', 'wavepostbndpntbll'] + if self.do_gempak: + configs += ['wavegempak'] + if self.do_awips: + configs += ['waveawipsbulls', 'waveawipsgridded'] + + if self.do_wafs: + configs += ['wafs', 'wafsgrib2', 'wafsblending', 'wafsgcip', 'wafsgrib20p25', 'wafsblending0p25'] + + if self.do_aero: + configs += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal'] + + if self.do_jedilandda: + configs += ['preplandobs', 'landanlinit', 'landanlrun', 
'landanlfinal'] + + return configs + + @property + def _forecast_only_configs(self): + """ + Returns the config_files that are involved in the forecast-only app + """ + + configs = ['coupled_ic', 'fcst', 'arch'] + + if self.do_atm: + configs += ['post', 'vrfy'] + + if self.do_aero: + configs += ['aerosol_init'] + + if self.do_ocean or self.do_ice: + configs += ['ocnpost'] + + if self.do_atm and self.do_metp: + configs += ['metp'] + + if self.do_gempak: + configs += ['gempak'] + + if self.do_awips: + configs += ['awips'] + + if self.do_wave: + configs += ['waveinit', 'waveprep', 'wavepostsbs', 'wavepostpnt'] + if self.do_wave_bnd: + configs += ['wavepostbndpnt', 'wavepostbndpntbll'] + if self.do_gempak: + configs += ['wavegempak'] + if self.do_awips: + configs += ['waveawipsbulls', 'waveawipsgridded'] + + if self.do_wafs: + configs += ['wafs', 'wafsgrib2', 'wafsblending', 'wafsgcip', 'wafsgrib20p25', 'wafsblending0p25'] + + return configs + + @staticmethod + def _cycled_upd_base(base_in): + + return get_gfs_cyc_dates(base_in) + + @staticmethod + def _forecast_only_upd_base(base_in): + + base_out = base_in.copy() + base_out['INTERVAL_GFS'] = get_gfs_interval(base_in['gfs_cyc']) + base_out['CDUMP'] = 'gfs' + + return base_out + + def _source_configs(self, conf: Configuration) -> Dict[str, Any]: + """ + Given the configuration object and jobs, + source the configurations for each config and return a dictionary + Every config depends on "config.base" + """ + + configs = dict() + + # Return config.base as well + configs['base'] = conf.parse_config('config.base') + + # Source the list of all config_files involved in the application + for config in self.configs_names: + + # All must source config.base first + files = ['config.base'] + + if config in ['eobs', 'eomg']: + files += ['config.anal', 'config.eobs'] + elif config in ['eupd']: + files += ['config.anal', 'config.eupd'] + elif config in ['efcs']: + files += ['config.fcst', 'config.efcs'] + elif 'wave' in config: + files += ['config.wave', f'config.{config}'] + else: + files += [f'config.{config}'] + + print(f'sourcing config.{config}') + configs[config] = conf.parse_config(files) + + return configs + + def get_task_names(self): + + # Get a list of all possible tasks that would be part of the application + tasks_map = {'cycled': self._get_cycled_task_names, + 'forecast-only': self._get_forecast_only_task_names} + try: + task_names = tasks_map[self.mode]() + except KeyError: + raise NotImplementedError(f'{self.mode} is not a valid application mode.\n' + + 'Valid application modes are:\n' + + f'{", ".join(self.VALID_MODES)}') + + return task_names + + def _get_cycled_task_names(self): + """ + Get the task names for all the tasks in the cycled application. + Note that the order of the task names matters in the XML. + This is the place where that order is set. 
+ """ + + gdas_gfs_common_tasks_before_fcst = ['prep'] + gdas_gfs_common_tasks_after_fcst = ['post'] + # if self.do_ocean: # TODO: uncomment when ocnpost is fixed in cycled mode + # gdas_gfs_common_tasks_after_fcst += ['ocnpost'] + gdas_gfs_common_tasks_after_fcst += ['vrfy'] + + gdas_gfs_common_cleanup_tasks = ['arch'] + + if self.do_jediatmvar: + gdas_gfs_common_tasks_before_fcst += ['atmanlinit', 'atmanlrun', 'atmanlfinal'] + else: + gdas_gfs_common_tasks_before_fcst += ['anal'] + + if self.do_jediocnvar: + gdas_gfs_common_tasks_before_fcst += ['ocnanalprep', 'ocnanalbmat', 'ocnanalrun', + 'ocnanalchkpt', 'ocnanalpost', 'ocnanalvrfy'] + + gdas_gfs_common_tasks_before_fcst += ['sfcanl', 'analcalc'] + + if self.do_aero: + gdas_gfs_common_tasks_before_fcst += ['aeroanlinit', 'aeroanlrun', 'aeroanlfinal'] + + if self.do_jedilandda: + gdas_gfs_common_tasks_before_fcst += ['preplandobs', 'landanlinit', 'landanlrun', 'landanlfinal'] + + wave_prep_tasks = ['waveinit', 'waveprep'] + wave_bndpnt_tasks = ['wavepostbndpnt', 'wavepostbndpntbll'] + wave_post_tasks = ['wavepostsbs', 'wavepostpnt'] + + hybrid_tasks = [] + hybrid_after_eupd_tasks = [] + if self.do_hybvar: + if self.do_jediatmens: + hybrid_tasks += ['atmensanlinit', 'atmensanlrun', 'atmensanlfinal', 'echgres'] + else: + hybrid_tasks += ['eobs', 'eupd', 'echgres'] + hybrid_tasks += ['ediag'] if self.lobsdiag_forenkf else ['eomg'] + hybrid_after_eupd_tasks += ['ecen', 'esfc', 'efcs', 'epos', 'earc'] + + # Collect all "gdas" cycle tasks + gdas_tasks = gdas_gfs_common_tasks_before_fcst.copy() + if not self.do_jediatmvar: + gdas_tasks += ['analdiag'] + + if self.do_wave and 'gdas' in self.wave_cdumps: + gdas_tasks += wave_prep_tasks + + gdas_tasks += ['fcst'] + + gdas_tasks += gdas_gfs_common_tasks_after_fcst + + if self.do_wave and 'gdas' in self.wave_cdumps: + if self.do_wave_bnd: + gdas_tasks += wave_bndpnt_tasks + gdas_tasks += wave_post_tasks + + if self.do_fit2obs: + gdas_tasks += ['fit2obs'] + + gdas_tasks += gdas_gfs_common_cleanup_tasks + + # Collect "gfs" cycle tasks + gfs_tasks = gdas_gfs_common_tasks_before_fcst + + if self.do_wave and 'gfs' in self.wave_cdumps: + gfs_tasks += wave_prep_tasks + + gfs_tasks += ['fcst'] + + gfs_tasks += gdas_gfs_common_tasks_after_fcst + + if self.do_metp: + gfs_tasks += ['metp'] + + if self.do_wave and 'gfs' in self.wave_cdumps: + if self.do_wave_bnd: + gfs_tasks += wave_bndpnt_tasks + gfs_tasks += wave_post_tasks + if self.do_gempak: + gfs_tasks += ['wavegempak'] + if self.do_awips: + gfs_tasks += ['waveawipsbulls', 'waveawipsgridded'] + + if self.do_bufrsnd: + gfs_tasks += ['postsnd'] + + if self.do_gempak: + gfs_tasks += ['gempak'] + + if self.do_awips: + gfs_tasks += ['awips'] + + if self.do_wafs: + gfs_tasks += ['wafs', 'wafsgcip', 'wafsgrib2', 'wafsgrib20p25', 'wafsblending', 'wafsblending0p25'] + + gfs_tasks += gdas_gfs_common_cleanup_tasks + + tasks = dict() + tasks['gdas'] = gdas_tasks + + if self.do_hybvar and 'gdas' in self.eupd_cdumps: + enkfgdas_tasks = hybrid_tasks + hybrid_after_eupd_tasks + tasks['enkfgdas'] = enkfgdas_tasks + + # Add CDUMP=gfs tasks if running early cycle + if self.gfs_cyc > 0: + tasks['gfs'] = gfs_tasks + + if self.do_hybvar and 'gfs' in self.eupd_cdumps: + enkfgfs_tasks = hybrid_tasks + hybrid_after_eupd_tasks + enkfgfs_tasks.remove("echgres") + tasks['enkfgfs'] = enkfgfs_tasks + + return tasks + + def _get_forecast_only_task_names(self): + """ + Get the task names for all the tasks in the forecast-only application. 
+ Note that the order of the task names matters in the XML. + This is the place where that order is set. + """ + + tasks = ['coupled_ic'] + + if self.do_aero: + tasks += ['aerosol_init'] + + if self.do_wave: + tasks += ['waveinit'] + # tasks += ['waveprep'] # TODO - verify if waveprep is executed in forecast-only mode when APP=ATMW|S2SW + + tasks += ['fcst'] + + if self.do_atm: + tasks += ['post'] + + if self.model_app in ['S2S', 'S2SW', 'S2SWA', 'NG-GODAS']: + tasks += ['ocnpost'] + + if self.do_atm: + tasks += ['vrfy'] + + if self.do_atm and self.do_metp: + tasks += ['metp'] + + if self.do_wave: + if self.do_wave_bnd: + tasks += ['wavepostbndpnt', 'wavepostbndpntbll'] + tasks += ['wavepostsbs', 'wavepostpnt'] + if self.do_gempak: + tasks += ['wavegempak'] + if self.do_awips: + tasks += ['waveawipsbulls', 'waveawipsgridded'] + + if self.do_bufrsnd: + tasks += ['postsnd'] + + if self.do_gempak: + tasks += ['gempak'] + + if self.do_awips: + tasks += ['awips'] + + if self.do_wafs: + tasks += ['wafs', 'wafsgcip', 'wafsgrib2', 'wafsgrib20p25', 'wafsblending', 'wafsblending0p25'] + + tasks += ['arch'] # arch **must** be the last task + + return {f"{self._base['CDUMP']}": tasks} diff --git a/workflow/ecFlow/__init.py__ b/workflow/ecFlow/__init.py__ new file mode 100644 index 0000000000..e69de29bb2 diff --git a/workflow/ecFlow/ecflow_definitions.py b/workflow/ecFlow/ecflow_definitions.py new file mode 100644 index 0000000000..0aea65710c --- /dev/null +++ b/workflow/ecFlow/ecflow_definitions.py @@ -0,0 +1,2010 @@ +#!/usr/bin/env python3 + +""" + PROGRAM: + Manage the ecflow definitions setup. The main purpose of this class is + to be called by the ecflow_setup.py module and create an Ecflowsuite + object for each suite. Then the processing for triggers, breaking + apart the list or loop strings and adding in the triggers an events + using the ecflow module to call things like ecflow.Task, + ecflow.Trigger, and so on. + + The creates a dictionary object of each of the items it creates and + then uses the ecfsuite dict to reference so a task's trigger will + reference an already existing task and thereby automatically + populate the full path to the task as opposed to a more + dynamic reference. + + At the bottom of this module are the custom objects created to extend + the ecflow classes for tasks, families, + etc. + AUTHOR: + Kyle Nevins + kyle.nevins@noaa.gov + FILE DEPENDENCIES: + None + OUTPUT: + None +""" +import sys +import os +import re +import shutil +from datetime import datetime, timedelta +try: + import ecflow +except ImportError as err: + raise ImportError(f"Error: Could not import ecflow module: {err}") + + +class Ecflowsuite: + """ + This class is the representation of an ecflow suite. It manages all of the + items within using a dictionary. Names for the tasks are in the dictonary + by their name so all task names in a suite need to be unique otherwise + you'll have some issues. + + Families in the dictionary are represented by the parent nodes combined + with the family name, so for example the family + gfs: + atmos: + post: + will be in the dictionary at the key gfs>atmos>post, referenced like so, + self.nodes[gfs>atmos>post]. That will contain all of the objects for + that family. + + Attributes + ---------- + ecfsuite_nodes : dict + Dictionary object that contains all the nodes within the suite. This + includes tasks, families, triggers, events, etc. 
+ ecf_nodes : dict + While the ecfsuite_nodes dictionary tracks the actual ecflow API + defined nodes, this dictionary tracks the custom nodes that are + defined in the bottom of this module. + ecfhome : str + The path to the base for the ecf items. This includes the ecf scripts + repository and the storage location for all the suite script. In the + default, it is generally assumed to be the ecfGFS parameter from the + experiment setup. + build_tree : bool + A boolean that indicates if the application should build the folders + and scripts as part of the run. If this is false, the scripts and + folders are not created and assumed to already be in place. + ecfsuite : str + The name of the suite. + + Methods + ------- + add_suite(suite) + Creates the suite and if necessary creates the base folders. + + get_suite( ) + Get the EcfSuite object + + get_suite_name( ) + Returns the name of the suite + + add_ecfsuite_node(name, node) + Adds the ecfsuite node to the dictionary + + get_node(task) + Returns a specific node from the suite. + + add_edit(edit_dict, parent=None) + Adds an edit to either a suite, task, or family. The parent defines + what object will get the edit object. + + add_event(event, parent=None) + Adds an event to the parent node. Events can only be associated with + families or tasks so if the parent is None, nothing will be added. + This was done to avoid errors. + + add_defstatus(defstatus, parent=None) + Adds an defstatus to the parent node. Defstatus objects can only be + associated with families or tasks so if the parent is None, nothing + will be added. This was done to avoid errors. + + add_repeat(repeat, parent=None) + Adds in a repeat to the parent node. Repeats can be parts of a family, + task, or suite. If the parent is none it will be added to the suite. + + add_trigger(trigger, parent, state=None, event=None, suite=None, + suite_array=None, operand=None) + Adds a trigger to the parent node. Triggers can be added to families + and tasks. + + add_family(family, parents=None) + Adds a family to the suite. If the parents value is set to none, then + it will be added as a top level family. Otherwise, it will be added as + a sub-family to the parents. + + add_family_edits(edits, family, family_node, index) + Since the families and tasks are handled slightly differently with the + families being called from the ecflow_setup module and the tasks + being handled in this module, a separate function to add family edits + to the definition is required. + + add_task(task, parents, scriptrepo, template=None, + parent_node=None, index=None) + Adds a task to the parent node. If the build is set to true then the + method also calls the creation method in the EcfTask class to deploy + the script to the proper location. The script repo is where it will + look for the script. If template is set, it will look for that template + and then copy and change the name of the template at the destination to + the name of the task. + + add_task_edits(task, edit_dict, parent_node=None, index=None) + Adds edits to a task. This takes in the edit_dict and then calls the + add_edit method to apply them to that task. + + add_task_repeat(task, repeat) + Adds a repeats to task nodes. This function primarily breaks down the + tasks into lists or ranges based on the task string and then adds the + repeat to the breakout. + + add_task_defstatus(task, defstatus) + Adds a defstatus to a task node. 
This function breaks down the task + string into a range or list if necessary and then adds the calls the + add_defstatus method. + + add_task_events(task, events, parent_node=None, index=None) + Adds events to a task. This function breaks down the task string into + ranges or lists if necessary but also breaks down the events if those + are a list or range. It then passes the fully formed pieces to the + add_event method to add them to the suite. + + add_suite_triggers(task, triggers, suite_array, parents, parent_node=None, + index=None) + Adds triggers to a task. This is a fairly complex method and might be + able to be broken into smaller pieces at some point. The triggers + can be loops in themselves, based on a task with an event or a loop of + events. Or even a loop of other tasks from other suites. This function + breaks down the tasks themselves and then also any loop/list logic that + exists within the trigger and applies them to the task with the + add_trigger method. + """ + + def __init__(self, ecfsuite, ecfhome, build_tree=True): + """ + Parameters + ---------- + ecfhome : str + The path to the base for the ecf items. This includes the ecf + scripts repository and the storage location for all the suite + script. In the default, it is generally assumed to be the ecfGFS + parameter from the experiment setup. + build_tree : bool + A boolean that indicates if the application should build the + folders and scripts as part of the run. If this is false, the + scripts and folders are not created and assumed to already be + in place. + ecfsuite : str + The name of the suite. + """ + + # Initialize environment + self.ecfsuite_nodes = {} + self.ecf_nodes = {} + self.ecfhome = ecfhome + self.build_tree = build_tree + + # Create initial suite + self.ecfsuite = self.add_suite(ecfsuite) + + def add_suite(self, suite): + """ + Creates the suite object and if necessary creates the base folders. + + Parameters + ---------- + suite : str + Name of the suite object. + + Returns + ------- + new_suite : EcfSuite object + An EcfSuite object + """ + + new_suite = EcfSuite(f"{suite}") + if self.build_tree: + new_suite.generate_folders(self.ecfhome) + return new_suite + + def get_suite(self): + """ + Get the EcfSuite object + + Parameters + ---------- + None + + Returns + ------- + EcfSuite + The ecfsuite object that has all the contents + """ + + return self.ecfsuite + + def get_suite_name(self): + """ + Returns the name of the suite + + Parameters + ---------- + None + + Returns + ------- + name : str + The name of the suite. + """ + + return self.ecfsuite.name() + + def add_ecfsuite_node(self, name, node): + """ + Adds the ecfsuite node to the dictionary + + Parameters + ---------- + name : str + The string name of the object + node : EcfNode + The actual node object. + """ + + self.ecfsuite_nodes[name] = node + + def get_node(self, node): + """ + Returns a specific task from the suite. + + Parameters + ---------- + task : str + The name of the task to lookup in the EcfNodes dictionary. + + Returns + ------- + EcfTask + An EcfTask that is an extension of the ecflow.task object. + """ + + return self.ecf_nodes[node] + + def add_edit(self, edit_dict, parent=None): + """ + Adds an edit to either a suite, task, or family. The parent defines + what object will get the edit object. + + Parameters + ---------- + edit_dict : dict + Dictionary object that contains the edits in the form of + {"edit" : "value"} + parent : str + String for the parent node that will get the edits added. 
+ + Returns + ------- + None + """ + + if parent: + self.ecf_nodes[parent] += ecflow.Edit(edit_dict) + else: + self.ecfsuite += ecflow.Edit(edit_dict) + + def add_event(self, event, parent=None): + """ + Adds an event to the parent node. Events can only be associated with + families or tasks so if the parent is None, nothing will be added. + This was done to avoid errors. + + Parameters + ---------- + event : str + A string that is passed to the ecflow.Event object + parent : str + String for the parent node that will get the events added. + + Returns + ------- + None + """ + + if parent: + self.ecf_nodes[parent] += ecflow.Event(event) + + def add_defstatus(self, defstatus, parent=None): + """ + Adds an defstatus to the parent node. Defstatus objects can only be + associated with families or tasks so if the parent is None, nothing + will be added. This was done to avoid errors. + + Parameters + ---------- + defstatus : str + A string that is passed to the ecflow.Defstatus object + parent : str + String for the parent node that will get the defstatus added. + + Returns + ------- + None + """ + + if parent: + self.ecf_nodes[parent] += ecflow.Defstatus(defstatus) + + def add_repeat(self, repeat, parent=None): + """ + Adds in a repeat to the parent node. Repeats can be parts of a family, + task, or suite. If the parent is none it will be added to the suite. + + This will calculate the difference between the two dates and use the + interval value from the third entry to identify how often. Due to the + fact that ecflow has a very simplistic time/date/interval + implementation, this function can render the dates in multiple + different fashions. + + If the start and end are the same day, it'll just use a time set. If + it is different days, it'll do a relative time set with the dates and + also a start time. If it is multiple dates it will throw in repeats + based on relative values. + + Parameters + ---------- + repeat : str + This is a date string in the format of YYYYMMDDHH to YYYYMMDDHH by + DD:HH:MM. The hours on the second date string are optional as are + the day parameters in the time string. + parent : str + + Returns + ------- + None + """ + + repeat_token = re.search( + r"(\d{8,10})( | to )(\d{10})( | by )(\d{1,2}:)?(\d{1,2}:\d{1,2})", + repeat) + start = repeat_token.group(1).strip() + end = repeat_token.group(3).strip() + byday = repeat_token.group(5).strip() if repeat_token.group(5) is not \ + None else repeat_token.group(5) + bytime = repeat_token.group(6).strip() + + startdate = datetime.strptime(start, "%Y%m%d%H") if len(start) == 10 \ + else datetime.strptime(start, "%Y%m%d") + enddate = datetime.strptime(end, "%Y%m%d%H") + if byday is not None: + delta = timedelta(days=int(byday.split(':')[0]), + hours=int(bytime.split(':')[0]), + minutes=int(bytime.split(':')[1])) + else: + delta = timedelta(hours=int(bytime.split(':')[0]), + minutes=int(bytime.split(':')[1])) + + total_runtime = enddate - startdate + + if parent: + targetnode = self.ecf_nodes[parent] + else: + targetnode = self.ecfsuite + + try: + if total_runtime.total_seconds() < delta.total_seconds(): + raise ConfigurationError + except ConfigurationError: + if parent: + print(f"Node: {parent} - " + "Repeat has a greater increment than total time.") + else: + print(f"Suite: {self.get_suite_name()} - " + "Repeat has a greater increment than total time.") + sys.exit(1) + + # Setup the start date. 
+ targetnode += ecflow.Date(f"{startdate.strftime('%d.%m.%Y')}") + + # If the dates are the same day, we only need a time string: + if startdate.date() == enddate.date(): + deltahours, deltaminutes = delta.seconds // 3600, delta.seconds // 60 % 60 + time_string = (f"{startdate.strftime('%H:%M')} " + f"{enddate.strftime('%H:%M')} " + f"{deltahours:02}:{deltaminutes:02}") + targetnode += ecflow.Time(time_string) + # If the days don't match up, we'll need to do some repeats. + else: + deltahours, deltaminutes = delta.seconds // 3600, delta.seconds // 60 % 60 + if delta.total_seconds() < 86400: + position_time = startdate + total_instances = 0 + while position_time <= enddate: + total_instances += 1 + position_time = position_time + delta + if len(start) == 10: + targetnode += ecflow.Time(f"{startdate.strftime('%H:%M')}") + else: + targetnode += ecflow.Today(ecflow.TimeSlot(0, 0), True) + targetnode += ecflow.Time(deltahours, deltaminutes, True) + targetnode += ecflow.RepeatInteger("RUN", 1, total_instances) + else: + if deltahours == 0 and deltaminutes == 0: + position_time = startdate + delta + if len(start) == 10: + targetnode += ecflow.Time(f"{startdate.strftime('%H:%M')}") + else: + targetnode += ecflow.Time(00, 00, True) + while position_time <= enddate: + position_string = f"{position_time.strftime('%d.%m.%Y')}" + targetnode += ecflow.Date(position_string) + position_time = position_time + delta + else: + position_time = startdate + while position_time <= enddate: + targetnode += ecflow.Cron(position_time.strftime('%H:%M'), + days_of_month=[int(position_time.strftime('%d'))], + months=[int(position_time.strftime('%m'))]) + position_time = position_time + delta + + def add_trigger(self, trigger, parent, state=None, event=None, suite=None, + suite_array=None, operand=None): + """ + Adds a trigger to the parent node. Triggers can be added to families + and tasks. + + Parameters + ---------- + trigger : str + The trigger string to add to the parent node. + parent : str + The parent node that will accept the trigger + state : str + The state of the trigger. Generally looking for complete, active, + or queued. + event : str + If there is an event associated with a task, this will add it to + the trigger definition. + suite : str + If the trigger is looking outside the current suite, this will + pull in the details from the other suites and attach the trigger. + suite_array : dict + This is the array of suites in the event that the suite value is + populated, the details of the suite need to be made available to + the function + operand : bool + This is a true/false value that is looking to define if the trigger + is an AND or an OR. If it is TRUE it is an AND, if it is FALSE, it + is an OR. + + Returns + ------- + None + """ + + if suite is not None: + try: + trigger_path = suite_array[suite].get_node(trigger).get_abs_node_path() + if state is None and event is None: + add_trigger = ecflow.Trigger(f"{trigger_path} == complete") + elif state is not None and event is None: + add_trigger = ecflow.Trigger(f"{trigger_path} == {state}") + elif state is None and event is not None: + add_trigger = ecflow.Trigger(f"{trigger_path}:{event}") + except KeyError as e: + print(f"Suite {suite} for task/trigger {parent}/{trigger}" + " is not available. 
Please check the configuration file.") + print("Error {e}") + sys.exit(1) + else: + try: + if state is None and event is None: + add_trigger = ecflow.Trigger([self.ecf_nodes[trigger]]) + elif state is not None and event is None: + trigger_path = self.ecf_nodes[trigger].get_abs_node_path() + add_trigger = ecflow.Trigger(f"{trigger_path} == {state}") + elif state is None and event is not None: + trigger_path = self.ecf_nodes[trigger].get_abs_node_path() + add_trigger = ecflow.Trigger(f"{trigger_path}:{event}") + except KeyError as e: + print(f"The node/trigger {parent}/{trigger} is not available " + f"in suite {self.get_suite_name()}." + " Please check the configuration file.") + print(f"Error {e}") + sys.exit(1) + if (operand is not None and + self.ecf_nodes[parent].get_trigger() is not None): + add_trigger = ecflow.Trigger(add_trigger.get_expression(), operand) + self.ecf_nodes[parent].add(add_trigger) + + def add_family(self, family, parents=None): + """ + Adds a family to the suite. If the parents value is set to none, then + it will be added as a top level family. Otherwise, it will be added as + a sub-family to the parents. + + Parameters + ---------- + family : str + The name of the family that is to be added to the suite. + parents : str + The string representation of the parent nodes that the family needs + to be added to. + + Returns + ------- + None + """ + + family_name = f"{parents}>{family}" if parents else family + + # If the name already exists, the family already exists + if family_name not in self.ecf_nodes.keys(): + self.ecf_nodes[family_name] = EcfFamily(family) + if self.build_tree: + self.ecf_nodes[family_name].generate_folders(self.ecfhome, + self.get_suite_name(), + parents) + + if parents: + self.ecf_nodes[parents] += self.ecf_nodes[family_name] + else: + self.ecfsuite += self.ecf_nodes[family_name] + + def add_family_edits(self, edits, family, family_node, index): + """ + Since the families and tasks are handled slightly differently with the + families being called from the ecflow_setup module and the tasks + being handled in this module, a separate function to add family edits + to the definition is required. + + Parameters + ---------- + edits : dict + The dictionary that contains the edits to be added to the family. + family : str + This is a string representation of the current family. If it is + a looping family, this will be the actual name for whatever the + index is using. + family_node : str + This is the string of the family node that includes any looping + mechanisms. This is necessary in case the edit needs to use the + index or is a loop mechanism itself. + index : int + The current position of the loop in the event that the family is + a loop so the edit value can reference the correct object. + + Returns + ------- + None + """ + + for node in edits: + edit_node = EcfEditNode(node, family_node) + value_node = EcfEditNode(edits[node], family_node) + for edit in edit_node.get_full_name_items(index): + for value in value_node.get_full_name_items(index): + self.add_edit({edit: value}, family) + + def add_task(self, task, parents, scriptrepo, template=None, + parent_node=None, index=None): + """ + Adds a task to the parent node. If the build is set to true then the + method also calls the creation method in the EcfTask class to deploy + the script to the proper location. The script repo is where it will + look for the script. 
If template is set, it will look for that template + and then copy and change the name of the template at the destination to + the name of the task. + + Parameters + ---------- + task : str + The name of the task + parents : str + The name of the parent nodes to get the task + scriptrepo : str + File path to the script repository to look for the task.ecf scripts + template : str + Name of the template file to use instead of searching for the name + of the task in the script repo. + parent_node : EcfNode + This is the parent node string that contains any looping details + if it is a list or a range object so that can be passed into the + task objects. + index : int + The current position of the parent_node in its loop so if the task + is dependent on the parent_node for position it is obtained from + this value. + + Returns + ------- + None + """ + task_node = EcfTaskNode(task, parent_node) + self.ecfsuite_nodes[task] = task_node + for task_name in task_node.get_full_name_items(index): + if task_name not in self.ecf_nodes.keys(): + self.ecf_nodes[task_name] = EcfTask(task_name) + self.ecf_nodes[task_name].setup_script(scriptrepo, template) + if self.build_tree: + self.ecf_nodes[task_name].generate_ecflow_task(self.ecfhome, + self.get_suite_name(), + parents) + self.ecf_nodes[parents] += self.ecf_nodes[task_name] + + def add_task_edits(self, task, edit_dict, parent_node=None, index=None): + """ + Adds edits to a task. This takes in the edit_dict and then calls the + add_edit method to apply them to that task. + + This function also breaks apart any lists or ranges that are passed in + to the tasks and applies it to all of them. It also applies any loop + logic that is applied to the parent task to the edits themselves. + + Parameters + ---------- + task : str + The name of the task. Can also include a list or range object in + the string. + edit_dict : dict + A dictionary of the edits that are to be applied to the tasks. + parent_node : str + This is the parent node string that contains any looping details + if it is a list or a range object so that can be passed into the + task objects. + index : int + The current position of the parent_node in its loop so if the edit + is dependent on the parent_node for position it is obtained from + this value. + + Returns + ------- + None + """ + + task_node = self.ecfsuite_nodes[task] + if len(task_node.get_full_name_items(index)) > 1: + node_for_edits = task_node + else: + node_for_edits = parent_node + for task_name in task_node.get_full_name_items(index): + task_index = task_node.get_full_name_items().index(task_name) + for node in edit_dict: + edit_node = EcfEditNode(node, node_for_edits) + value_node = EcfEditNode(edit_dict[node], node_for_edits) + for edit in edit_node.get_full_name_items(task_index): + for value in value_node.get_full_name_items(task_index): + self.add_edit({edit: value}, task_name) + + def add_task_repeat(self, task, repeat): + """ + Adds a repeats to task nodes. This function primarily breaks down the + tasks into lists or ranges based on the task string and then adds the + repeat to the breakout. + + Parameters + ---------- + task : str + The name of the task or list/range of tasks to add the repeat. + repeat : str + The repeat string to be passed to the add_repeat method. 
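+            For example: "2022032400 to 2022042400 by 12:00".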
+ + Returns + ------- + None + """ + + task_node = self.ecfsuite_nodes[task] + for task_name in task_node.get_full_name_items(index): + self.add_repeat(repeat, task_name) + + def add_task_defstatus(self, task, defstatus): + """ + Adds a defstatus to a task node. This function breaks down the task + string into a range or list if necessary and then adds the calls the + add_defstatus method. + + Parameters + ---------- + task : str + The task string to add the defstatus pieces to. Can be a range or + list as well. + defstatus : str + String that represents the defstatus, like complete. + + Returns + ------- + None + """ + + task_node = self.ecfsuite_nodes[task] + for task_name in task_node.get_full_name_items(index): + self.add_defstatus(defstatus, task_name) + + def add_task_events(self, task, events, parent_node=None, index=None): + """ + Adds events to a task. This function breaks down the task string into + ranges or lists if necessary but also breaks down the events if those + are a list or range. It then passes the fully formed pieces to the + add_event method to add them to the suite. + + Parameters + ---------- + task : str + The task string to add the event to. + events : str + The events string that will be added to the task. + parent_node : str + This is the parent node string that contains any looping details + if it is a list or a range object so that can be passed into the + task objects. + index : int + The current position of the parent_node in its loop so if the task + is dependent on the parent_node for position it is obtained from + this value. + + Returns + ------- + None + """ + + task_node = self.ecfsuite_nodes[task] + for task_name in task_node.get_full_name_items(index): + if task_node.is_list or task_node.is_range: + node_for_events = task_node + task_index = task_node.get_full_name_items().index(task_name) + else: + node_for_events = parent_node + task_index = index + for event_item in events: + event_node = EcfEventNode(event_item, node_for_events) + for node in event_node.get_full_name_items(task_index): + self.add_event(node, task_name) + + def add_suite_triggers(self, node, triggers, suite_array, parents, + parent_node=None, index=None): + """ + Adds triggers to a task. This is a fairly complex method and might be + able to be broken into smaller pieces at some point. The triggers + can be loops in themselves, based on a task with an event or a loop of + events. Or even a loop of other tasks from other suites. This function + breaks down the tasks themselves and then also any loop/list logic that + exists within the trigger and applies them to the task with the + add_trigger method. + + Parameters + ---------- + node : str + The task string, list, range or static, that is to be broken down + and then the triggers applied. + triggers : dict + The dictionary of triggers to add to the task. + suite_array : dict + In case the triggers are from another suite, this calls the trigger + from the other suite. + parents : str + The string of the parents, this is used to identify the family in + the event that the trigger is associated with a family. + parent_node : str + This is the parent node string that contains any looping details + if it is a list or a range object so that can be passed into the + task objects. + index : int + The current position of the parent_node in its loop so if the task + is dependent on the parent_node for position it is obtained from + this value. 
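+        Note: each entry in the triggers collection is a mapping keyed by
+        either 'task' or 'family' and may also carry 'state', 'event',
+        'suite', and 'operand' keys, for example (task name is illustrative
+        only): {'task': 'jgfs_forecast', 'state': 'complete'}.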
+ + Returns + ------- + None + """ + + working_node = self.ecfsuite_nodes[node] + for item in working_node.get_full_name_items(index): + if working_node.get_type() == "family": + node_name = (f"{parents}>{item}") + else: + node_name = item + for trigger_item in triggers: + suite = None + operand = None + trigger_node = ecfTriggerNode(trigger_item, working_node) + if trigger_node.has_suite(): + suite = trigger_node.get_suite() + if trigger_node.has_operand(): + operand = trigger_node.get_operand() + + if working_node.is_list or working_node.is_range: + trigger_index = working_node. \ + get_full_name_items(index). \ + index(item) + else: + trigger_index = index + + for trigger in trigger_node.get_full_name_items(trigger_index): + if trigger_node.trigger_type == "family": + trigger_name = trigger.replace('/', '>') + else: + trigger_name = trigger + + if trigger_node.has_event(): + if trigger_node.is_list or trigger_node.is_range: + event_index = trigger_node.\ + get_full_name_items(index).\ + index(trigger_name) + elif working_node.is_list or working_node.is_range: + event_index = trigger_index + else: + event_index = index + trigger_events = trigger_node.get_event() + for event in trigger_events.\ + get_full_name_items(event_index): + if trigger_node.has_state(): + state = trigger_node.get_state() + if not isinstance(state, list): + state = [state] + for state_item in state: + self.add_trigger(trigger_name, node_name, + suite=suite, + suite_array=suite_array, + event=event, + state=state_item, + operand=operand) + else: + self.add_trigger(trigger_name, node_name, + suite=suite, + suite_array=suite_array, + event=event, + operand=operand) + else: + if trigger_node.has_state(): + state = trigger_node.get_state() + if not isinstance(state, list): + state = [state] + for state_item in state: + self.add_trigger(trigger_name, node_name, + suite=suite, + suite_array=suite_array, + state=state_item, + operand=operand) + else: + self.add_trigger(trigger_name, node_name, + suite=suite, + suite_array=suite_array, + operand=operand) + + +class EcfNode(): + """ + This is the base class for the other classes that are used to identify any + loops, lists, or what the item might be and also assign the name to the + object. This reduces the overhead for code and also makes it easier to + add in additional node type objects. Most of the objects extend this class + so this one is the main functions that apply to all node types. + + Attributes + ---------- + initial_count : int + In the event that the node is a range this value will hold the initial + count value for the object. + increment : int + In the event that the node is a range or list, this holds the amount + to increment the counter. + max_value : int + In the event that the node is a range or list, this holds the max value + associated with it. + name : str + Name of the object. + is_list : bool + If the node contains the [ ] list syntax. True if it does, false + otherwise. + items : array + Is the array of items within a list if the node object has a list + full_name_items : array + This is an array that consists of the counter plus any prefix or suffix + in the node string. + use_parent_counter : bool + If the node use a list or range syntax but has no internal values, + indicating that it should use the range of the parent node. + + Methods + ------- + get_name() + Returns the name of the node. + + __check_range(ecfitem) + Checks to see if the EcfNode is a loop. 
If it is, this function also + calls the supporting functions to set the range values, if there is + a max, min, interval, or list. + + invalid_range() + Helper function to ensure that the range is valid. Exits if it is not. + + get_items() + Returns the list of items for the ecf node so that it can be distributed + to the child nodes. + + get_full_name_items(counter=0) + If the item is a range or list, it returns the full names of the items + with the prefix and suffix strings included, if it is a single then it + just returns an array of one item. If it uses the parent counter it + returns an array of one item in the position of the parent counter. + + __set_max_value(range_token) + The range token is passed in and if only one value is set in the range + then it is set to max value and the initial is set to 0 and the + interval is set to 1. + + __set_initial_max_value(range_token) + If the range token is passed two parameters, they are assumed to be + the initial and max values. This sets those values for the node. The + interval is set to 1. + + __set_initial_increment_max_value(range_token) + If three values are sent in through the range token, this sets the max, + initial, and increment values. + + __setup_items_list(ecfparent) + In the event that the items list wasn't already defined, this sets up + the list of items for the node by pulling in the parent items if + necessary, modifying the increment setup as necessary as well. After + this method is called the items array should be fully populated. + + __populate_full_name_items() + Called after the items list is populated. If there is a range or list + then this uses the items list with the prefix and suffix strings to + create a new array for full names and populates that array. After this + is called the full_name_items array should be used. If the node isn't + a range or list then it is just an array of one item. + + get_full_name(counter=None) + This method uses the counter object if the item is a list to identify + the position in a list, the item in the range or if there is no counter + associated with it, the base name. + + get_position_name(counter=None) + This method uses the counter object if the item is a list to identify + the position in a list, the item in the range or if there is no counter + associated with it, the base name. + + has_max_value() + Returns true if the node object range has a maximum value. + + get_max_value() + Returns the maximum value for the node. + + get_range(initial_count=0, increment=1, max_value=1) + If the node has a list or range associated with it, this returns the + range of items or the range of the array. + """ + + def __init__(self, ecfitem, ecfparent=None): + """ + Parameters + ---------- + ecfitem : str + Name of the EcfNode item. If it contains a range or list + identifier, the other values are populated to identify what kind + of node it is. + ecfparent : str + Name of the parent for the EcfNode item. This will help determine + if the parent has the counter or if one is defined for this class + """ + self.__items = [] + self.__full_name_items = [] + self.__check_range(ecfitem) + self.__setup_items_list(ecfparent) + self.__populate_full_name_items() + if (ecfparent and self.__max_value is None and + (ecfparent.is_list or ecfparent.is_range) and + len(self.__items) == len(ecfparent.get_full_name_items())): + self.use_parent_counter = True + + def get_name(self): + """ + Returns the name of the node. 
+ + Parameters + ---------- + None + + Returns + ------- + str + The name in string format. + """ + + return self.name + + def __check_range(self, ecfitem): + """ + Checks to see if the EcfNode is a loop. If it is, this function also + calls the supporting functions to set the range values, if there is + a max, min, interval, or list. + + The range is split into a tokenized array. + + Parameters + ---------- + ecfitem : str + The item that is to be processed. This is just the name of the + item, not the supporting dictionary if there is one. + + + Returns + ------- + bool + True if the node is a loop format defined by ( ). + """ + + self.is_list = False + self.is_range = False + self.use_parent_counter = False + self.__base = '' + self.__suffix = '' + self.initial_count = None + self.increment = None + self.__max_value = None + + if isinstance(ecfitem, str): + self.name = ecfitem + if re.search(r".*\(.*\).*", ecfitem): + self.is_range = True + range_functions = { + 1: self.__set_max_value, + 2: self.__set_initial_max_value, + 3: self.__set_initial_increment_max_value, + } + range_token = re.search(r"(.*)\((.*)\)(.*)", self.name) + range_type = range_token.group(2).strip().split(',') + self.__base = range_token.group(1).strip() + self.__suffix = range_token.group(3).strip() + range_functions.get(len(range_type), + self.invalid_range)(range_type) + elif re.search(r".*\[.*\].*", ecfitem): + self.is_list = True + list_token = re.search(r"(.*)\[(.*)\](.*)", ecfitem) + list_type = list_token.group(2).strip().split(',') + self.__base = list_token.group(1).strip() + self.__suffix = list_token.group(3).strip() + if not list_type[0]: + self.use_parent_counter = True + else: + self.__items = list_type + else: + self.__items = [ecfitem] + elif isinstance(ecfitem, list): + self.name = ''.join(str(i) for i in ecfitem) + self.is_list = True + self.__items = ecfitem + else: + self.name = ecfitem + self.is_list = False + self.__items = [ecfitem] + + def invalid_range(self): + """ + Helper function to ensure that the range is valid. Exits if it is not. + + Parameters + ---------- + None + + Returns + ------- + None + """ + + print(f"The range specified in {self.name} is out of bounds. " + "Please review the configuration.") + sys.exit(1) + + def get_items(self): + """ + Returns the list of items for the ecf node so that it can be distributed + to the child nodes. + + Parameters + ---------- + None + + Returns + ------- + items : array + The array of items for the node. + """ + + return self.__items + + def get_full_name_items(self, counter=0): + """ + If the item is a range or list, it returns the full names of the items + with the prefix and suffix strings included, if it is a single then it + just returns an array of one item. If it uses the parent counter it + returns an array of one item in the position of the parent counter. + + Parameters + ---------- + counter : int + The position of the parent counter. If the parent counter is not + used it defaults to 0 and is not used. + + Returns + ------- + array + The array of the full name items either as a full list or an + array of one. + """ + + if self.use_parent_counter: + return [self.__full_name_items[counter]] + else: + return self.__full_name_items + + def __set_max_value(self, range_token): + """ + The range token is passed in and if only one value is set in the range + then it is set to max value and the initial is set to 0 and the + interval is set to 1. + + Parameters + ---------- + range_token : array + The range token from the is_loop method. 
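+            For example, a node written as "task_f( 4 )" produces the
+            single-element range token ['4'].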
+ + Returns + ------- + None + """ + + if not range_token[0]: + self.__max_value = None + self.use_parent_counter = True + else: + try: + self.__max_value = int(range_token[0]) + except TypeError: + print(f"Maximum value for {self.name} is not an integer") + sys.exit(1) + + def __set_initial_max_value(self, range_token): + """ + If the range token is passed two parameters, they are assumed to be + the initial and max values. This sets those values for the node. The + interval is set to 1. + + Parameters + ---------- + range_token : array + The range token from the is_loop method. + + Returns + ------- + None + """ + + try: + self.initial_count = None if not range_token[0] else int(range_token[0]) + except TypeError: + print(f"Initial count value for {self.name} is not an integer") + sys.exit(1) + self.increment = None + if not range_token[1]: + self.__max_value = None + self.use_parent_counter = True + else: + try: + self.__max_value = int(range_token[1]) + except TypeError: + print(f"Maximum value for {self.name} is not an integer") + sys.exit(1) + + def __set_initial_increment_max_value(self, range_token): + """ + If three values are sent in through the range token, this sets the max, + initial, and increment values. + + Parameters + ---------- + range_token : array + The range token from the is_loop method. + + Returns + ------- + None + """ + + try: + self.initial_count = None if not range_token[0] else int(range_token[0]) + self.increment = None if not range_token[2] else int(range_token[2]) + except TypeError: + print(f"Initial count and increment values for {self.name} " + "are not integers") + sys.exit(1) + if not range_token[1]: + self.__max_value = None + self.use_parent_counter = True + else: + try: + self.__max_value = int(range_token[1]) + except TypeError: + print(f"Maximum value for {self.name} is not an integer") + sys.exit(1) + + def __setup_items_list(self, ecfparent): + """ + In the event that the items list wasn't already defined, this sets up + the list of items for the node by pulling in the parent items if + necessary, modifying the increment setup as necessary as well. After + this method is called the items array should be fully populated. + + Parameters + ---------- + ecfparent : str + The parent node in case the current node uses the parent counter. + This is passed in to get those parameters. + + Returns + ------- + None + """ + + # First check to see if the list object is already set or if this + # is even a list. If they are just return, nothing else to do. + if self.is_list: + return + if self.__items: + return + + if self.use_parent_counter: + if self.is_list: + self.__items = ecfparent.get_items() + elif self.is_range: + if self.initial_count is None: + self.initial_count = ecfparent.initial_count + if self.increment is None: + self.increment = ecfparent.increment + item_range = self.get_range(max_value=len(ecfparent.get_items())) + self.__items = [*item_range] + else: + if self.is_range and self.has_max_value(): + item_range = self.get_range() + self.__items = [*item_range] + + def __populate_full_name_items(self): + """ + Called after the items list is populated. If there is a range or list + then this uses the items list with the prefix and suffix strings to + create a new array for full names and populates that array. After this + is called the full_name_items array should be used. If the node isn't + a range or list then it is just an array of one item. 
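+        For example, a base of "task_f", items [1, 2, 3], and an empty
+        suffix produce "task_f001", "task_f002" and "task_f003".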
+ + Parameters + ---------- + None + + Returns + ------- + None + """ + + if not self.is_range and not self.is_list: + self.__full_name_items = self.__items + return + + for item in self.__items: + if isinstance(item, int): + self.__full_name_items.append(f"{self.__base}" + f"{item:03}" + f"{self.__suffix}") + elif isinstance(item, str): + self.__full_name_items.append(f"{self.__base}" + f"{item}" + f"{self.__suffix}") + + def get_position_name(self, counter=None): + """ + This method uses the counter object if the item is a list to identify + the position in a list, the item in the range or if there is no counter + associated with it, the base name. + + Parameters + ---------- + counter : str or int + If it is a str, returns the list item in that position. If it is + an int, then return the counter position for it. + + Returns + ------- + None + """ + + try: + if self.is_range: + if isinstance(counter, int): + return f"{self.__base}{counter:03}{self.__suffix}" + elif isinstance(counter, str): + return f"{self.__base}{counter}{self.__suffix}" + elif re.search(r"\[.*\]", self.name): + array_item = self.__items[counter] + if isinstance(array_item, int): + return f"{self.__base}{array_item:03}{self.__suffix}" + elif isinstance(array_item, str): + return f"{self.__base}{array_item}{self.__suffix}" + elif self.is_list: + array_item = self.__items[counter] + if isinstance(array_item, int): + return f"{array_item:03}" + elif isinstance(array_item, str): + return f"{array_item}" + else: + return self.name + except ValueError as err: + print(f"Problem getting full name of {self.name}. Error: {err}") + + def has_max_value(self): + """ + Returns true if the node object range has a maximum value. + + Parameters + ---------- + None + + Returns + ------- + bool + True if there is a max value, false otherwise. + """ + + return True if self.__max_value is not None else False + + def get_max_value(self): + """ + Returns the maximum value for the node. + + Parameters + ---------- + None + + Returns + ------- + int + The max value if one is set. + """ + + return self.__max_value + + def get_range(self, initial_count=0, increment=1, max_value=1): + """ + If the node has a list or range associated with it, this returns the + range of items or the range of the array. + + Parameters + ---------- + initial_count : int + The initial count which is defaulted to 1 in case it wasn't defined + increment : int + The increment value to use for the range in case it wasn't defined + max_value : int + The maximum value for the range. + + Returns + ------- + None + """ + + if self.is_list: + return range(initial_count, len(self.__items), increment) + else: + if self.initial_count is not None: + initial_count = self.initial_count + if self.increment is not None: + increment = self.increment + if self.__max_value is not None: + max_value = self.__max_value + max_value = (max_value * increment) + initial_count + return range(initial_count, max_value, increment) + + +class EcfTaskNode(EcfNode): + """ + Extension class for the EcfNodes to identify tasks. + + Methods + ------- + get_type() + Returns that this node is a task type. + """ + + def get_type(self): + """ + Returns that this node is a task type. + + Parameters + ---------- + None + + Returns + ------- + str + The value of task to identify the node is a task. + """ + + return 'task' + + +class EcfFamilyNode(EcfNode): + """ + Extension class for the EcfNodes to identify tasks. + + Methods + ------- + get_type() + Returns that this node is a task type. 
+ """ + + def get_type(self): + """ + Returns that this node is a task type. + """ + + return 'family' + + +class EcfEventNode(EcfNode): + """ + Extension class for the EcfNodes to identify events. + + Methods + ------- + get_type() + Returns that this node is an event type. + """ + + def get_type(self): + """ + Returns that this node is an event type. + """ + + return 'event' + + +class ecfTriggerNode(EcfNode): + """ + Extension class for the EcfNodes to identify triggers. Overloads the + constructors since triggers can have multiple levels within themselves + for events and such. + + Attributes + ---------- + EcfNode : object + This pulls in the attributes from the EcfNode class as well. + task_setup : dict + This stores the dictionary object that tracks the task setup for the + trigger. + ecfparent : str + The ecfparent string object used to track loops/list. This is used in + case there is a multi-level loop. + trigger_type : str + Used to identify if the trigger is of a family or of a task. + + Methods + ------- + get_type() + Returns that this node is a trigger type. + + has_operand() + If the trigger has an operand to indciate if it needs to be added as an + OR or AND in the trigger statement, set the value and return True, + otherwise false. + + get_operand() + Returns the operand associated with the trigger. + + get_state() + Returns the state associated with the trigger if one was defined. + + get_event() + Returns the event_string associated with the trigger if one was defined + + has_suite() + If a suite was passed in as part of the parameters in the keys, this + returns True and sets the suite attribute to the suite name. + + get_suite() + Returns the suite name. + + has_state() + If a state was passed in with the YAML parameters, return true and set + the state attribute to the state of the trigger. + + has_event() + If the trigger has an event associated with it, it is possible that the + event has a loop. This method determines if the trigger has an event + and if it does identifies the event string and items associated with it + so that it can be used in other functions later. If it does have the + loop or list identifiers then it returns true, otherwise false. + + invalid_event_range() + Helper method to exit the application if the event range is invalid. + """ + + def __init__(self, ecfitem, ecfparent=None): + """ + Parameters + ---------- + ecfItem : dict or str + A dictionary or string item that represents the current node. + """ + + self._EcfNode__items = [] + self._EcfNode__full_name_items = [] + if 'family' in ecfitem.keys(): + trigger_type = 'family' + else: + trigger_type = 'task' + self._EcfNode__check_range(ecfitem[trigger_type]) + self._EcfNode__setup_items_list(ecfparent) + self._EcfNode__populate_full_name_items() + self.task_setup = ecfitem + self.ecfparent = ecfparent + self.trigger_type = trigger_type + + def get_type(self): + """ + Returns that this node is a trigger type. + + Parameters + ---------- + None + + Returns + ------- + str + The value trigger to identify that this is a trigger node. + """ + + return 'trigger' + + def has_operand(self): + """ + If the trigger has an operand to indciate if it needs to be added as an + OR or AND in the trigger statement, set the value and return True, + otherwise false. + + Parameters + ---------- + None + + Returns + ------- + bool + True if there is an operand associated with the trigger. 
+ """ + + if 'operand' in self.task_setup.keys(): + self.operand = self.task_setup['operand'] + return True + else: + return False + + def get_operand(self): + """ + Returns the operand associated with the trigger. + + Parameters + ---------- + None + + Returns + ------- + str or bool + Return the operand. + """ + + if self.operand == "OR" or self.operand == "or": + return False + else: + return True + + def get_state(self): + """ + Returns the state associated with the trigger if one was defined. + + Parameters + ---------- + None + + Returns + ------- + str + The state in string format. + """ + + return self.state + + def get_event(self): + """ + Returns the event_string associated with the trigger if one was defined + + Parameters + ---------- + None + + Returns + ------- + str + The event in string format. + """ + + return self.event + + def has_suite(self): + """ + If a suite was passed in as part of the parameters in the keys, this + returns True and sets the suite attribute to the suite name. + + Parameters + ---------- + None + + Returns + ------- + bool + If there is a suite associated with the trigger, return true + otherwise false. + """ + if 'suite' in self.task_setup.keys(): + self.suite = self.task_setup['suite'] + return True + else: + return False + + def get_suite(self): + """ + Returns the suite name. + + Parameters + ---------- + None + + Returns + ------- + str + The name of the suite in string format. + """ + return self.suite + + def has_state(self): + """ + If a state was passed in with the YAML parameters, return true and set + the state attribute to the state of the trigger. + + Parameters + ---------- + None + + Returns + ------- + bool + Returns true if there is a state value in the keys, otherwise + false. + """ + if 'state' in self.task_setup.keys(): + self.state = self.task_setup['state'] + return True + else: + return False + + def has_event(self): + """ + If the trigger has an event associated with it, it is possible that the + event has a loop. This method determines if the trigger has an event + and if it does identifies the event string and items associated with it + so that it can be used in other functions later. If it does have the + loop or list identifiers then it returns true, otherwise false. + + Parameters + ---------- + None + + Returns + ------- + bool + Returns true if the trigger has an event in either list or + string format. + """ + if 'event' in self.task_setup.keys(): + if self.is_list or self.is_range: + self.event = EcfEventNode(self.task_setup['event'], self) + else: + self.event = EcfEventNode(self.task_setup['event'], + self.ecfparent) + return True + else: + return False + + def invalid_event_range(self): + """ + Helper method to exit the application if the event range is invalid. + + Parameters + ---------- + None + + Returns + ------- + None + """ + + print(f"The range specified in {self.name} is out of bounds. " + "Please review the configuration.") + sys.exit(1) + + +class EcfEventNode(EcfNode): + """ + Extension class for the EcfNodes to identify events. + + Methods + ------- + get_type() + Returns that this node is an event type. + """ + + def get_type(self): + """ + Returns that this node is an event type. + + Parameters + ---------- + None + + Returns + ------- + str + The string event to identify this as an event node. + """ + return 'event' + + +class EcfEditNode(EcfNode): + """ + Extension class for the EcfNodes to identify edits. + + Methods + ------- + get_type() + Returns that this node is an edit type. 
+ """ + + def get_type(self): + """ + Returns that this node is an edit type. + + Parameters + ---------- + None + + Returns + ------- + str + The string edit to identify this as an edit node. + """ + + return 'edit' + + +class EcfRoot(): + """ + A root level class that is not an EcfNode object from above but an + object that will extend a class from the ecflow module. + + Methods + ------- + get_base_name() + Returns the prefix to a node. + """ + + def get_base_name(): + """ + Returns the prefix to a node. + * Not currently in use, may be removed at a later date. + + Parameters + ---------- + None + + Returns + ------- + str + The name of the node if it has a prefix, this strips out the + surrounding range and just returns the beginning. + """ + return re.search(r"(.*)\{.*\}", self.name()).group(1).strip() + + +class EcfSuite(ecflow.Suite, EcfRoot): + """ + Extends the EcfRoot and ecflow.Suite classes to provide an additional + function when defining the suite that also it can generate the folders + for the suite and populate the families/tasks. + + Methods + ------- + generate_folders(ecfhome) + This function uses the ecfhome directory as a base and if it doesn't + exist makes the suite folder at the ecfhome. + """ + + def generate_folders(self, ecfhome): + """ + This function uses the ecfhome directory as a base and if it doesn't + exist makes the suite folder at the ecfhome. + + Parameters + ---------- + ecfhome : str + Path to the root level directory for the ecfhome. + + Returns + ------- + None + """ + + folder_path = f"{ecfhome}/{self.name()}" + if not os.path.exists(folder_path): + os.makedirs(folder_path) + + +class EcfFamily(ecflow.Family, EcfRoot): + """ + Extends the ecflow.Family and EcfRoot classes to provide the folder + generation structure for families at the ecfhome location. + + Methods + ------- + generate_folders(ecfhome,suite,parents) + Uses the ecfhome as the root, then looks in the suite directory to + determine if the family name has been created. It also splits out the + parent folders to put everything in the proper tier. + """ + + def generate_folders(self, ecfhome, suite, parents): + """ + Uses the ecfhome as the root, then looks in the suite directory to + determine if the family name has been created. It also splits out the + parent folders to put everything in the proper tier. + + Parameters + ---------- + ecfhome : str + The root level directory as a string + suite : str + The suite name to be appended to the ecfhome. + parents : str + Any of the parent families to ensure that the folder structure is + setup correctly. + + Returns + ------- + None + """ + if parents: + folder_path = f"{ecfhome}/{suite}/{parents.replace('>','/')}/{self.name()}" + else: + folder_path = f"{ecfhome}/{suite}/{self.name()}" + if not os.path.exists(folder_path): + os.makedirs(folder_path) + + +class EcfTask(ecflow.Task, EcfRoot): + """ + Extends the ecflow.Task and EcfRoot classes to allow the task scripts to + be defined and then also created. If there is a template associated with + the task, it will use that to create the script name in the appropriate + location. + + Methods + ------- + setup_script(repopath,template) + Sets the parameters for the script if there is a repo path for the + script repo that isn't the default and template if that is also + defined for a task. 
+ + generate_ecflow_task(ecfhome,suite,parents) + Uses the parameters passed in to define the folder path and then + looks in the script repository for the task name with a .ecf suffix or + template name with a .ecf suffix and then copies that script content + from the script repo over to the destination provided by the parameters + """ + + def setup_script(self, repopath, template): + """ + Sets the parameters for the script if there is a repo path for the + script repo that isn't the default and template if that is also + defined for a task. + + Parameters + ---------- + scriptrepo : str + Path to the script repository used to populate the destination. + template : str + The template script if needed so the application will use that + instead of searching for the task name in the script repo. + + Returns + ------- + None + """ + self.scriptrepo = repopath + self.template = template + + def generate_ecflow_task(self, ecfhome, suite, parents): + """ + Uses the parameters passed in to define the folder path and then + looks in the script repository for the task name with a .ecf suffix or + template name with a .ecf suffix and then copies that script content + from the script repo over to the destination provided by the parameters + + Parameters + ---------- + ecfhome : str + Path to the root level directory to place the scripts. + suite : str + Suite name to add the scripts to that will be appended to the + ecfhome + parents: str + Any parent folders that are appended to the ecfhome and suite + folders. + + Returns + ------- + None + """ + if self.template == "skip": + return + script_name = f"{self.name()}.ecf" + ecfscript = None + search_script = f"{self.template}.ecf" if self.template is not \ + None else script_name + if parents: + script_path = f"{ecfhome}/{suite}/{parents.replace('>','/')}/{script_name}" + else: + script_path = f"{ecfhome}/{suite}/{script_name}" + for root, dirs, files in os.walk(self.scriptrepo): + if search_script in files and ecfscript is None: + ecfscript = os.path.join(root, search_script) + elif script_name in files: + print(f"More than one script named {script_name}. " + "Using the first one found.") + try: + if ecfscript is not None: + shutil.copyfile(ecfscript, script_path, follow_symlinks=True) + else: + raise ConfigurationError + except ConfigurationError: + print(f"Could not find the script {search_script}. Exiting build") + sys.exit(1) + +# define Python user-defined exceptions + + +class Error(Exception): + """Base class for other exceptions""" + pass + + +class RangeError(Error): + """Raised when the range in the configuration file is incorrect""" + pass + + +class ConfigurationError(Error): + """Raised when there is an error in the configuration file.""" + pass diff --git a/workflow/ecFlow/ecflow_setup.py b/workflow/ecFlow/ecflow_setup.py new file mode 100644 index 0000000000..1170ebc479 --- /dev/null +++ b/workflow/ecFlow/ecflow_setup.py @@ -0,0 +1,726 @@ +#!/usr/bin/env python3 + +""" + PROGRAM: + ecflow_setup: This setup is to read in the configuration from the YAML + file passed in by setup_workflow.py, populate the environment variables + and then pass that to the ecflow_definitions.py module to create the + suite definitions and to break down the lists, add triggers, etc. + AUTHOR: + Kyle Nevins + kyle.nevins@noaa.gov + FILE DEPENDENCIES: + None + OUTPUT: + This will return a dictionary object of suites and then save that to + a file based on the calls from the setup_workflow.py module. 
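+    EXAMPLE:
+        A minimal, illustrative sketch of the YAML layout this module
+        expects; the suite, family, and task names below are placeholders
+        only:
+
+        suites:
+            fcstonly:
+                edits:
+                    RUN: 'gfs'
+                nodes:
+                    gfs:
+                        tasks:
+                            jgfs_atmos_prep:
+                            jgfs_forecast:
+                                triggers:
+                                    - task: jgfs_atmos_prep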
+""" +import yaml +import collections.abc +import os +import re +import sys +import datetime +from ecFlow.ecflow_definitions import Ecflowsuite, EcfFamilyNode + +try: + from ecflow import Defs +except ImportError as err: + raise ImportError(f"Error: Could not import ecflow module: {err}") + + +class Ecflowsetup: + """ + This class pulls in the configurations from the ecflow config file. Then + it calls the ecflow_definitions.py module to create a suite definition + from each of the suites in the YAML file. Then each of the edits, tasks, + triggers, etc. are processed and added to the suite. + + All suites are then put together as part of a definition file and finally + that file is saved. + + Attributes + ---------- + suite_array : dict + A dictionary that contains ecflow.Suite objects provided by the ecflow + module. + DEFS : ecflow.Defs + A definition object provided by the ecflow module that holds all of the + suites. + + Methods + ------- + generate_workflow() + This is the main method, used to setup the suites from the YAML file and + then call each of the supporting methods, like edits, tasks, etc. to + populate the suites with each of the items. + + raiseexception(e) + This is just a simple method that is called if an exception is raised to + print out the error message and then call a sys.exit(1) so the app stops + + save() + This saves the suite definition file to the save dir with the file name + ecflow_suite.def. + + print() + Prints out what would be populated to the suite definition file. + + add_environment_edits(suite) + The suite is passed in and the edits from the environment are added. The + environment edits are defined in the init method. + + check_dict(node, key, key_is_dict=True) + This function checks for the presence of they key inside of the node. + Used to identify it various addons need to be added into the suite. + + add_suite_edits(suite, suite_dict) + Method used to parse through the YAML file and identify any edits that + apply to the suite itself and parse them so they can be added. + + process_definition_header() + If there is an externs section in the YAML file for a suite, this adds + the externs to the header. + + add_families(suite, nodes, parents=None) + Parses through the YAML file contents and adds the nodes that are + identified as families to either the parent suite or the parent family. + + add_tasks_and_edits(suite,nodes,parents=None) + After the families are added to the suite, the individual tasks, edits, + repeats, defstatus, and room for other task addons are appended. + + add_triggers_and_events(suite, nodes) + After the families and tasks are added, then the triggers and events + are processed. This needs to come after the families and tasks and + independently because of the interdependencies that exist. For example + a trigger for a task cannot be added until the task exists, otherwise + and error will be thrown. + """ + + def __init__(self, args, env_configs): + """ + Parameters + ---------- + args : dict + The arguments passed in by the command line. + env_configs : dict + The environment variables pulled in from the experiement directory. 
+ + Returns + ------- + None + """ + + # Setup the base variables + self.args = args + self.env_configs = env_configs + self.suite_array = {} + self.DEFS = Defs() + + # Load in the ecflow configurations + base_ecflowconfig = load_ecflow_config(f'{args.ecflow_config}') + self.ecfconf = update_ecflow_config(base_ecflowconfig, env_configs) + + self.ecfhome = env_configs['base']['ECFgfs'] + + if 'scriptrepo' in self.ecfconf.keys(): + self.env_configs['base']['scriptrepo'] = self.ecfconf['scriptrepo'] + elif 'scriptrepo' not in self.env_configs['base'].keys(): + self.env_configs['base']['scriptrepo'] = f"{self.ecfhome}/scripts" + self.scriptrepo = self.env_configs['base']['scriptrepo'] + + # Setup the default edits from the environment + self.environment_edits = [ + 'ACCOUNT', + 'queue', + 'machine', + 'RUN_ENVIR', + ] + + def generate_workflow(self): + """ + This is the main method, used to setup the suites from the YAML file and + then call each of the supporting methods, like edits, tasks, etc. to + populate the suites with each of the items. + + Methods + ------- + get_suite_names(suitename) + In the event that the suite uses a list definition [X,Y,Z...], this + method will generate an array of the properly formatted names. + + Parameters + ---------- + None + + Returns + ------- + None + """ + + def get_suite_names(suitename): + """ + In the event that the suite uses a list definition [X,Y,Z...], this + method will generate an array of the properly formatted names. + + This is internal to the generate_workflow method and is only called + from within. The names are split out using regex if it is a list. + + Parameters + ---------- + suitename : str + A string representation of the + + Returns + ------- + array + If not a list, returns an array with the suitename paraemeter as + the only object. If it is a list, return all the names. + """ + + # Check to see if the name actually has a list, if not return an + # array with just the suite name as object in place 0. + if not re.search(r".*\[.*\].*", suitename): + return [f"{suitename}"] + + # If the name does have a list, break apart the prefix and suffix + # from the list and then run it through a for loop to get all + # possible values. + name_token = re.search(r"(.*)\[(.*)\](.*)", suitename) + base = name_token.group(1).strip() + list_items = name_token.group(2).strip().split(',') + suffix = name_token.group(3).strip() + name_array = [] + for item in list_items: + name_array.append(f"{base}{item}{suffix}") + return name_array + + # Add in extern headers + self.process_definition_header() + + # Process each of the suites + for suite in self.ecfconf['suites'].keys(): + if suite not in {'externs', 'edits'}: + for suite_name in get_suite_names(suite): + # The first thing to do is add in all of the families and + # tasks. Triggers and edits cannot be added until the tasks + # and families are parsed. 
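+                    # If a previous entry already created this suite name,
+                    # reuse that suite object; otherwise start a new
+                    # Ecflowsuite rooted at the ECFgfs path.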
+ if suite_name not in self.suite_array.keys(): + new_suite = Ecflowsuite(suite_name, self.env_configs['base']['ECFgfs']) + else: + new_suite = self.suite_array[suite_name] + if new_suite.get_suite_name() not in self.suite_array.keys(): + self.add_environment_edits(new_suite) + self.add_suite_edits(new_suite, self.ecfconf['suites'][suite]) + if self.check_dict(self.ecfconf['suites'][suite], 'nodes'): + self.add_families(new_suite, self.ecfconf['suites'][suite]['nodes']) + self.add_tasks_and_edits(new_suite, self.ecfconf['suites'][suite]['nodes']) + self.suite_array[new_suite.get_suite_name()] = new_suite + + # Now that the families and tasks are setup, run through the triggers + # and events and add them to the respective tasks/family objects. + for suite in self.ecfconf['suites'].keys(): + if suite not in {'externs', 'edits'}: + for suite_name in get_suite_names(suite): + if self.check_dict(self.ecfconf['suites'][suite], 'nodes'): + self.add_triggers_and_events(self.suite_array[suite_name], + self.ecfconf['suites'][suite]['nodes']) + + # Add each suite to the definition object that will be used for the save + # or print. + for suite_name, suite in self.suite_array.items(): + self.DEFS += suite.get_suite() + + def raiseexception(self, e): + """ + This is just a simple method that is called if an exception is raised to + print out the error message and then call a sys.exit(1) so the app stops + + Calling this method will cause the application to exit with a status + code of 1. + + Parameters + ---------- + e : str + The error in string format to print out. + + Returns + ------- + None + """ + + print(e) + sys.exit(1) + + def save(self): + """ + This saves the suite definition file to the save dir with the file name + ecflow_suite.def. + + Parameters + ---------- + None + + Returns + ------- + None + """ + + print("Saving definition File") + savedir = self.args.savedir + defs_file = f"{savedir}/ecflow_suite.def" + self.DEFS.save_as_defs(defs_file) + + def print(self): + """ + Prints out what would be populated to the suite definition file. + + Parameters + ---------- + None + + Returns + ------- + None + """ + print(self.DEFS.check()) + print(self.DEFS) + + def add_environment_edits(self, suite): + """ + The suite is passed in and the edits from the environment are added. The + environment edits are defined in the init method. + + This method assumes that there are environment edits that have been set + by the experiement setup. + + Parameters + ---------- + suite : str + The name of the suite that will be used to add the environment edits + + Returns + ------- + None + """ + + # Add in the ECF Home and ECF Include edits. + suite.add_edit({'ECF_HOME': self.ecfhome, 'ECF_INCLUDE': self.ecfhome}) + + # Add in the edits for the environment. + for edit in self.environment_edits: + edit = edit.upper() + if (edit in self.env_configs['base'].keys() and + self.env_configs['base'][edit] is not None): + edit_dict = {edit: self.env_configs['base'][edit]} + elif (edit.lower() in self.env_configs['base'].keys() and + self.env_configs['base'][edit.lower()] is not None): + edit_dict = {edit: self.env_configs['base'][edit.lower()]} + suite.add_edit(edit_dict) + + def check_dict(self, node, key, key_is_dict=True): + """ + This function checks for the presence of they key inside of the node. + Used to identify it various addons need to be added into the suite. + + If the node is a dict, it checks for the presence of the key but it also + needs to know if the key it is looking for is a dictionary or not. 
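+        For example, check_dict(node, 'edits') is only true when
+        node['edits'] is itself a dictionary, while
+        check_dict(node, 'repeat', False) is true whenever the 'repeat' key
+        is present.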
+ + Parameters + ---------- + node : dict or str + The dictionary or string object to search for the presence of the + key + key : str + The search string to look for in the node objects + key_is_dict : bool + Checks if the key is a dictionary or if it should be searching for + a string. + + Returns + ------- + bool + True if the key is present, false otherwise. + """ + + if isinstance(node, dict) and f'{key}' in node.keys(): + if key_is_dict and isinstance(node[f'{key}'], dict): + return True + elif not key_is_dict: + return True + else: + return False + + def add_suite_edits(self, suite, suite_dict): + """ + Method used to parse through the YAML file and identify any edits that + apply to the suite itself and parse them so they can be added. + + Parameters + ---------- + suite : string + Name of the suite that needs the edits added. + suite_dict : + The dictionary for the suite that was passed in. + + Returns + ------- + None + """ + + # Baseline edits + if 'edits' in self.ecfconf['suites'].keys(): + suite.add_edit(self.ecfconf['suites']['edits']) + + # Setup sutite specific edits + if type(suite_dict) is dict and 'edits' in suite_dict.keys(): + suite.add_edit(suite_dict['edits']) + + def process_definition_header(self): + """ + If there is an externs section in the YAML file for a suite, this adds + the externs to the header. + + Parameters + ---------- + None + + Returns + ------- + None + """ + + if 'externs' in self.ecfconf.keys(): + for extern in self.ecfconf['externs']: + self.DEFS.add_extern(extern) + + def add_families(self, suite, nodes, parents=None, parent_node=None): + """ + Parses through the YAML file contents and adds the nodes that are + identified as families to either the parent suite or the parent family. + + This function is recursive to build the family architecture. + + While adding families, this method also adds in the edits, repeats, + defstatus, and time parameters to the families. + + Parameters + ---------- + suite : str + The suite that the families are to be added to + nodes : dict + The nodes within the suite, can be families or tasks but only the + families are processed in this method. + parents : str + If this family is not a top level one for the suite, this string is + the list of families that came before it, used to populate the + dictionary object in the ecflow_definitions module. + parent_node : dict + This is the node for the parent object. Separate from the parents + object, this contains the full name of the parent. The parents + object is a string and doesn't contain the information for any + loop object. 
+ + Returns + ------- + None + """ + + for item in nodes.keys(): + if isinstance(nodes[item], dict) and item not in {'edits', + 'tasks', + 'triggers'}: + family_node = EcfFamilyNode(item, parent_node) + suite.add_ecfsuite_node(item, family_node) + for family in family_node.get_full_name_items(): + suite.add_family(family, parents) + index = family_node.get_full_name_items().index(family) + if parents: + family_path = f"{parents}>{family}" + else: + family_path = family + if self.check_dict(nodes[item], 'edits'): + suite.add_family_edits(nodes[item]['edits'], + family_path, family_node, index) + if self.check_dict(nodes[item], 'repeat', False): + suite.add_repeat(nodes[item]['repeat'], family_path) + if self.check_dict(nodes[item], 'defstatus', False): + suite.add_defstatus(nodes[item]['defstatus'], + family_path) + self.add_families(suite, nodes[item], + family_path, family_node) + + def add_tasks_and_edits(self, suite, nodes, + parents=None, parent_node=None, + index=None): + """ + After the families are added to the suite, the individual tasks, edits, + repeats, defstatus, and room for other task addons are appended. + + This is a recursive function that parses through the whole dictionary + of tasks and families to identify any tasks and add them to a family. + + This also adds in the defstatus, ediuts, repeats, times, etc. for the + tasks. + + Parameters + ---------- + suite : str + The suite the tasks need to be added to. + nodes : dict + Contains all the tasks and families for the parent node. + parents : str + The parent family for any of the tasks + parent_node : dict + This is the actual parent node that would contain any looping + information or range information unlike the parent string which + contains the full name of the parents. + index : int + This is the index position of the current node being worked. This + is tracked so if the current node relies on the parent index, this + tells the current node what position object to use. + + Returns + ------- + None + """ + + for item in nodes.keys(): + if isinstance(nodes[item], dict) and item == 'tasks': + for task in nodes['tasks'].keys(): + if self.check_dict(nodes['tasks'][task], 'template', False): + task_template = nodes['tasks'][task]['template'] + else: + task_template = None + updated_task = find_env_param(task, 'env.', + self.env_configs) + suite.add_task(updated_task, parents, + self.scriptrepo, task_template, + parent_node, index) + if self.check_dict(nodes['tasks'][task], + 'edits'): + suite.add_task_edits(updated_task, + nodes['tasks'][task]['edits'], + parent_node, index) + if self.check_dict(nodes['tasks'][task], + 'repeat', False): + suite.add_task_repeat(updated_task, + nodes['tasks'][task]['repeat'], + parent_node, index) + if self.check_dict(nodes['tasks'][task], + 'defstatus', False): + suite.add_task_defstatus(updated_task, + nodes['tasks'] + [task]['defstatus']) + + elif isinstance(nodes[item], dict) and item not in {'edits', + 'triggers'}: + family_node = EcfFamilyNode(item, parent_node) + for family in family_node.get_full_name_items(): + index = family_node.get_full_name_items().index(family) + if parents: + family_path = f"{parents}>{family}" + else: + family_path = family + self.add_tasks_and_edits(suite, nodes[item], + family_path, family_node, index) + + def add_triggers_and_events(self, suite, nodes, parents=None, + parent_node=None, index=None): + """ + After the families and tasks are added, then the triggers and events + are processed. 
This needs to come after the families and tasks and + independently because of the interdependencies that exist. For example + a trigger for a task cannot be added until the task exists, otherwise + and error will be thrown. + + This is a recursive function and will parse through each family/task + to identify the work. + + Parameters + ---------- + suite : str + The suite to key off for adding the triggers + nodes : dict + The families/tasks that need to be parsed. + parents : str + The parent family for any of the tasks + parent_node : dict + This is the actual parent node that would contain any looping + information or range information unlike the parent string which + contains the full name of the parents. + index : int + This is the index position of the current node being worked. This + is tracked so if the current node relies on the parent index, this + tells the current node what position object to use. + + Returns + ------- + None + """ + + for item in nodes.keys(): + if self.check_dict(nodes[item], 'triggers', False): + updated_family = find_env_param(item, 'env.', + self.env_configs) + suite.add_suite_triggers(updated_family, + nodes[item]['triggers'], + self.suite_array, parents, + parent_node, index) + elif isinstance(nodes[item], dict) and item == 'tasks': + for task in nodes['tasks'].keys(): + updated_task = find_env_param(task, 'env.', + self.env_configs) + if self.check_dict(nodes['tasks'][task], 'events', False): + suite.add_task_events(updated_task, + nodes['tasks'][task]['events'], + parent_node, index) + if self.check_dict(nodes['tasks'][task], 'triggers', False): + suite.add_suite_triggers(updated_task, + nodes['tasks'][task]['triggers'], + self.suite_array, parents, + parent_node, index) + elif isinstance(nodes[item], dict): + family_node = EcfFamilyNode(item, parent_node) + for family in family_node.get_full_name_items(): + index = family_node.get_full_name_items().index(family) + if parents: + family_path = f"{parents}>{item}" + else: + family_path = item + self.add_triggers_and_events(suite, nodes[item], + family_path, family_node, + index) + + +def load_ecflow_config(configfile): + """ + This is the function to safely load the configuration file for the ecflow + environment. This is the base YAML that is built specifically for this + application and then returns it. + + Parameters + ---------- + configfile : str + The path to the configuration file that is to be loaded as part of the + ecflow config. + + Returns + ------- + dict + The dictionary results of the YAML safe load from the configuration + file. + """ + + with open(configfile, 'r') as file: + base_config = yaml.safe_load(file) + return base_config + + +def find_env_param(node, value, envconfig): + """ + Since there are components of the configuration that might get passed in + that are supposed to be replaced by environment variables AFTER the + configuration file has been loaded, this function is called in some of the + Ecflowsetup functions to allow the replacement of those parameters as + needed. + + Parameters + ---------- + node : dict + A dictionary object of the items that need to be scanned for replacing + value : str + A string object that is the prefix to be scanned and then the value + identified after identifier string is replaced with an environment + variable. + envconfig : dict + The dictionary of existing environment variables that are read in from + the experiment setup. 
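+
+    As an illustration (the value of FHMAX_GFS is hypothetical), a node
+    string of ``jgfs_atmos_post_f(2,env.FHMAX_GFS)`` with ``value='env.'``
+    and ``FHMAX_GFS=120`` found in the environment or in
+    ``envconfig['base']`` is returned as ``jgfs_atmos_post_f(2, 120 )``;
+    the substituted value is wrapped in single spaces when the string is
+    re-assembled.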
+ + Returns + ------- + new_node : dict + The updated dictionary object that will replace the node object that + was passed in when the function was called. + """ + + new_node = node + if value in node: + variable_lookup = re.search(fr".*{value}([\dA-Za-z_]*)", node).group(1).strip() + if variable_lookup in os.environ: + if isinstance(os.environ[variable_lookup], datetime.datetime): + new_variable = os.environ[variable_lookup].strftime("%Y%m%d%H") + else: + new_variable = os.environ[variable_lookup] + else: + if isinstance(envconfig['base'][variable_lookup], + datetime.datetime): + new_variable = envconfig['base'][variable_lookup].strftime("%Y%m%d%H") + else: + new_variable = envconfig['base'][variable_lookup] + search_key = re.search(r"(.*)(env\.[\dA-Za-z_]*)(.*)", node) + new_node = f"{search_key.group(1)} {new_variable} {search_key.group(3)}" + return new_node + + +def update_ecflow_config(configfile, envconfig): + """ + After the YAML file that drives the application is loaded in, the configs + need to be updated with anything that has the env. prefix to it and replace + that value with the environment variable. + + Parameters + ---------- + configfile : dict + The dictionary of the YAML configuration file read in. + envconfig : dict + The dictionary of objects that were read in from the experiment setup + on the supercomputer. + + Returns + ------- + config : dict + The updated configuration with the environment variables replaced. + """ + + def runupdate(nested_dict, value): + """ + To scan through the entire nested dictionary the run update was an easy + local function to use to provide recursion given that the parent + function did not work properly when trying to use a recursive call. + + Parameters + ---------- + nested_dict : dict + The nested dictionary to scan and replace the values. + value : str + The string to search for the replacement, currently set to env. + + Returns + ------- + nested_dict : dict + The updated dictionary with all of the values replaced as necessary. 
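+
+        As an illustration, suite edits such as ``CYC: env.ARCH_CYC`` or
+        ``EDATE: env.EDATE`` (as in the sample ecflow_build.yml) have their
+        values replaced wholesale with the matching environment variable or
+        ``envconfig['base']`` entry; unlike find_env_param, no surrounding
+        text in the value is preserved.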
+ """ + for k, v in nested_dict.items(): + if isinstance(v, str) and value in v: + lookup = v.split('.') + variable_lookup = re.findall(r"[\dA-Za-z_]*", lookup[1])[0] + if variable_lookup in os.environ: + if isinstance(os.environ[variable_lookup], datetime.datetime): + nested_dict[k] = os.environ[variable_lookup].strftime("%Y%m%d%H") + else: + nested_dict[k] = os.environ[variable_lookup] + + else: + if isinstance(envconfig['base'][variable_lookup], datetime.datetime): + envvalue = envconfig['base'][variable_lookup].strftime("%Y%m%d%H") + else: + envvalue = envconfig['base'][variable_lookup] + nested_dict[k] = envvalue + elif isinstance(v, collections.abc.Mapping): + nested_dict[k] = runupdate(v, value) + return nested_dict + + config = runupdate(configfile, 'env.') + return config diff --git a/workflow/ecflow_build.yml b/workflow/ecflow_build.yml new file mode 100644 index 0000000000..4ca70da9a2 --- /dev/null +++ b/workflow/ecflow_build.yml @@ -0,0 +1,63 @@ +--- +# scriptrepo: /path/to/scripts +externs: +- "/prod18/enkfgdas/post" +suites: + fcstonly: + edits: + CYC: env.ARCH_CYC + EDATE: env.EDATE + nodes: + GFSApp: + edits: + NET: 'gfs' + gfs: + repeat: "2022032400 to 2022042400 by 18:0" + edits: + RUN: 'gfs' + atmos: + tasks: + jgfs_forecast: + triggers: + - task: jgfs_atmos_post_f( 2,1 ) + - task: jgfs_forecast + suite: fcstplus + post: + tasks: + jgfs_atmos_post_manager[ 1,2 ]: + template: jgfs_atmos_post_manager + triggers: + - task: jgfs_forecast + state: [active, complete] + operand: OR + events: + - "release_post(4)f" + jgfs_atmos_post_f(2,env.FHMAX_GFS): + template: jgfs_atmos_post_master + triggers: + - task: jgfs_atmos_post_manager[1,2] + event: "release_post(1,2)f" + events: + - "test_event" + edits: + FHRGRP: "( 1,,6 )" + FHRGRP2: [ a,b,c,d ] + FHRLST: "f[ a,b,3,4 ] -testing" + FHR: "f(1,6,)" + HR: "( )" + TEST: 'Test Edit' + wave: + init: + tasks: + jgfs_wave_init: + jgfs_forecast: + fcstplus: + edits: + CYC: '06' + nodes: + nonGFSApp: + tasks: + jgfs_forecast: + triggers: + - task: jgfs_getic # TODO: Not sure about this, leave as is + suite: fcstonly diff --git a/workflow/gsl_template_hera.xml b/workflow/gsl_template_hera.xml new file mode 100644 index 0000000000..6d3965076b --- /dev/null +++ b/workflow/gsl_template_hera.xml @@ -0,0 +1,159 @@ + + + + + + + + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3 + batch + hera + 00:02:00 + 1:ppn=1:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3 + batch + hera + 02:40:00 + 101:ppn=40:tpp=1 + + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + 
&ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3 + batch + hera + 01:00:00 + 1:ppn=40:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/scratch1/NCEPDEV/global/glopara/com + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + FHRGRP#grp# + FHRLST#lst# + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + diff --git a/workflow/gsl_template_jet.xml b/workflow/gsl_template_jet.xml new file mode 100644 index 0000000000..3deba4e473 --- /dev/null +++ b/workflow/gsl_template_jet.xml @@ -0,0 +1,279 @@ + + + + + + + + + + + + + +]> + + + + &EXPDIR;/logs/@Y@m@d@H.log + + + 202211090000 202211090000 24:00:00 + + + + &JOBS_DIR;/makeinit_link.sh + + &PSLOT;_gfsinit_@H + gsd-fv3-dev + batch + xjet + 00:02:00 + 1:ppn=1:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsinit.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ICSDIR&ICSDIR; + CASE&CASE; + COMPONENT&COMPONENT; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H/model_data/atmos/input + + + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/gfs_data.tile6.nc + &ICSDIR;/@Y@m@d@H/gfs/&CASE;/INPUT/sfc_data.tile6.nc + + + + + + + + + &JOBS_DIR;/fcst.sh + + &PSLOT;_gfsfcst_@H + gsd-fv3-dev + batch + xjet + 04:00:00 + 168:ppn=24:tpp=1 + + + + + + + --export=NONE + + &ROTDIR;/logs/@Y@m@d@H/gfsfcst.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + + + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/input/sfc_data.tile6.nc + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/restart/@Y@m@d.@H0000.sfcanl_data.tile6.nc + + + + + + + + + _f000-f000 _f006-f006 _f012-f012 _f018-f018 _f024-f024 _f030-f030 _f036-f036 _f042-f042 _f048-f048 _f054-f054 _f060-f060 _f066-f066 _f072-f072 _f078-f078 _f084-f084 _f090-f090 _f096-f096 _f102-f102 _f108-f108 _f114-f114 _f120-f120 _f126-f126 _f132-f132 _f138-f138 _f144-f144 _f150-f150 _f156-f156 _f162-f162 _f168-f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + f000 f006 f012 f018 f024 f030 f036 f042 f048 f054 f060 f066 f072 f078 f084 f090 f096 f102 f108 f114 f120 f126 f132 f138 f144 f150 f156 f162 f168 + + + + &JOBS_DIR;/post.sh + + &PSLOT;_gfspost#grp#_@H + gsd-fv3-dev + batch + xjet + 00:20:00 + + 1:ppn=24:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfspost#grp#.log + + RUN_ENVIRemc + HOMEgfs&HOMEgfs; + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + 
FHRGRP#grp# + FHRLST#lst# + + + &ROTDIR;/gfs.@Y@m@d/@H//model_data/atmos/history/gfs.t@Hz.atm.log#dep#.txt + + + + + + + + + &JOBS_DIR;/vrfy.sh + + &PSLOT;_gfsvrfy_@H + gsd-fv3-dev + batch + xjet + 00:30:00 + 1:ppn=1:tpp=1 + &NATIVE_STR; + + &ROTDIR;/logs/@Y@m@d@H/gfsvrfy.log + + RUN_ENVIRemc + HOMEgfs/mnt/lfs1/BMC/gsd-fv3-test/jhender/scratch/gw_19may23 + EXPDIR&EXPDIR; + ROTDIR&ROTDIR; + ATCFNAME&ATCFNAME; + NETgfs + CDUMPgfs + RUNgfs + CDATE@Y@m@d@H + PDY@Y@m@d + cyc@H + COMROOT/lfs4/HFIP/hfv3gfs/glopara/com + DATAROOT&ROTDIR;/../RUNDIRS/&PSLOT; + + + + + + + + + diff --git a/workflow/hosts.py b/workflow/hosts.py new file mode 100644 index 0000000000..b97ac67d89 --- /dev/null +++ b/workflow/hosts.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python3 + +import os +from pathlib import Path + +from pygw.yaml_file import YAMLFile + + +__all__ = ['Host'] + + +class Host: + """ + Gather Host specific information. + """ + + SUPPORTED_HOSTS = ['HERA', 'ORION', 'JET', + 'WCOSS2', 'S4', 'CONTAINER'] + + def __init__(self, host=None): + + detected_host = self.detect() + + if host is not None and host != detected_host: + raise ValueError(f'detected host: "{detected_host}" does not match host: "{host}"') + + self.machine = detected_host + self.info = self._get_info + self.scheduler = self.info['SCHEDULER'] + + @classmethod + def detect(cls): + + machine = 'NOTFOUND' + container = os.getenv('SINGULARITY_NAME', None) + + if os.path.exists('/scratch1/NCEPDEV'): + machine = 'HERA' + elif os.path.exists('/work/noaa'): + machine = 'ORION' + elif os.path.exists('/lfs4/HFIP'): + machine = 'JET' + elif os.path.exists('/lfs/f1'): + machine = 'WCOSS2' + elif os.path.exists('/data/prod'): + machine = 'S4' + elif container is not None: + machine = 'CONTAINER' + + if machine not in Host.SUPPORTED_HOSTS: + raise NotImplementedError(f'This machine is not a supported host.\n' + + 'Currently supported hosts are:\n' + + f'{" | ".join(Host.SUPPORTED_HOSTS)}') + + return machine + + @property + def _get_info(self) -> dict: + + hostfile = Path(os.path.join(os.path.dirname(__file__), f'hosts/{self.machine.lower()}.yaml')) + try: + info = YAMLFile(path=hostfile) + except FileNotFoundError: + raise FileNotFoundError(f'{hostfile} does not exist!') + except IOError: + raise IOError(f'Unable to read from {hostfile}') + except Exception: + raise Exception(f'unable to get information for {self.machine}') + + return info diff --git a/workflow/hosts/container.yaml b/workflow/hosts/container.yaml new file mode 100644 index 0000000000..879be0bf31 --- /dev/null +++ b/workflow/hosts/container.yaml @@ -0,0 +1,24 @@ +BASE_GIT: '' +DMPDIR: '/home/${USER}' +PACKAGEROOT: '' +COMROOT: '' +COMINsyn: '' +HOMEDIR: '/home/${USER}' +STMP: '/home/${USER}' +PTMP: '/home/${USER}' +NOSCRUB: $HOMEDIR +SCHEDULER: none +ACCOUNT: '' +QUEUE: '' +QUEUE_SERVICE: '' +PARTITION_BATCH: '' +PARTITION_SERVICE: '' +CHGRP_RSTPROD: 'YES' +CHGRP_CMD: 'chgrp rstprod' +HPSSARCH: 'NO' +HPSS_PROJECT: emc-global +LOCALARCH: 'NO' +ATARDIR: '${NOSCRUB}/archive_rotdir/${PSLOT}' +MAKE_NSSTBUFR: 'NO' +MAKE_ACFTBUFR: 'NO' +SUPPORTED_RESOLUTIONS: ['C96', 'C48'] diff --git a/workflow/hosts/hera.yaml b/workflow/hosts/hera.yaml new file mode 120000 index 0000000000..45a868d636 --- /dev/null +++ b/workflow/hosts/hera.yaml @@ -0,0 +1 @@ +hera_gsl.yaml \ No newline at end of file diff --git a/workflow/hosts/hera_emc.yaml b/workflow/hosts/hera_emc.yaml new file mode 100644 index 0000000000..ee12114484 --- /dev/null +++ b/workflow/hosts/hera_emc.yaml @@ -0,0 +1,24 @@ +BASE_GIT: 
'/scratch1/NCEPDEV/global/glopara/git' +DMPDIR: '/scratch1/NCEPDEV/global/glopara/dump' +PACKAGEROOT: '/scratch1/NCEPDEV/global/glopara/nwpara' +COMROOT: '/scratch1/NCEPDEV/global/glopara/com' +COMINsyn: '${COMROOT}/gfs/prod/syndat' +HOMEDIR: '/scratch1/NCEPDEV/global/${USER}' +STMP: '/scratch1/NCEPDEV/stmp2/${USER}' +PTMP: '/scratch1/NCEPDEV/stmp4/${USER}' +NOSCRUB: $HOMEDIR +ACCOUNT: fv3-cpu +SCHEDULER: slurm +QUEUE: batch +QUEUE_SERVICE: batch +PARTITION_BATCH: hera +PARTITION_SERVICE: service +CHGRP_RSTPROD: 'YES' +CHGRP_CMD: 'chgrp rstprod' +HPSSARCH: 'YES' +HPSS_PROJECT: emc-global +LOCALARCH: 'NO' +ATARDIR: '/NCEPDEV/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}' +MAKE_NSSTBUFR: 'NO' +MAKE_ACFTBUFR: 'NO' +SUPPORTED_RESOLUTIONS: ['C768', 'C384', 'C192', 'C96', 'C48'] diff --git a/workflow/hosts/hera_gsl.yaml b/workflow/hosts/hera_gsl.yaml new file mode 100644 index 0000000000..3d9d97064b --- /dev/null +++ b/workflow/hosts/hera_gsl.yaml @@ -0,0 +1,24 @@ +BASE_GIT: '/scratch1/NCEPDEV/global/glopara/git' +DMPDIR: '/scratch1/NCEPDEV/global/glopara/dump' +PACKAGEROOT: '/scratch1/NCEPDEV/global/glopara/nwpara' +COMROOT: '/scratch1/NCEPDEV/global/glopara/com' +COMINsyn: '${COMROOT}/gfs/prod/syndat' +HOMEDIR: '/scratch1/BMC/gsd-fv3-dev/NCEPDEV/global/${USER}' +STMP: '/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/' +PTMP: '/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa/FV3GFSrun/' +NOSCRUB: $HOMEDIR +ACCOUNT: gsd-fv3 +SCHEDULER: slurm +QUEUE: batch +QUEUE_SERVICE: batch +PARTITION_BATCH: hera +PARTITION_SERVICE: service +CHGRP_RSTPROD: 'YES' +CHGRP_CMD: 'chgrp rstprod' +HPSSARCH: 'YES' +HPSS_PROJECT: fim +LOCALARCH: 'NO' +ATARDIR: '/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}' +MAKE_NSSTBUFR: 'NO' +MAKE_ACFTBUFR: 'NO' +SUPPORTED_RESOLUTIONS: ['C768', 'C384', 'C192', 'C96', 'C48'] diff --git a/workflow/hosts/jet.yaml b/workflow/hosts/jet.yaml new file mode 120000 index 0000000000..ce0110730f --- /dev/null +++ b/workflow/hosts/jet.yaml @@ -0,0 +1 @@ +jet_gsl.yaml \ No newline at end of file diff --git a/workflow/hosts/jet_emc.yaml b/workflow/hosts/jet_emc.yaml new file mode 100644 index 0000000000..37fa746b87 --- /dev/null +++ b/workflow/hosts/jet_emc.yaml @@ -0,0 +1,24 @@ +BASE_GIT: '/lfs4/HFIP/hfv3gfs/glopara/git' +DMPDIR: '/lfs4/HFIP/hfv3gfs/glopara/dump' +PACKAGEROOT: '/lfs4/HFIP/hfv3gfs/glopara/nwpara' +COMROOT: '/lfs4/HFIP/hfv3gfs/glopara/com' +COMINsyn: '${COMROOT}/gfs/prod/syndat' +HOMEDIR: '/lfs4/HFIP/hfv3gfs/${USER}' +STMP: '/lfs4/HFIP/hfv3gfs/${USER}/stmp' +PTMP: '/lfs4/HFIP/hfv3gfs/${USER}/ptmp' +NOSCRUB: $HOMEDIR +ACCOUNT: hfv3gfs +SCHEDULER: slurm +QUEUE: batch +QUEUE_SERVICE: batch +PARTITION_BATCH: kjet +PARTITION_SERVICE: service +CHGRP_RSTPROD: 'YES' +CHGRP_CMD: 'chgrp rstprod' +HPSSARCH: 'YES' +HPSS_PROJECT: emc-global +LOCALARCH: 'NO' +ATARDIR: '/NCEPDEV/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}' +MAKE_NSSTBUFR: 'NO' +MAKE_ACFTBUFR: 'NO' +SUPPORTED_RESOLUTIONS: ['C384', 'C192', 'C96', 'C48'] diff --git a/workflow/hosts/jet_gsl.yaml b/workflow/hosts/jet_gsl.yaml new file mode 100644 index 0000000000..c1cde8b86d --- /dev/null +++ b/workflow/hosts/jet_gsl.yaml @@ -0,0 +1,25 @@ +BASE_GIT: '/lfs4/HFIP/hfv3gfs/glopara/git' +DMPDIR: '/lfs4/HFIP/hfv3gfs/glopara/dump' +PACKAGEROOT: '/lfs4/HFIP/hfv3gfs/glopara/nwpara' +COMROOT: '/lfs4/HFIP/hfv3gfs/glopara/com' +COMINsyn: '/lfs4/HFIP/hwrf-data/hwrf-input/SYNDAT-PLUS' +HOMEDIR: '/lfs1/BMC/gsd-fv3-test/NCEPDEV/global/$USER' +STMP: 
'/home/Judy.K.Henderson/scratch1-test/gw_19may23/FV3GFSrun' +PTMP: '/home/Judy.K.Henderson/scratch1-test/gw_19may23/FV3GFSrun' +NOSCRUB: $HOMEDIR +ACCOUNT: gsd-fv3-dev +SCHEDULER: slurm +QUEUE: batch +QUEUE_SERVICE: batch +PARTITION_BATCH: xjet +PARTITION_POST_BATCH: sjet +PARTITION_SERVICE: service +CHGRP_RSTPROD: 'YES' +CHGRP_CMD: 'chgrp rstprod' +HPSSARCH: 'YES' +HPSS_PROJECT: fim +LOCALARCH: 'NO' +ATARDIR: '/BMC/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}' +MAKE_NSSTBUFR: 'NO' +MAKE_ACFTBUFR: 'NO' +SUPPORTED_RESOLUTIONS: ['C768', 'C384', 'C192', 'C96', 'C48'] diff --git a/workflow/hosts/orion.yaml b/workflow/hosts/orion.yaml new file mode 100644 index 0000000000..095f126898 --- /dev/null +++ b/workflow/hosts/orion.yaml @@ -0,0 +1,24 @@ +BASE_GIT: '/work/noaa/global/glopara/git' +DMPDIR: '/work/noaa/rstprod/dump' +PACKAGEROOT: '/work/noaa/global/glopara/nwpara' +COMROOT: '/work/noaa/global/glopara/com' +COMINsyn: '${COMROOT}/gfs/prod/syndat' +HOMEDIR: '/work/noaa/global/${USER}' +STMP: '/work/noaa/stmp/${USER}' +PTMP: '/work/noaa/stmp/${USER}' +NOSCRUB: $HOMEDIR +SCHEDULER: slurm +ACCOUNT: fv3-cpu +QUEUE: batch +QUEUE_SERVICE: batch +PARTITION_BATCH: orion +PARTITION_SERVICE: service +CHGRP_RSTPROD: 'YES' +CHGRP_CMD: 'chgrp rstprod' +HPSSARCH: 'NO' +HPSS_PROJECT: emc-global +LOCALARCH: 'NO' +ATARDIR: '${NOSCRUB}/archive_rotdir/${PSLOT}' +MAKE_NSSTBUFR: 'NO' +MAKE_ACFTBUFR: 'NO' +SUPPORTED_RESOLUTIONS: ['C768', 'C384', 'C192', 'C96', 'C48'] diff --git a/workflow/hosts/s4.yaml b/workflow/hosts/s4.yaml new file mode 100644 index 0000000000..9c9866db21 --- /dev/null +++ b/workflow/hosts/s4.yaml @@ -0,0 +1,24 @@ +BASE_GIT: '/data/prod/glopara/git' +DMPDIR: '/data/prod/glopara/dump' +PACKAGEROOT: '/data/prod/glopara/nwpara' +COMROOT: '/data/prod/glopara/com' +COMINsyn: '${COMROOT}/gfs/prod/syndat' +HOMEDIR: '/data/users/${USER}' +STMP: '/scratch/users/${USER}' +PTMP: '/scratch/users/${USER}' +NOSCRUB: ${HOMEDIR} +ACCOUNT: star +SCHEDULER: slurm +QUEUE: s4 +QUEUE_SERVICE: serial +PARTITION_BATCH: s4 +PARTITION_SERVICE: serial +CHGRP_RSTPROD: 'NO' +CHGRP_CMD: 'ls' +HPSSARCH: 'NO' +HPSS_PROJECT: emc-global +LOCALARCH: 'NO' +ATARDIR: '${NOSCRUB}/archive_rotdir/${PSLOT}' +MAKE_NSSTBUFR: 'YES' +MAKE_ACFTBUFR: 'YES' +SUPPORTED_RESOLUTIONS: ['C384', 'C192', 'C96', 'C48'] diff --git a/workflow/hosts/wcoss2.yaml b/workflow/hosts/wcoss2.yaml new file mode 100644 index 0000000000..0f1086801e --- /dev/null +++ b/workflow/hosts/wcoss2.yaml @@ -0,0 +1,24 @@ +BASE_GIT: '/lfs/h2/emc/global/save/emc.global/git' +DMPDIR: '/lfs/h2/emc/global/noscrub/emc.global/dump' +PACKAGEROOT: '${PACKAGEROOT:-"/lfs/h1/ops/prod/packages"}' +COMROOT: '${COMROOT:-"/lfs/h1/ops/prod/com"}' +COMINsyn: '${COMROOT}/gfs/${gfs_ver:-"v16.2"}/syndat' +HOMEDIR: '/lfs/h2/emc/global/noscrub/${USER}' +STMP: '/lfs/h2/emc/stmp/${USER}' +PTMP: '/lfs/h2/emc/ptmp/${USER}' +NOSCRUB: $HOMEDIR +ACCOUNT: 'GFS-DEV' +SCHEDULER: pbspro +QUEUE: 'dev' +QUEUE_SERVICE: 'dev_transfer' +PARTITION_BATCH: '' +PARTITION_SERVICE: '' +CHGRP_RSTPROD: 'YES' +CHGRP_CMD: 'chgrp rstprod' +HPSSARCH: 'NO' +HPSS_PROJECT: emc-global +LOCALARCH: 'NO' +ATARDIR: '/NCEPDEV/${HPSS_PROJECT}/1year/${USER}/${machine}/scratch/${PSLOT}' +MAKE_NSSTBUFR: 'NO' +MAKE_ACFTBUFR: 'NO' +SUPPORTED_RESOLUTIONS: ['C768', 'C384', 'C192', 'C96', 'C48'] diff --git a/workflow/p8.sh b/workflow/p8.sh new file mode 100755 index 0000000000..c1c212f80a --- /dev/null +++ b/workflow/p8.sh @@ -0,0 +1,17 @@ +USER=Judy.K.Henderson +GITDIR=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw ## where 
your git checkout is located +COMROT=$GITDIR/FV3GFSrun ## default COMROT directory +EXPDIR=$GITDIR/FV3GFSwfm ## default EXPDIR directory +ICSDIR=/lfs1/BMC/gsd-fv3-test/rtfim/FV3ICS_L127 + +PSLOT=p8 +IDATE=2022110900 +EDATE=2022110900 +RESDET=768 ## 96 192 384 768 + +### gfs_cyc 1 00Z only; gfs_cyc 2 00Z and 12Z + +./setup_expt.py gfs forecast-only --pslot $PSLOT --gfs_cyc 1 \ + --idate $IDATE --edate $EDATE --resdet $RESDET \ + --comrot $COMROT --expdir $EXPDIR + diff --git a/workflow/prod.yml b/workflow/prod.yml new file mode 100644 index 0000000000..98755d2f86 --- /dev/null +++ b/workflow/prod.yml @@ -0,0 +1,414 @@ +--- +suites: + prod00: + edits: + CYC: '00' + prod06: + edits: + CYC: '06' + nodes: + gfs: + atmos: + post: + tasks: + jgfs_atmos_post_manager: + triggers: + - task: jgfs_atmos_tropcy_qc_reloc + suite: prod00 + prod[00,06]: + edits: + ECF_TRIES: '1' + ENVIR: 'prod' + PROJ: 'GFS' + PROJENVIR: 'DEV' + QUEUESHARED: 'dev_shared' + QUEUESERV: 'dev_transfer' + MACHINE_SITE: 'development' + nodes: + gfs: + edits: + RUN: 'gfs' + NET: 'gfs' + tasks: + jgfs_forecast: + triggers: + - task: jgfs_atmos_analysis + event: release_fcst + - task: jgfs_wave_prep + atmos: + obsproc: + dump: + tasks: + jgfs_atmos_tropcy_qc_reloc: + events: + - 'jtwc_bull_email' + prep: + tasks: + jgfs_atmos_emcsfc_sfc_prep: + triggers: + - task: jobsproc_gfs_atmos_dump + event: release_sfcprep + analysis: + tasks: + jgfs_atmos_analysis: + triggers: + - task: jobsproc_gfs_atmos_prep + - task: jgfs_atmos_emcsfc_sfc_prep + events: + - "release_fcst" + jgfs_atmos_analysis_calc: + triggers: + - task: jgfs_atmos_analysis + post: + tasks: + jgfs_atmos_post_manager: + triggers: + - task: jgfs_atmos_analysis + events: + - "release_postanl" + - "release_post( 384 )" + jgfs_atmos_post_anl: + template: skip + triggers: + - task: jgfs_atmos_post_manager + event: release_postanl + edits: + FHRGRP: '000' + FHRLST: 'anl' + HR: 'anl' + FHR: 'anl' + jgfs_atmos_post_f( 384 ): + template: jgfs_atmos_post_master + triggers: + - task: jgfs_atmos_post_manager + event: release_post( ) + edits: + FHRGRP: '( 1, )' + FHRLST: 'f( )' + FHR: 'f( )' + HR: '( )' + post_processing: + tasks: + jgfs_atmos_wafs_gcip: + triggers: + - task: jgfs_atmos_post_f003 + grib_wafs: + tasks: + jgfs_atmos_wafs_f000: + template: jgfs_atmos_wafs_master + triggers: + - task: jgfs_atmos_post_f000 + - task: jgfs_atmos_post_f120 + - task: jgfs_atmos_wafs_grib2 + edits: + FCSTHR: '000' + jgfs_atmos_wafs_f( 6,20,6 ): + template: jgfs_atmos_wafs_master + triggers: + - task: jgfs_atmos_post_f( ) + - task: jgfs_atmos_wafs_f( 0,,6 ) + edits: + FCSTHR: ( ) + grib2_wafs: + tasks: + jgfs_atmos_wafs_grib2: + triggers: + - task: jgfs_atmos_post_f000 + jgfs_atmos_wafs_grib2_0p25: + triggers: + - task: jgfs_atmos_post_f036 + jgfs_atmos_wafs_blending: + triggers: + - task: jgfs_atmos_wafs_grib2 + jgfs_atmos_wafs_blending_0p25: + triggers: + - task: jgfs_atmos_wafs_grib2_0p25 + bufr_sounding: + tasks: + jgfs_atmos_postsnd: + triggers: + - task: jgfs_atmos_post_manager + event: release_post000 + bulletins: + tasks: + jgfs_atmos_fbwind: + triggers: + - task: jgfs_atmos_post_f( 6,3,6 ) + awips_20km_1p0: + tasks: + jgfs_atmos_awips_f( 0,80,3 ): + template: jgfs_atmos_awips_master + triggers: + - task: jgfs_atmos_post_f( ) + edits: + FHRGRP: '( )' + FHRLST: 'f( )' + FCSTHR: '( )' + TRDRUN: 'YES' + jgfs_atmos_awips_f( 3,27,6 ): + edits: + TRDRUN: 'NO' + awips_g2: + tasks: + jgfs_atmos_awips_g2_f( 0,64,6 ): + template: jgfs_atmos_awips_g2_master + triggers: + - task: jgfs_atmos_post_f( 
) + edits: + FHRGRP: '( )' + FHRLST: 'f( )' + FCSTHR: '( )' + TRDRUN: 'YES' + gempak: + tasks: + jgfs_atmos_gempak: + triggers: + - task: jgfs_atmos_analysis + jgfs_atmos_gempak_meta: + triggers: + - task: jgfs_atmos_analysis + jgfs_atmos_gempak_ncdc_upapgif: + triggers: + - task: jgfs_atmos_gempak + - task: jgfs_atmos_gempak + jgfs_atmos_npoess_pgrb2_0p5deg: + triggers: + - task: jgfs_atmos_post_anl + state: active + - task: jgfs_atmos_post_anl + jgfs_atmos_pgrb2_spec_gempak: + triggers: + - task: jgfs_atmos_npoess_pgrb2_0p5deg + verf: + tasks: + jgfs_atmos_vminmon: + triggers: + - task: jgfs_atmos_analysis + wave: + init: + tasks: + jgfs_wave_init: + triggers: + - task: jobsproc_gfs_atmos_prep + prep: + tasks: + jgfs_wave_prep: + triggers: + - task: jgfs_wave_init + post: + tasks: + jgfs_wave_postsbs: + triggers: + - task: jgfs_atmos_post_manager + event: release_post000 + jgfs_wave_postpnt: + triggers: + - task: jgfs_forecast + jgfs_wave_post_bndpnt: + triggers: + - task: jgfs_atmos_post_manager + event: release_post180 + jgfs_wave_post_bndpntbll: + triggers: + - task: jgfs_atmos_post_manager + event: release_post180 + jgfs_wave_prdgen_gridded: + triggers: + - task: jgfs_wave_postsbs + state: active + - task: jgfs_wave_postsbs + operand: OR + jgfs_wave_prdgen_bulls: + triggers: + - task: jgfs_wave_postpnt + - task: jgfs_wave_postsbs + gempak: + tasks: + jgfs_wave_gempak: + triggers: + - task: jgfs_wave_postsbs + state: [active, complete] + operand: or + gdas: + edits: + RUN: 'gdas' + tasks: + jgdas_forecast: + triggers: + - task: jgdas_atmos_analysis + event: release_fcst + - task: jgdas_wave_prep + atmos: + obsproc: + dump: + tasks: + jgdas_atmos_tropcy_qc_reloc: + prep: + tasks: + jgdas_atmos_emcsfc_sfc_prep: + triggers: + - task: jobsproc_gdas_atmos_dump + event: release_sfcprep + analysis: + tasks: + jgdas_atmos_analysis: + triggers: + - task: jobsproc_gdas_atmos_prep + - task: jgdas_atmos_emcsfc_sfc_prep + events: + - "release_fcst" + jgdas_atmos_analysis_calc: + triggers: + - task: jgdas_atmos_analysis + jgdas_atmos_analysis_diag: + triggers: + - task: jgdas_atmos_analysis + post: + tasks: + jgdas_atmos_post_manager: + triggers: + - task: jgdas_forecast + state: active + events: + - "release_postanl" + - "release_post( 10 )" + jgdas_atmos_post_anl: + template: jgdas_atmos_post_master + triggers: + - task: jgdas_atmos_post_manager + event: release_postanl + - task: jgdas_atmos_analysis_calc + edits: + FHRGRP: '000' + FHRLST: 'anl' + HR: 'anl' + FHR: 'anl' + jgdas_atmos_post_f( 10 ): + template: jgdas_atmos_post_master + triggers: + - task: jgdas_atmos_post_manager + event: release_post( ) + edits: + FHR: 'f( )' + HR: '( )' + FHRGRP: '( 1, )' + FHRLST: 'f( )' + post_processing: + tasks: + jgdas_atmos_chgres_forenkf: + triggers: + - task: jgdas_forecast + #- task: forecast + gempak: + tasks: + jgdas_atmos_gempak: + triggers: + - task: jgdas_forecast + jgdas_atmos_gempak_meta_ncdc: + triggers: + - task: jgdas_atmos_gempak + verf: + tasks: + jgdas_atmos_vminmon: + triggers: + - task: jgdas_atmos_analysis + jgdas_atmos_verfrad: + triggers: + - task: jgdas_atmos_analysis_diag + jgdas_atmos_verfozn: + triggers: + - task: jgdas_atmos_analysis_diag + wave: + init: + tasks: + jgdas_wave_init: + triggers: + - task: jobsproc_gdas_atmos_prep + prep: + tasks: + jgdas_wave_prep: + triggers: + - task: jgdas_wave_init + post: + tasks: + jgdas_wave_postsbs: + triggers: + - task: jgdas_atmos_post_manager + event: release_post000 + jgdas_wave_postpnt: + triggers: + - task: jgdas_forecast + enkfgdas: + 
edits: + RUN: 'gdas' + analysis: + create: + tasks: + jenkfgdas_select_obs: + triggers: + - task: jobsproc_gdas_atmos_prep + jenkfgdas_diag: + triggers: + - task: jenkfgdas_select_obs + jenkfgdas_update: + triggers: + - task: jenkfgdas_diag + recenter: + ecen: + grp(1, 3): + tasks: + jenkfgdas_ecen( ): + template: jenkfgdas_ecen + triggers: + - task: jgdas_atmos_post_f( ) + edits: + FHRGRP( 3 ): '( )' + tasks: + jenkfgdas_sfc: + triggers: + - task: jgdas_atmos_analysis_calc + - task: jenkfgdas_update + #forecast: + #grp( 1,40 ): + # edits: + # ENSGRP: '( )' + # tasks: + # jenkfgdas_fcst: + post: + tasks: + jenkfgdas_post_f( 3,6 ): + triggers: + - family: ['enkfgdas/analysis/recenter/ecen', 'enkfgdas/analysis/create'] + operand: 'or' + template: jenkfgdas_post_master + edits: + FHMIN_EPOS: ( ) + FHMAX_EPOS: ( ) + FHOUT_EPOS: ( ) + obsproc: + defstatus: complete + v1.0: + gfs: + atmos: + dump: + tasks: + jobsproc_gfs_atmos_dump: + template: skip + events: + - "release_sfcprep" + prep: + tasks: + jobsproc_gfs_atmos_prep: + template: skip + gdas: + atmos: + dump: + tasks: + jobsproc_gdas_atmos_dump: + template: skip + events: + - "release_sfcprep" + prep: + tasks: + jobsproc_gdas_atmos_prep: + template: skip diff --git a/workflow/pygw b/workflow/pygw new file mode 120000 index 0000000000..dfa1d9a164 --- /dev/null +++ b/workflow/pygw @@ -0,0 +1 @@ +../ush/python/pygw/src/pygw \ No newline at end of file diff --git a/workflow/rocoto/__init__.py b/workflow/rocoto/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/workflow/rocoto/rocoto.py b/workflow/rocoto/rocoto.py new file mode 100644 index 0000000000..b3f88f01d3 --- /dev/null +++ b/workflow/rocoto/rocoto.py @@ -0,0 +1,385 @@ +#!/usr/bin/env python3 + +from typing import Union, List, Dict, Any + +''' + MODULE: + rocoto.py + + ABOUT: + Helper module to create tasks, metatasks, and dependencies for Rocoto +''' + +__all__ = ['create_task', 'create_metatask', + 'add_dependency', 'create_dependency', + 'create_envar', 'create_entity', 'create_cycledef'] + + +def create_metatask(task_dict: Dict[str, Any], metatask_dict: Dict[str, Any]) -> List[str]: + """ + create a Rocoto metatask given a dictionary containing task and metatask information + :param metatask_dict: metatask key-value parameters + :type metatask_dict: dict + :param task_dict: task key-value parameters + :type task_dict: dict + :return: Rocoto metatask + :rtype: list + """ + + # Grab metatask info from the metatask_dict + metataskname = metatask_dict.get('metataskname', 'demometatask') + varname = metatask_dict.get('varname', 'demovar') + varval = metatask_dict.get('varval', 1) + vardict = metatask_dict.get('vardict', None) + + strings = [f'\n', + '\n', + f'\t{str(varval)}\n'] + + if vardict is not None: + for key in vardict.keys(): + value = str(vardict[key]) + strings.append(f'\t{value}\n') + strings.append('\n') + tasklines = create_task(task_dict) + for tl in tasklines: + strings.append(f'{tl}') if tl == '\n' else strings.append(f'\t{tl}') + strings.append('\n') + strings.append('\n') + + return strings + + +def create_task(task_dict: Dict[str, Any]) -> List[str]: + """ + create a Rocoto task given a dictionary containing task information + :param task_dict: task key-value parameters + :type task_dict: dict + :return: Rocoto task + :rtype: list + """ + + # Grab task info from the task_names + taskname = task_dict.get('taskname', 'demotask') + cycledef = task_dict.get('cycledef', 'democycle') + maxtries = task_dict.get('maxtries', 3) + final = 
task_dict.get('final', False) + command = task_dict.get('command', 'sleep 10') + jobname = task_dict.get('jobname', 'demojob') + resources_dict = task_dict['resources'] + account = resources_dict.get('account', 'batch') + queue = resources_dict.get('queue', 'debug') + partition = resources_dict.get('partition', None) + walltime = resources_dict.get('walltime', '00:01:00') + native = resources_dict.get('native', None) + memory = resources_dict.get('memory', None) + nodes = resources_dict.get('nodes', 1) + ppn = resources_dict.get('ppn', 1) + threads = resources_dict.get('threads', 1) + log = task_dict.get('log', 'demo.log') + envar = task_dict.get('envars', None) + dependency = task_dict.get('dependency', []) + + str_maxtries = str(maxtries) + str_final = ' final="true"' if final else '' + envar = envar if isinstance(envar, list) else [envar] + + strings = [f'\n', + '\n', + f'\t{command}\n', + '\n', + f'\t{jobname}\n', + f'\t{account}\n', + f'\t{queue}\n'] + + if partition is not None: + strings.append(f'\t{partition}\n') + strings.append(f'\t{walltime}\n') + strings.append(f'\t{nodes}:ppn={ppn}:tpp={threads}\n') + if memory is not None: + strings.append(f'\t{memory}\n') + if native is not None: + strings.append(f'\t{native}\n') + strings.append('\n') + strings.append(f'\t{log}\n') + strings.append('\n') + + if envar[0] is not None: + for e in envar: + strings.append(f'\t{e}\n') + strings.append('\n') + + if dependency is not None and len(dependency) > 0: + strings.append('\t\n') + for d in dependency: + strings.append(f'\t\t{d}\n') + strings.append('\t\n') + strings.append('\n') + elif taskname != "gfswaveinit": + print("WARNING: No dependencies for task " + taskname) + + strings.append('\n') + + return strings + + +def add_dependency(dep_dict: Dict[str, Any]) -> str: + """ + create a simple Rocoto dependency given a dictionary with dependency information + :param dep_dict: dependency key-value parameters + :type dep_dict: dict + :return: Rocoto simple dependency + :rtype: str + """ + + tag_map = {'task': _add_task_tag, + 'metatask': _add_task_tag, + 'data': _add_data_tag, + 'cycleexist': _add_cycle_tag, + 'streq': _add_streq_tag, + 'strneq': _add_streq_tag} + + dep_condition = dep_dict.get('condition', None) + dep_type = dep_dict.get('type', None) + + try: + string = tag_map[dep_type](dep_dict) + except KeyError: + raise KeyError(f'{dep_type} is an unknown dependency type.\n' + + 'Currently supported dependency types are:\n' + + f'{" | ".join(tag_map.keys())}') + + if dep_condition is not None: + string = f'<{dep_condition}>{string}' + + return string + + +def _add_task_tag(dep_dict: Dict[str, Any]) -> str: + """ + create a simple task or metatask tag + :param dep_dict: dependency key-value parameters + :type dep_dict: dict + :return: Rocoto simple task or metatask dependency + :rtype: str + """ + + dep_type = dep_dict.get('type', None) + dep_name = dep_dict.get('name', None) + dep_offset = dep_dict.get('offset', None) + + if dep_name is None: + msg = f'a {dep_type} name is necessary for {dep_type} dependency' + raise KeyError(msg) + + string = '<' + string += f'{dep_type}dep {dep_type}="{dep_name}"' + if dep_offset is not None: + string += f' cycle_offset="{dep_offset}"' + string += '/>' + + return string + + +def _add_data_tag(dep_dict: Dict[str, Any]) -> str: + """ + create a simple data tag + :param dep_dict: dependency key-value parameters + :type dep_dict: dict + :return: Rocoto simple task or metatask dependency + :rtype: str + """ + + dep_type = dep_dict.get('type', None) + 
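+    # Illustrative input only (the path is hypothetical, mirroring the
+    # callers in workflow_tasks):
+    #   {'type': 'data', 'data': '&ROTDIR;/gdas.@Y@m@d/@H/atmf009.nc', 'offset': '-06:00:00'}
+    # 'data' and 'offset' may be scalars or equal-length lists; an offset is
+    # only applied when the corresponding path contains '@' cycle placeholders.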
dep_data = dep_dict.get('data', None) + dep_offset = dep_dict.get('offset', None) + + if dep_data is None: + msg = f'a data value is necessary for {dep_type} dependency' + raise KeyError(msg) + + if not isinstance(dep_data, list): + dep_data = [dep_data] + + if not isinstance(dep_offset, list): + dep_offset = [dep_offset] + + assert len(dep_data) == len(dep_offset) + + strings = [''] + for data, offset in zip(dep_data, dep_offset): + if '@' in data: + offset_str = '' if offset in [None, ''] else f' offset="{offset}"' + offset_string_b = f'' + offset_string_e = '' + else: + offset_string_b = '' + offset_string_e = '' + + strings.append(f'{offset_string_b}{data}{offset_string_e}') + + strings.append('') + + return ''.join(strings) + + +def _add_cycle_tag(dep_dict: Dict[str, Any]) -> str: + """ + create a simple cycle exist tag + :param dep_dict: dependency key-value parameters + :type dep_dict: dict + :return: Rocoto simple task or metatask dependency + :rtype: str + """ + + dep_type = dep_dict.get('type', None) + dep_offset = dep_dict.get('offset', None) + + if dep_offset is None: + msg = f'an offset value is necessary for {dep_type} dependency' + raise KeyError(msg) + + string = f'' + + return string + + +def _add_streq_tag(dep_dict: Dict[str, Any]) -> str: + """ + create a simple string comparison tag + :param dep_dict: dependency key-value parameters + :type dep_dict: dict + :return: Rocoto simple task or metatask dependency + :rtype: str + """ + + dep_type = dep_dict.get('type', None) + dep_left = dep_dict.get('left', None) + dep_right = dep_dict.get('right', None) + + fail = False + msg = '' + if dep_left is None: + msg += f'a left value is necessary for {dep_type} dependency' + fail = True + else: + dep_left = str(dep_left) + if dep_right is None: + if fail: + msg += '\n' + msg += f'a right value is necessary for {dep_type} dependency' + fail = True + else: + dep_right = str(dep_right) + if fail: + raise KeyError(msg) + + if '@' in dep_left: + dep_left = f'{dep_left}' + if '@' in dep_right: + dep_right = f'{dep_right}' + + string = f'<{dep_type}>{dep_left}{dep_right}' + + return string + + +def _traverse(o, tree_types=(list, tuple)): + """ + Traverse through a list of lists or tuples and yield the value + Objective is to flatten a list of lists or tuples + :param o: list of lists or not + :type o: list, tuple, scalar + :param tree_types: trees to travers + :type tree_types: tuple + :return: value in the list or tuple + :rtype: scalar + """ + + if isinstance(o, tree_types): + for value in o: + for subvalue in _traverse(value, tree_types): + yield subvalue + else: + yield o + + +def create_dependency(dep_condition=None, dep=[]) -> List[str]: + """ + create a compound dependency given a list of dependencies, and compounding condition + the list of dependencies are created using add_dependency + :param dep_condition: dependency condition + :type dep_condition: boolean e.g. 
and, or, true, false + :param dep: dependency + :type dep: str or list + :return: Rocoto compound dependency + :rtype: list + """ + + dep = dep if isinstance(dep, list) else [dep] + + strings = [] + + if len(dep) > 0: + if dep_condition is not None: + strings.append(f'<{dep_condition}>') + + for d in dep: + if dep_condition is None: + strings.append(f'{d}') + else: + for e in _traverse(d): + strings.append(f'\t{e}') + + if dep_condition is not None: + strings.append(f'') + + return strings + + +def create_envar(name: str, value: Union[str, float, int]) -> str: + """ + create a Rocoto environment variable given name and value + returns the environment variable as a string + :param name: name of the environment variable + :type name: str + :param value: value of the environment variable + :type value: str or float or int or unicode + :return: Rocoto environment variable key-value pair + :rtype: str + """ + + return f'{name}{str(value)}' + + +def create_cycledef(group=None, start=None, stop=None, step=None): + """ + create a Rocoto cycle definition + returns the environment variable as a string + :param group: cycle definition group name + :type group: str + :param start: cycle start datetime + :type start: str + :param step: cycle interval (timedelta) + :type stop: str + :param step: cycle interval (timedelta) + :return: Rocoto cycledef variable string + :rtype: str + """ + + return f'{start} {stop} {step}' + + +def create_entity(name: str, value: Union[str, float, int]) -> str: + """ + create an XML ENTITY variable given name and value + returns the variable as a string + :param name: name of the variable + :type name: str + :param value: value of the variable + :type value: str or float or int or unicode + :return: XML entity variable key-value pair + :rtype: str + """ + + return f'' diff --git a/workflow/rocoto/workflow_tasks.py b/workflow/rocoto/workflow_tasks.py new file mode 120000 index 0000000000..75800b2bf4 --- /dev/null +++ b/workflow/rocoto/workflow_tasks.py @@ -0,0 +1 @@ +workflow_tasks_gsl.py \ No newline at end of file diff --git a/workflow/rocoto/workflow_tasks_emc.py b/workflow/rocoto/workflow_tasks_emc.py new file mode 100644 index 0000000000..7b050614e5 --- /dev/null +++ b/workflow/rocoto/workflow_tasks_emc.py @@ -0,0 +1,1463 @@ +#!/usr/bin/env python3 + +import numpy as np +from typing import List +from applications import AppConfig +import rocoto.rocoto as rocoto +from pygw.template import Template, TemplateConstants + +__all__ = ['Tasks', 'create_wf_task', 'get_wf_tasks'] + + +class Tasks: + SERVICE_TASKS = ['arch', 'earc'] + VALID_TASKS = ['aerosol_init', 'coupled_ic', + 'prep', 'anal', 'sfcanl', 'analcalc', 'analdiag', 'arch', + 'atmanlinit', 'atmanlrun', 'atmanlfinal', + 'ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalchkpt', 'ocnanalpost', 'ocnanalvrfy', + 'earc', 'ecen', 'echgres', 'ediag', 'efcs', + 'eobs', 'eomg', 'epos', 'esfc', 'eupd', + 'atmensanlinit', 'atmensanlrun', 'atmensanlfinal', + 'aeroanlinit', 'aeroanlrun', 'aeroanlfinal', + 'preplandobs', 'landanlinit', 'landanlrun', 'landanlfinal', + 'fcst', 'post', 'ocnpost', 'vrfy', 'metp', + 'postsnd', 'awips', 'gempak', + 'wafs', 'wafsblending', 'wafsblending0p25', + 'wafsgcip', 'wafsgrib2', 'wafsgrib20p25', + 'waveawipsbulls', 'waveawipsgridded', 'wavegempak', 'waveinit', + 'wavepostbndpnt', 'wavepostbndpntbll', 'wavepostpnt', 'wavepostsbs', 'waveprep'] + + def __init__(self, app_config: AppConfig, cdump: str) -> None: + + self.app_config = app_config + self.cdump = cdump + + # Save dict_configs and base in 
the internal state (never know where it may be needed) + self._configs = self.app_config.configs + self._base = self._configs['base'] + + self.n_tiles = 6 # TODO - this needs to be elsewhere + + envar_dict = {'RUN_ENVIR': self._base.get('RUN_ENVIR', 'emc'), + 'HOMEgfs': self._base.get('HOMEgfs'), + 'EXPDIR': self._base.get('EXPDIR'), + 'NET': 'gfs', + 'CDUMP': self.cdump, + 'RUN': self.cdump, + 'CDATE': '@Y@m@d@H', + 'PDY': '@Y@m@d', + 'cyc': '@H', + 'COMROOT': self._base.get('COMROOT'), + 'DATAROOT': self._base.get('DATAROOT')} + self.envars = self._set_envars(envar_dict) + + @staticmethod + def _set_envars(envar_dict) -> list: + + envars = [] + for key, value in envar_dict.items(): + envars.append(rocoto.create_envar(name=key, value=str(value))) + + return envars + + @staticmethod + def _get_hybgroups(nens: int, nmem_per_group: int, start_index: int = 1): + ngrps = nens / nmem_per_group + groups = ' '.join([f'{x:02d}' for x in range(start_index, int(ngrps) + 1)]) + return groups + + @staticmethod + def _is_this_a_gdas_task(cdump, task_name): + if cdump != 'enkfgdas': + raise TypeError(f'{task_name} must be part of the "enkfgdas" cycle and not {cdump}') + + def _template_to_rocoto_cycstring(self, template: str, subs_dict: dict = {}) -> str: + ''' + Takes a string templated with ${ } and converts it into a string suitable + for use in a rocoto . Some common substitutions are defined by + default. Any additional variables in the template and overrides of the + defaults can be passed in by an optional dict. + + Variables substitued by default: + ${ROTDIR} -> '&ROTDIR;' + ${RUN} -> self.cdump + ${DUMP} -> self.cdump + ${MEMDIR} -> '' + ${YMD} -> '@Y@m@d' + ${HH} -> '@H' + + Parameters + ---------- + template: str + Template string with variables to be replaced + subs_dict: dict, optional + Dictionary containing substitutions + + Returns + ------- + str + Updated string with variables substituted + + ''' + + # Defaults + rocoto_conversion_dict = { + 'ROTDIR': '&ROTDIR;', + 'RUN': self.cdump, + 'DUMP': self.cdump, + 'MEMDIR': '', + 'YMD': '@Y@m@d', + 'HH': '@H' + } + + rocoto_conversion_dict.update(subs_dict) + + return Template.substitute_structure(template, + TemplateConstants.DOLLAR_CURLY_BRACE, + rocoto_conversion_dict.get) + + def get_resource(self, task_name): + """ + Given a task name (task_name) and its configuration (task_names), + return a dictionary of resources (task_resource) used by the task. 
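+        Values come from the task's config section; when self.cdump is 'gfs',
+        a '_gfs'-suffixed setting (e.g. wtime_{task_name}_gfs) overrides the
+        default when present.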
+ Task resource dictionary includes: + account, walltime, cores, nodes, ppn, threads, memory, queue, partition, native + """ + + scheduler = self.app_config.scheduler + + task_config = self._configs[task_name] + + account = task_config['ACCOUNT'] + + walltime = task_config[f'wtime_{task_name}'] + if self.cdump in ['gfs'] and f'wtime_{task_name}_gfs' in task_config.keys(): + walltime = task_config[f'wtime_{task_name}_gfs'] + + cores = task_config[f'npe_{task_name}'] + if self.cdump in ['gfs'] and f'npe_{task_name}_gfs' in task_config.keys(): + cores = task_config[f'npe_{task_name}_gfs'] + + ppn = task_config[f'npe_node_{task_name}'] + if self.cdump in ['gfs'] and f'npe_node_{task_name}_gfs' in task_config.keys(): + ppn = task_config[f'npe_node_{task_name}_gfs'] + + nodes = np.int(np.ceil(np.float(cores) / np.float(ppn))) + + threads = task_config[f'nth_{task_name}'] + if self.cdump in ['gfs'] and f'nth_{task_name}_gfs' in task_config.keys(): + threads = task_config[f'nth_{task_name}_gfs'] + + memory = task_config.get(f'memory_{task_name}', None) + + native = None + if scheduler in ['pbspro']: + native = '-l debug=true,place=vscatter' + if task_config.get('is_exclusive', False): + native += ':exclhost' + elif scheduler in ['slurm']: + native = '--export=NONE' + + queue = task_config['QUEUE_SERVICE'] if task_name in Tasks.SERVICE_TASKS else task_config['QUEUE'] + + partition = None + if scheduler in ['slurm']: + partition = task_config['PARTITION_SERVICE'] if task_name in Tasks.SERVICE_TASKS else task_config[ + 'PARTITION_BATCH'] + + task_resource = {'account': account, + 'walltime': walltime, + 'nodes': nodes, + 'cores': cores, + 'ppn': ppn, + 'threads': threads, + 'memory': memory, + 'native': native, + 'queue': queue, + 'partition': partition} + + return task_resource + + def get_task(self, task_name, *args, **kwargs): + """ + Given a task_name, call the method for that task + """ + try: + return getattr(self, task_name, *args, **kwargs)() + except AttributeError: + raise AttributeError(f'"{task_name}" is not a valid task.\n' + + 'Valid tasks are:\n' + + f'{", ".join(Tasks.VALID_TASKS)}') + + # Specific Tasks begin here + def coupled_ic(self): + + cpl_ic = self._configs['coupled_ic'] + + deps = [] + + # Atm ICs + if self.app_config.do_atm: + atm_res = self._base.get('CASE', 'C384') + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ATMIC']}/@Y@m@d@H/{self.cdump}" + for file in ['gfs_ctrl.nc'] + \ + [f'{datatype}_data.tile{tile}.nc' + for datatype in ['gfs', 'sfc'] + for tile in range(1, self.n_tiles + 1)]: + data = f"{prefix}/{atm_res}/INPUT/{file}" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + else: # data-atmosphere + # TODO - need more information about how these forcings are stored + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_DATM']}/@Y@m@d@H" + data = f"{prefix}/gefs.@Y@m.nc" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + + # Ocean ICs + if self.app_config.do_ocean: + ocn_res = f"{self._base.get('OCNRES', '025'):03d}" + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_OCNIC']}/@Y@m@d@H/ocn" + for res in ['res'] + [f'res_{res_index}' for res_index in range(1, 4)]: + data = f"{prefix}/{ocn_res}/MOM.{res}.nc" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + + # Ice ICs + if self.app_config.do_ice: + ice_res = f"{self._base.get('ICERES', '025'):03d}" + ice_res_dec = f'{float(ice_res) / 100:.2f}' + prefix = 
f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ICEIC']}/@Y@m@d@H/ice" + data = f"{prefix}/{ice_res}/cice5_model_{ice_res_dec}.res_@Y@m@d@H.nc" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + + # Wave ICs + if self.app_config.do_wave: + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_WAVIC']}/@Y@m@d@H/wav" + for wave_grid in self._configs['waveinit']['waveGRD'].split(): + data = f"{prefix}/{wave_grid}/@Y@m@d.@H0000.restart.{wave_grid}" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('coupled_ic') + task = create_wf_task('coupled_ic', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def prep(self): + + dump_suffix = self._base["DUMP_SUFFIX"] + gfs_cyc = self._base["gfs_cyc"] + dmpdir = self._base["DMPDIR"] + atm_hist_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_HISTORY_TMPL"], {'RUN': 'gdas'}) + dump_path = self._template_to_rocoto_cycstring(self._base["COM_OBSDMP_TMPL"], + {'DMPDIR': dmpdir, 'DUMP_SUFFIX': dump_suffix}) + + gfs_enkf = True if self.app_config.do_hybvar and 'gfs' in self.app_config.eupd_cdumps else False + + deps = [] + dep_dict = {'type': 'metatask', 'name': 'gdaspost', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + data = f'{atm_hist_path}/gdas.t@Hz.atmf009.nc' + dep_dict = {'type': 'data', 'data': data, 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + data = f'{dump_path}/{self.cdump}.t@Hz.updated.status.tm00.bufr_d' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + cycledef = self.cdump + if self.cdump in ['gfs'] and gfs_enkf and gfs_cyc != 4: + cycledef = 'gdas' + + resources = self.get_resource('prep') + task = create_wf_task('prep', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + def waveinit(self): + + resources = self.get_resource('waveinit') + dependencies = None + if self.app_config.mode in ['cycled']: + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'cycleexist', 'condition': 'not', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='or', dep=deps) + + task = create_wf_task('waveinit', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def waveprep(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}waveinit'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('waveprep') + task = create_wf_task('waveprep', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def aerosol_init(self): + + input_path = self._template_to_rocoto_cycstring(self._base['COM_ATMOS_INPUT_TMPL']) + restart_path = self._template_to_rocoto_cycstring(self._base['COM_ATMOS_RESTART_TMPL']) + + deps = [] + # Files from current cycle + files = ['gfs_ctrl.nc'] + [f'gfs_data.tile{tile}.nc' for tile in range(1, self.n_tiles + 1)] + for file in files: + data = f'{input_path}/{file}' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + + # 
Calculate offset based on CDUMP = gfs | gdas + interval = None + if self.cdump in ['gfs']: + interval = self._base['INTERVAL_GFS'] + elif self.cdump in ['gdas']: + interval = self._base['INTERVAL'] + offset = f'-{interval}' + + # Files from previous cycle + files = [f'@Y@m@d.@H0000.fv_core.res.nc'] + \ + [f'@Y@m@d.@H0000.fv_core.res.tile{tile}.nc' for tile in range(1, self.n_tiles + 1)] + \ + [f'@Y@m@d.@H0000.fv_tracer.res.tile{tile}.nc' for tile in range(1, self.n_tiles + 1)] + + for file in files: + data = [f'{restart_path}', file] + dep_dict = {'type': 'data', 'data': data, 'offset': [offset, None]} + deps.append(rocoto.add_dependency(dep_dict)) + + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + cycledef = 'gfs_seq' + resources = self.get_resource('aerosol_init') + task = create_wf_task('aerosol_init', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + def anal(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_hybvar: + dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + else: + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('anal') + task = create_wf_task('anal', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def sfcanl(self): + + deps = [] + if self.app_config.do_jediatmvar: + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlfinal'} + else: + dep_dict = {'type': 'task', 'name': f'{self.cdump}anal'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_jedilandda: + dep_dict = {'type': 'task', 'name': f'{self.cdump}landanlfinal'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + else: + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('sfcanl') + task = create_wf_task('sfcanl', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def analcalc(self): + + deps = [] + if self.app_config.do_jediatmvar: + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlfinal'} + else: + dep_dict = {'type': 'task', 'name': f'{self.cdump}anal'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'task', 'name': f'{self.cdump}sfcanl'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_hybvar and self.cdump in ['gdas']: + dep_dict = {'type': 'task', 'name': 'enkfgdasechgres', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('analcalc') + task = create_wf_task('analcalc', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def analdiag(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}anal'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('analdiag') + task = create_wf_task('analdiag', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def atmanlinit(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'} + 
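+        # atmanlinit always waits on this cycle's prep task; with hybrid
+        # EnVar it also waits on the previous cycle's enkfgdasepmn metatask
+        # (added just below).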
deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_hybvar: + dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + else: + dependencies = rocoto.create_dependency(dep=deps) + + gfs_cyc = self._base["gfs_cyc"] + gfs_enkf = True if self.app_config.do_hybvar and 'gfs' in self.app_config.eupd_cdumps else False + + cycledef = self.cdump + if self.cdump in ['gfs'] and gfs_enkf and gfs_cyc != 4: + cycledef = 'gdas' + + resources = self.get_resource('atmanlinit') + task = create_wf_task('atmanlinit', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + def atmanlrun(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlinit'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('atmanlrun') + task = create_wf_task('atmanlrun', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def atmanlfinal(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlrun'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('atmanlfinal') + task = create_wf_task('atmanlfinal', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def aeroanlinit(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'} + deps.append(rocoto.add_dependency(dep_dict)) + + resources = self.get_resource('aeroanlinit') + task = create_wf_task('aeroanlinit', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + return task + + def aeroanlrun(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}aeroanlinit'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('aeroanlrun') + task = create_wf_task('aeroanlrun', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def aeroanlfinal(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}aeroanlrun'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('aeroanlfinal') + task = create_wf_task('aeroanlfinal', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def preplandobs(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('preplandobs') + task = create_wf_task('preplandobs', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=f'{self.cdump}_land_prep') + + return task + + def landanlinit(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'} + deps.append(rocoto.add_dependency(dep_dict)) + + # Either gdaspreplandobs (runs in 18z cycle) or not 18z cycle + sub_deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}preplandobs'} + sub_deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'strneq', 'left': '@H', 'right': 18} + sub_deps.append(rocoto.add_dependency(dep_dict)) + 
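+        # 'xor' covers both cases: at 18z the strneq test fails, so the
+        # preplandobs task must have completed; at any other cycle the strneq
+        # test alone satisfies this branch.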
deps.append(rocoto.create_dependency(dep_condition='xor', dep=sub_deps)) + + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('landanlinit') + task = create_wf_task('landanlinit', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + return task + + def landanlrun(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}landanlinit'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('landanlrun') + task = create_wf_task('landanlrun', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def landanlfinal(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}landanlrun'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('landanlfinal') + task = create_wf_task('landanlfinal', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def ocnanalprep(self): + + dump_suffix = self._base["DUMP_SUFFIX"] + dmpdir = self._base["DMPDIR"] + ocean_hist_path = self._template_to_rocoto_cycstring(self._base["COM_OCEAN_HISTORY_TMPL"]) + + deps = [] + data = f'{ocean_hist_path}/gdas.t@Hz.ocnf009.nc' + dep_dict = {'type': 'data', 'data': data, 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('ocnanalprep') + task = create_wf_task('ocnanalprep', + resources, + cdump=self.cdump, + envar=self.envars, + dependency=dependencies) + + return task + + def ocnanalbmat(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalprep'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('ocnanalbmat') + task = create_wf_task('ocnanalbmat', + resources, + cdump=self.cdump, + envar=self.envars, + dependency=dependencies) + + return task + + def ocnanalrun(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalbmat'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('ocnanalrun') + task = create_wf_task('ocnanalrun', + resources, + cdump=self.cdump, + envar=self.envars, + dependency=dependencies) + + return task + + def ocnanalchkpt(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalrun'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_mergensst: + data = f'&ROTDIR;/{self.cdump}.@Y@m@d/@H/atmos/{self.cdump}.t@Hz.sfcanl.nc' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('ocnanalchkpt') + task = create_wf_task('ocnanalchkpt', + resources, + cdump=self.cdump, + envar=self.envars, + dependency=dependencies) + + return task + + def ocnanalpost(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalchkpt'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('ocnanalpost') + task = create_wf_task('ocnanalpost', + resources, + cdump=self.cdump, + envar=self.envars, + dependency=dependencies) + + return task + + def 
ocnanalvrfy(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalpost'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('ocnanalvrfy') + task = create_wf_task('ocnanalvrfy', + resources, + cdump=self.cdump, + envar=self.envars, + dependency=dependencies) + + return task + + def fcst(self): + + fcst_map = {'forecast-only': self._fcst_forecast_only, + 'cycled': self._fcst_cycled} + + try: + task = fcst_map[self.app_config.mode]() + except KeyError: + raise NotImplementedError(f'{self.app_config.mode} is not a valid type.\n' + + 'Currently supported forecast types are:\n' + + f'{" | ".join(fcst_map.keys())}') + + return task + + def _fcst_forecast_only(self): + dependencies = [] + + dep_dict = {'type': 'task', 'name': f'{self.cdump}coupled_ic'} + dependencies.append(rocoto.add_dependency(dep_dict)) + + if self.app_config.do_wave and self.cdump in self.app_config.wave_cdumps: + wave_job = 'waveprep' if self.app_config.model_app in ['ATMW'] else 'waveinit' + dep_dict = {'type': 'task', 'name': f'{self.cdump}{wave_job}'} + dependencies.append(rocoto.add_dependency(dep_dict)) + + if self.app_config.do_aero: + # Calculate offset based on CDUMP = gfs | gdas + interval = None + if self.cdump in ['gfs']: + interval = self._base['INTERVAL_GFS'] + elif self.cdump in ['gdas']: + interval = self._base['INTERVAL'] + offset = f'-{interval}' + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}aerosol_init'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'cycleexist', 'condition': 'not', 'offset': offset} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies.append(rocoto.create_dependency(dep_condition='or', dep=deps)) + + dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies) + + resources = self.get_resource('fcst') + task = create_wf_task('fcst', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def _fcst_cycled(self): + + dep_dict = {'type': 'task', 'name': f'{self.cdump}sfcanl'} + dep = rocoto.add_dependency(dep_dict) + dependencies = rocoto.create_dependency(dep=dep) + + if self.app_config.do_jediocnvar: + dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalpost'} + dependencies.append(rocoto.add_dependency(dep_dict)) + + if self.app_config.do_wave and self.cdump in self.app_config.wave_cdumps: + dep_dict = {'type': 'task', 'name': f'{self.cdump}waveprep'} + dependencies.append(rocoto.add_dependency(dep_dict)) + + if self.app_config.do_aero: + dep_dict = {'type': 'task', 'name': f'{self.cdump}aeroanlfinal'} + dependencies.append(rocoto.add_dependency(dep_dict)) + + if self.app_config.do_jedilandda: + dep_dict = {'type': 'task', 'name': f'{self.cdump}landanlfinal'} + dependencies.append(rocoto.add_dependency(dep_dict)) + + dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies) + + if self.cdump in ['gdas']: + dep_dict = {'type': 'cycleexist', 'condition': 'not', 'offset': '-06:00:00'} + dependencies.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='or', dep=dependencies) + + cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump + + resources = self.get_resource('fcst') + task = create_wf_task('fcst', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + def post(self): + add_anl_to_post = False + if 
self.app_config.mode in ['cycled']: + add_anl_to_post = True + + return self._post_task('post', add_anl_to_post=add_anl_to_post) + + def ocnpost(self): + if self.app_config.mode in ['forecast-only']: # TODO: fix ocnpost in cycled mode + return self._post_task('ocnpost', add_anl_to_post=False) + + def _post_task(self, task_name, add_anl_to_post=False): + if task_name not in ['post', 'ocnpost']: + raise KeyError(f'Invalid post-processing task: {task_name}') + + if task_name in ['ocnpost']: + add_anl_to_post = False + + def _get_postgroups(cdump, config, add_anl=False): + + fhmin = config['FHMIN'] + fhmax = config['FHMAX'] + fhout = config['FHOUT'] + + # Get a list of all forecast hours + fhrs = [] + if cdump in ['gdas']: + fhrs = range(fhmin, fhmax + fhout, fhout) + elif cdump in ['gfs']: + fhmax = np.max( + [config['FHMAX_GFS_00'], config['FHMAX_GFS_06'], config['FHMAX_GFS_12'], config['FHMAX_GFS_18']]) + fhout = config['FHOUT_GFS'] + fhmax_hf = config['FHMAX_HF_GFS'] + fhout_hf = config['FHOUT_HF_GFS'] + fhrs_hf = range(fhmin, fhmax_hf + fhout_hf, fhout_hf) + fhrs = list(fhrs_hf) + list(range(fhrs_hf[-1] + fhout, fhmax + fhout, fhout)) + + npostgrp = config['NPOSTGRP'] + ngrps = npostgrp if len(fhrs) > npostgrp else len(fhrs) + + fhrs = [f'f{fhr:03d}' for fhr in fhrs] + fhrs = np.array_split(fhrs, ngrps) + fhrs = [fhr.tolist() for fhr in fhrs] + + anl = ['anl'] if add_anl else [] + + grp = ' '.join(anl + [f'_{fhr[0]}-{fhr[-1]}' for fhr in fhrs]) + dep = ' '.join(anl + [fhr[-1] for fhr in fhrs]) + lst = ' '.join(anl + ['_'.join(fhr) for fhr in fhrs]) + + return grp, dep, lst + + deps = [] + atm_hist_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_HISTORY_TMPL"]) + data = f'{atm_hist_path}/{self.cdump}.t@Hz.atm.log#dep#.txt' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'task', 'name': f'{self.cdump}fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='or', dep=deps) + + postenvars = self.envars.copy() + postenvar_dict = {'FHRGRP': '#grp#', + 'FHRLST': '#lst#', + 'ROTDIR': self._base.get('ROTDIR')} + for key, value in postenvar_dict.items(): + postenvars.append(rocoto.create_envar(name=key, value=str(value))) + + varname1, varname2, varname3 = 'grp', 'dep', 'lst' + varval1, varval2, varval3 = _get_postgroups(self.cdump, self._configs[task_name], add_anl=add_anl_to_post) + vardict = {varname2: varval2, varname3: varval3} + + cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump + + resources = self.get_resource(task_name) + task = create_wf_task(task_name, resources, cdump=self.cdump, envar=postenvars, dependency=dependencies, + metatask=task_name, varname=varname1, varval=varval1, vardict=vardict, cycledef=cycledef) + + return task + + def wavepostsbs(self): + deps = [] + for wave_grid in self._configs['wavepostsbs']['waveGRD'].split(): + wave_hist_path = self._template_to_rocoto_cycstring(self._base["COM_WAVE_HISTORY_TMPL"]) + data = f'{wave_hist_path}/{self.cdump}wave.out_grd.{wave_grid}.@Y@m@d.@H0000' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('wavepostsbs') + task = create_wf_task('wavepostsbs', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def wavepostbndpnt(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}fcst'} 
+ deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('wavepostbndpnt') + task = create_wf_task('wavepostbndpnt', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def wavepostbndpntbll(self): + deps = [] + wave_hist_path = self._template_to_rocoto_cycstring(self._base["COM_WAVE_HISTORY_TMPL"]) + data = f'{wave_hist_path}/{self.cdump}.t@Hz.atm.logf180.txt' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('wavepostbndpntbll') + task = create_wf_task('wavepostbndpntbll', resources, cdump=self.cdump, envar=self.envars, + dependency=dependencies) + + return task + + def wavepostpnt(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_wave_bnd: + dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostbndpntbll'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('wavepostpnt') + task = create_wf_task('wavepostpnt', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def wavegempak(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostsbs'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('wavegempak') + task = create_wf_task('wavegempak', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def waveawipsbulls(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostsbs'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostpnt'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('waveawipsbulls') + task = create_wf_task('waveawipsbulls', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def waveawipsgridded(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostsbs'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('waveawipsgridded') + task = create_wf_task('waveawipsgridded', resources, cdump=self.cdump, envar=self.envars, + dependency=dependencies) + + return task + + def wafs(self): + return self._wafs_task('wafs') + + def wafsgcip(self): + return self._wafs_task('wafsgcip') + + def wafsgrib2(self): + return self._wafs_task('wafsgrib2') + + def wafsgrib20p25(self): + return self._wafs_task('wafsgrib20p25') + + def _wafs_task(self, task_name): + if task_name not in ['wafs', 'wafsgcip', 'wafsgrib2', 'wafsgrib20p25']: + raise KeyError(f'Invalid WAFS task: {task_name}') + + wafs_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_WAFS_TMPL"]) + + deps = [] + fhrlst = [6] + [*range(12, 36 + 3, 3)] + for fhr in fhrlst: + data = f'{wafs_path}/{self.cdump}.t@Hz.wafs.grb2if{fhr:03d}' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource(task_name) + task = create_wf_task(task_name, resources, 
cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def wafsblending(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}wafsgrib2'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('wafsblending') + task = create_wf_task('wafsblending', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def wafsblending0p25(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}wafsgrib20p25'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('wafsblending0p25') + task = create_wf_task('wafsblending0p25', resources, cdump=self.cdump, envar=self.envars, + dependency=dependencies) + + return task + + def postsnd(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('postsnd') + task = create_wf_task('postsnd', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def awips(self): + + def _get_awipsgroups(cdump, config): + + fhmin = config['FHMIN'] + fhmax = config['FHMAX'] + fhout = config['FHOUT'] + + # Get a list of all forecast hours + fhrs = [] + if cdump in ['gdas']: + fhrs = range(fhmin, fhmax + fhout, fhout) + elif cdump in ['gfs']: + fhmax = np.max( + [config['FHMAX_GFS_00'], config['FHMAX_GFS_06'], config['FHMAX_GFS_12'], config['FHMAX_GFS_18']]) + fhout = config['FHOUT_GFS'] + fhmax_hf = config['FHMAX_HF_GFS'] + fhout_hf = config['FHOUT_HF_GFS'] + if fhmax > 240: + fhmax = 240 + if fhmax_hf > 240: + fhmax_hf = 240 + fhrs_hf = list(range(fhmin, fhmax_hf + fhout_hf, fhout_hf)) + fhrs = fhrs_hf + list(range(fhrs_hf[-1] + fhout, fhmax + fhout, fhout)) + + nawipsgrp = config['NAWIPSGRP'] + ngrps = nawipsgrp if len(fhrs) > nawipsgrp else len(fhrs) + + fhrs = [f'f{fhr:03d}' for fhr in fhrs] + fhrs = np.array_split(fhrs, ngrps) + fhrs = [fhr.tolist() for fhr in fhrs] + + grp = ' '.join([f'_{fhr[0]}-{fhr[-1]}' for fhr in fhrs]) + dep = ' '.join([fhr[-1] for fhr in fhrs]) + lst = ' '.join(['_'.join(fhr) for fhr in fhrs]) + + return grp, dep, lst + + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}post'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + awipsenvars = self.envars.copy() + awipsenvar_dict = {'FHRGRP': '#grp#', + 'FHRLST': '#lst#', + 'ROTDIR': self._base.get('ROTDIR')} + for key, value in awipsenvar_dict.items(): + awipsenvars.append(rocoto.create_envar(name=key, value=str(value))) + + varname1, varname2, varname3 = 'grp', 'dep', 'lst' + varval1, varval2, varval3 = _get_awipsgroups(self.cdump, self._configs['awips']) + vardict = {varname2: varval2, varname3: varval3} + + resources = self.get_resource('awips') + task = create_wf_task('awips', resources, cdump=self.cdump, envar=awipsenvars, dependency=dependencies, + metatask='awips', varname=varname1, varval=varval1, vardict=vardict) + + return task + + def gempak(self): + + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}post'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('gempak') + task = create_wf_task('gempak', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def 
vrfy(self): + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}post'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump + + resources = self.get_resource('vrfy') + task = create_wf_task('vrfy', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + def fit2obs(self): + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}post'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump + + resources = self.get_resource('fit2obs') + task = create_wf_task('fit2obs', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + def metp(self): + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}post'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + metpenvars = self.envars.copy() + metpenvar_dict = {'SDATE_GFS': self._base.get('SDATE_GFS'), + # TODO - in Forecast-only, this is `SDATE` on the RHS + 'METPCASE': '#metpcase#'} + for key, value in metpenvar_dict.items(): + metpenvars.append(rocoto.create_envar(name=key, value=str(value))) + + varname1 = 'metpcase' + varval1 = 'g2g1 g2o1 pcp1' + + resources = self.get_resource('metp') + task = create_wf_task('metp', resources, cdump=self.cdump, envar=metpenvars, dependency=dependencies, + metatask='metp', varname=varname1, varval=varval1) + + return task + + def arch(self): + deps = [] + if self.app_config.do_vrfy: + dep_dict = {'type': 'task', 'name': f'{self.cdump}vrfy'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_fit2obs and self.cdump in ['gdas']: + dep_dict = {'type': 'task', 'name': f'{self.cdump}fit2obs'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_metp and self.cdump in ['gfs']: + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}metp'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_wave: + dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostsbs'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostpnt'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_wave_bnd: + dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostbndpnt'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_ocean: + if self.app_config.mode in ['forecast-only']: # TODO: fix ocnpost to run in cycled mode + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}ocnpost'} + deps.append(rocoto.add_dependency(dep_dict)) + # If all verification and ocean/wave coupling is off, add the gdas/gfs post metatask as a dependency + if len(deps) == 0: + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}post'} + deps.append(rocoto.add_dependency(dep_dict)) + + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump + + resources = self.get_resource('arch') + task = create_wf_task('arch', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + # Start of ensemble tasks + def eobs(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}prep'} + 
deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('eobs') + task = create_wf_task('eobs', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def eomg(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}eobs'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + eomgenvars = self.envars.copy() + eomgenvars.append(rocoto.create_envar(name='ENSGRP', value='#grp#')) + + groups = self._get_hybgroups(self._base['NMEM_ENS'], self._configs['eobs']['NMEM_EOMGGRP']) + + resources = self.get_resource('eomg') + task = create_wf_task('eomg', resources, cdump=self.cdump, envar=eomgenvars, dependency=dependencies, + metatask='eomn', varname='grp', varval=groups) + + return task + + def ediag(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}eobs'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('ediag') + task = create_wf_task('ediag', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def eupd(self): + deps = [] + if self.app_config.lobsdiag_forenkf: + dep_dict = {'type': 'task', 'name': f'{self.cdump}ediag'} + else: + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}eomn'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('eupd') + task = create_wf_task('eupd', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def atmensanlinit(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + cycledef = "gdas" + resources = self.get_resource('atmensanlinit') + task = create_wf_task('atmensanlinit', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + def atmensanlrun(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlinit'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('atmensanlrun') + task = create_wf_task('atmensanlrun', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def atmensanlfinal(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlrun'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('atmensanlfinal') + task = create_wf_task('atmensanlfinal', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def ecen(self): + + def _get_ecengroups(): + + if self._base.get('DOIAU_ENKF', False): + fhrs = list(self._base.get('IAUFHRS', '6').split(',')) + + necengrp = self._configs['ecen']['NECENGRP'] + 
ngrps = necengrp if len(fhrs) > necengrp else len(fhrs) + + fhrs = [f'{int(fhr):03d}' for fhr in fhrs] + fhrs = np.array_split(fhrs, ngrps) + fhrs = [fhr.tolist() for fhr in fhrs] + + grp = ' '.join([f'{x:03d}' for x in range(0, ngrps)]) + dep = ' '.join([f[-1] for f in fhrs]) + lst = ' '.join(['_'.join(f) for f in fhrs]) + + else: + grp = '000' + dep = 'f006' + lst = 'f006' + + return grp, dep, lst + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}analcalc'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_jediatmens: + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlfinal'} + else: + dep_dict = {'type': 'task', 'name': f'{self.cdump}eupd'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + ecenenvars = self.envars.copy() + ecenenvar_dict = {'FHRGRP': '#grp#', + 'FHRLST': '#lst#'} + for key, value in ecenenvar_dict.items(): + ecenenvars.append(rocoto.create_envar(name=key, value=str(value))) + + varname1, varname2, varname3 = 'grp', 'dep', 'lst' + varval1, varval2, varval3 = _get_ecengroups() + vardict = {varname2: varval2, varname3: varval3} + + resources = self.get_resource('ecen') + task = create_wf_task('ecen', resources, cdump=self.cdump, envar=ecenenvars, dependency=dependencies, + metatask='ecmn', varname=varname1, varval=varval1, vardict=vardict) + return task + + def esfc(self): + + # eupd_cdump = 'gdas' if 'gdas' in self.app_config.eupd_cdumps else 'gfs' + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}analcalc'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_jediatmens: + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlfinal'} + else: + dep_dict = {'type': 'task', 'name': f'{self.cdump}eupd'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('esfc') + task = create_wf_task('esfc', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def efcs(self): + + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}ecmn'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'task', 'name': f'{self.cdump}esfc'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + dep_dict = {'type': 'cycleexist', 'condition': 'not', 'offset': '-06:00:00'} + dependencies.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='or', dep=dependencies) + + efcsenvars = self.envars.copy() + efcsenvars.append(rocoto.create_envar(name='ENSGRP', value='#grp#')) + + groups = self._get_hybgroups(self._base['NMEM_ENS'], self._configs['efcs']['NMEM_EFCSGRP']) + + if self.cdump == "enkfgfs": + groups = self._get_hybgroups(self._base['NMEM_ENS_GFS'], self._configs['efcs']['NMEM_EFCSGRP_GFS']) + cycledef = 'gdas_half,gdas' if self.cdump in ['enkfgdas'] else self.cdump.replace('enkf', '') + resources = self.get_resource('efcs') + task = create_wf_task('efcs', resources, cdump=self.cdump, envar=efcsenvars, dependency=dependencies, + metatask='efmn', varname='grp', varval=groups, cycledef=cycledef) + + return task + + def echgres(self): + + self._is_this_a_gdas_task(self.cdump, 'echgres') + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = 
{'type': 'task', 'name': f'{self.cdump}efcs01'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + cycledef = 'gdas_half,gdas' if self.cdump in ['enkfgdas'] else self.cdump + + resources = self.get_resource('echgres') + task = create_wf_task('echgres', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + def epos(self): + + def _get_eposgroups(epos): + fhmin = epos['FHMIN_ENKF'] + fhmax = epos['FHMAX_ENKF'] + fhout = epos['FHOUT_ENKF'] + if self.cdump == "enkfgfs": + fhmax = epos['FHMAX_ENKF_GFS'] + fhout = epos['FHOUT_ENKF_GFS'] + fhrs = range(fhmin, fhmax + fhout, fhout) + + neposgrp = epos['NEPOSGRP'] + ngrps = neposgrp if len(fhrs) > neposgrp else len(fhrs) + + fhrs = [f'f{fhr:03d}' for fhr in fhrs] + fhrs = np.array_split(fhrs, ngrps) + fhrs = [f.tolist() for f in fhrs] + + grp = ' '.join([f'{x:03d}' for x in range(0, ngrps)]) + dep = ' '.join([f[-1] for f in fhrs]) + lst = ' '.join(['_'.join(f) for f in fhrs]) + + return grp, dep, lst + + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}efmn'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + eposenvars = self.envars.copy() + eposenvar_dict = {'FHRGRP': '#grp#', + 'FHRLST': '#lst#'} + for key, value in eposenvar_dict.items(): + eposenvars.append(rocoto.create_envar(name=key, value=str(value))) + + varname1, varname2, varname3 = 'grp', 'dep', 'lst' + varval1, varval2, varval3 = _get_eposgroups(self._configs['epos']) + vardict = {varname2: varval2, varname3: varval3} + + cycledef = 'gdas_half,gdas' if self.cdump in ['enkfgdas'] else self.cdump.replace('enkf', '') + + resources = self.get_resource('epos') + task = create_wf_task('epos', resources, cdump=self.cdump, envar=eposenvars, dependency=dependencies, + metatask='epmn', varname=varname1, varval=varval1, vardict=vardict, cycledef=cycledef) + + return task + + def earc(self): + + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}epmn'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + earcenvars = self.envars.copy() + earcenvars.append(rocoto.create_envar(name='ENSGRP', value='#grp#')) + + groups = self._get_hybgroups(self._base['NMEM_ENS'], self._configs['earc']['NMEM_EARCGRP'], start_index=0) + + cycledef = 'gdas_half,gdas' if self.cdump in ['enkfgdas'] else self.cdump.replace('enkf', '') + + resources = self.get_resource('earc') + task = create_wf_task('earc', resources, cdump=self.cdump, envar=earcenvars, dependency=dependencies, + metatask='eamn', varname='grp', varval=groups, cycledef=cycledef) + + return task + + +def create_wf_task(task_name, resources, + cdump='gdas', cycledef=None, envar=None, dependency=None, + metatask=None, varname=None, varval=None, vardict=None, + final=False): + tasknamestr = f'{cdump}{task_name}' + metatask_dict = None + if metatask is not None: + tasknamestr = f'{tasknamestr}#{varname}#' + metatask_dict = {'metataskname': f'{cdump}{metatask}', + 'varname': f'{varname}', + 'varval': f'{varval}', + 'vardict': vardict} + + cycledefstr = cdump.replace('enkf', '') if cycledef is None else cycledef + + task_dict = {'taskname': f'{tasknamestr}', + 'cycledef': f'{cycledefstr}', + 'maxtries': '&MAXTRIES;', + 'command': f'&JOBS_DIR;/{task_name}.sh', + 'jobname': f'&PSLOT;_{tasknamestr}_@H', + 'resources': resources, + 'log': f'&ROTDIR;/logs/@Y@m@d@H/{tasknamestr}.log', + 'envars': envar, + 
'dependency': dependency, + 'final': final} + + task = rocoto.create_task(task_dict) if metatask is None else rocoto.create_metatask(task_dict, metatask_dict) + + return ''.join(task) + + +def get_wf_tasks(app_config: AppConfig) -> List: + """ + Take application configuration to return a list of all tasks for that application + """ + + tasks = [] + # Loop over all keys of cycles (CDUMP) + for cdump, cdump_tasks in app_config.task_names.items(): + task_obj = Tasks(app_config, cdump) # create Task object based on cdump + for task_name in cdump_tasks: + tasks.append(task_obj.get_task(task_name)) + + return tasks diff --git a/workflow/rocoto/workflow_tasks_gsl.py b/workflow/rocoto/workflow_tasks_gsl.py new file mode 100644 index 0000000000..8caa1a81e0 --- /dev/null +++ b/workflow/rocoto/workflow_tasks_gsl.py @@ -0,0 +1,1464 @@ +#!/usr/bin/env python3 + +import numpy as np +from typing import List +from applications import AppConfig +import rocoto.rocoto as rocoto +from pygw.template import Template, TemplateConstants + +__all__ = ['Tasks', 'create_wf_task', 'get_wf_tasks'] + + +class Tasks: + SERVICE_TASKS = ['arch', 'earc'] + VALID_TASKS = ['aerosol_init', 'coupled_ic', + 'prep', 'anal', 'sfcanl', 'analcalc', 'analdiag', 'arch', + 'atmanlinit', 'atmanlrun', 'atmanlfinal', + 'ocnanalprep', 'ocnanalbmat', 'ocnanalrun', 'ocnanalchkpt', 'ocnanalpost', 'ocnanalvrfy', + 'earc', 'ecen', 'echgres', 'ediag', 'efcs', + 'eobs', 'eomg', 'epos', 'esfc', 'eupd', + 'atmensanlinit', 'atmensanlrun', 'atmensanlfinal', + 'aeroanlinit', 'aeroanlrun', 'aeroanlfinal', + 'preplandobs', 'landanlinit', 'landanlrun', 'landanlfinal', + 'fcst', 'post', 'ocnpost', 'vrfy', 'metp', + 'postsnd', 'awips', 'gempak', + 'wafs', 'wafsblending', 'wafsblending0p25', + 'wafsgcip', 'wafsgrib2', 'wafsgrib20p25', + 'waveawipsbulls', 'waveawipsgridded', 'wavegempak', 'waveinit', + 'wavepostbndpnt', 'wavepostbndpntbll', 'wavepostpnt', 'wavepostsbs', 'waveprep'] + + def __init__(self, app_config: AppConfig, cdump: str) -> None: + + self.app_config = app_config + self.cdump = cdump + + # Save dict_configs and base in the internal state (never know where it may be needed) + self._configs = self.app_config.configs + self._base = self._configs['base'] + + self.n_tiles = 6 # TODO - this needs to be elsewhere + + envar_dict = {'RUN_ENVIR': self._base.get('RUN_ENVIR', 'emc'), + 'HOMEgfs': self._base.get('HOMEgfs'), + 'EXPDIR': self._base.get('EXPDIR'), + 'ROTDIR': self._base.get('ROTDIR'), + 'NET': 'gfs', + 'CDUMP': self.cdump, + 'RUN': self.cdump, + 'CDATE': '@Y@m@d@H', + 'PDY': '@Y@m@d', + 'cyc': '@H', + 'COMROOT': self._base.get('COMROOT'), + 'DATAROOT': self._base.get('DATAROOT')} + self.envars = self._set_envars(envar_dict) + + @staticmethod + def _set_envars(envar_dict) -> list: + + envars = [] + for key, value in envar_dict.items(): + envars.append(rocoto.create_envar(name=key, value=str(value))) + + return envars + + @staticmethod + def _get_hybgroups(nens: int, nmem_per_group: int, start_index: int = 1): + ngrps = nens / nmem_per_group + groups = ' '.join([f'{x:02d}' for x in range(start_index, int(ngrps) + 1)]) + return groups + + @staticmethod + def _is_this_a_gdas_task(cdump, task_name): + if cdump != 'enkfgdas': + raise TypeError(f'{task_name} must be part of the "enkfgdas" cycle and not {cdump}') + + def _template_to_rocoto_cycstring(self, template: str, subs_dict: dict = {}) -> str: + ''' + Takes a string templated with ${ } and converts it into a string suitable + for use in a rocoto . 
Some common substitutions are defined by + default. Any additional variables in the template and overrides of the + defaults can be passed in by an optional dict. + + Variables substitued by default: + ${ROTDIR} -> '&ROTDIR;' + ${RUN} -> self.cdump + ${DUMP} -> self.cdump + ${MEMDIR} -> '' + ${YMD} -> '@Y@m@d' + ${HH} -> '@H' + + Parameters + ---------- + template: str + Template string with variables to be replaced + subs_dict: dict, optional + Dictionary containing substitutions + + Returns + ------- + str + Updated string with variables substituted + + ''' + + # Defaults + rocoto_conversion_dict = { + 'ROTDIR': '&ROTDIR;', + 'RUN': self.cdump, + 'DUMP': self.cdump, + 'MEMDIR': '', + 'YMD': '@Y@m@d', + 'HH': '@H' + } + + rocoto_conversion_dict.update(subs_dict) + + return Template.substitute_structure(template, + TemplateConstants.DOLLAR_CURLY_BRACE, + rocoto_conversion_dict.get) + + def get_resource(self, task_name): + """ + Given a task name (task_name) and its configuration (task_names), + return a dictionary of resources (task_resource) used by the task. + Task resource dictionary includes: + account, walltime, cores, nodes, ppn, threads, memory, queue, partition, native + """ + + scheduler = self.app_config.scheduler + + task_config = self._configs[task_name] + + account = task_config['ACCOUNT'] + + walltime = task_config[f'wtime_{task_name}'] + if self.cdump in ['gfs'] and f'wtime_{task_name}_gfs' in task_config.keys(): + walltime = task_config[f'wtime_{task_name}_gfs'] + + cores = task_config[f'npe_{task_name}'] + if self.cdump in ['gfs'] and f'npe_{task_name}_gfs' in task_config.keys(): + cores = task_config[f'npe_{task_name}_gfs'] + + ppn = task_config[f'npe_node_{task_name}'] + if self.cdump in ['gfs'] and f'npe_node_{task_name}_gfs' in task_config.keys(): + ppn = task_config[f'npe_node_{task_name}_gfs'] + + nodes = np.int(np.ceil(np.float(cores) / np.float(ppn))) + + threads = task_config[f'nth_{task_name}'] + if self.cdump in ['gfs'] and f'nth_{task_name}_gfs' in task_config.keys(): + threads = task_config[f'nth_{task_name}_gfs'] + + memory = task_config.get(f'memory_{task_name}', None) + + native = None + if scheduler in ['pbspro']: + native = '-l debug=true,place=vscatter' + if task_config.get('is_exclusive', False): + native += ':exclhost' + elif scheduler in ['slurm']: + native = '&NATIVE_STR;' + + queue = task_config['QUEUE_SERVICE'] if task_name in Tasks.SERVICE_TASKS else task_config['QUEUE'] + + partition = None + if scheduler in ['slurm']: + partition = task_config['PARTITION_SERVICE'] if task_name in Tasks.SERVICE_TASKS else task_config[ + 'PARTITION_BATCH'] + + task_resource = {'account': account, + 'walltime': walltime, + 'nodes': nodes, + 'cores': cores, + 'ppn': ppn, + 'threads': threads, + 'memory': memory, + 'native': native, + 'queue': queue, + 'partition': partition} + + return task_resource + + def get_task(self, task_name, *args, **kwargs): + """ + Given a task_name, call the method for that task + """ + try: + return getattr(self, task_name, *args, **kwargs)() + except AttributeError: + raise AttributeError(f'"{task_name}" is not a valid task.\n' + + 'Valid tasks are:\n' + + f'{", ".join(Tasks.VALID_TASKS)}') + + # Specific Tasks begin here + def coupled_ic(self): + + cpl_ic = self._configs['coupled_ic'] + + deps = [] + + # Atm ICs + if self.app_config.do_atm: + atm_res = self._base.get('CASE', 'C384') + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ATMIC']}/@Y@m@d@H/{self.cdump}" + for file in ['gfs_ctrl.nc'] + \ + [f'{datatype}_data.tile{tile}.nc' + 
for datatype in ['gfs', 'sfc'] + for tile in range(1, self.n_tiles + 1)]: + data = f"{prefix}/{atm_res}/INPUT/{file}" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + else: # data-atmosphere + # TODO - need more information about how these forcings are stored + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_DATM']}/@Y@m@d@H" + data = f"{prefix}/gefs.@Y@m.nc" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + + # Ocean ICs + if self.app_config.do_ocean: + ocn_res = f"{self._base.get('OCNRES', '025'):03d}" + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_OCNIC']}/@Y@m@d@H/ocn" + for res in ['res'] + [f'res_{res_index}' for res_index in range(1, 4)]: + data = f"{prefix}/{ocn_res}/MOM.{res}.nc" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + + # Ice ICs + if self.app_config.do_ice: + ice_res = f"{self._base.get('ICERES', '025'):03d}" + ice_res_dec = f'{float(ice_res) / 100:.2f}' + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_ICEIC']}/@Y@m@d@H/ice" + data = f"{prefix}/{ice_res}/cice5_model_{ice_res_dec}.res_@Y@m@d@H.nc" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + + # Wave ICs + if self.app_config.do_wave: + prefix = f"{cpl_ic['BASE_CPLIC']}/{cpl_ic['CPL_WAVIC']}/@Y@m@d@H/wav" + for wave_grid in self._configs['waveinit']['waveGRD'].split(): + data = f"{prefix}/{wave_grid}/@Y@m@d.@H0000.restart.{wave_grid}" + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('coupled_ic') + task = create_wf_task('coupled_ic', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def prep(self): + + dump_suffix = self._base["DUMP_SUFFIX"] + gfs_cyc = self._base["gfs_cyc"] + dmpdir = self._base["DMPDIR"] + atm_hist_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_HISTORY_TMPL"], {'RUN': 'gdas'}) + dump_path = self._template_to_rocoto_cycstring(self._base["COM_OBSDMP_TMPL"], + {'DMPDIR': dmpdir, 'DUMP_SUFFIX': dump_suffix}) + + gfs_enkf = True if self.app_config.do_hybvar and 'gfs' in self.app_config.eupd_cdumps else False + + deps = [] + dep_dict = {'type': 'metatask', 'name': 'gdaspost', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + data = f'{atm_hist_path}/gdas.t@Hz.atmf009.nc' + dep_dict = {'type': 'data', 'data': data, 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + data = f'{dump_path}/{self.cdump}.t@Hz.updated.status.tm00.bufr_d' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + cycledef = self.cdump + if self.cdump in ['gfs'] and gfs_enkf and gfs_cyc != 4: + cycledef = 'gdas' + + resources = self.get_resource('prep') + task = create_wf_task('prep', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + def waveinit(self): + + resources = self.get_resource('waveinit') + dependencies = None + if self.app_config.mode in ['cycled']: + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'cycleexist', 'condition': 'not', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = 
rocoto.create_dependency(dep_condition='or', dep=deps) + + task = create_wf_task('waveinit', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def waveprep(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}waveinit'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('waveprep') + task = create_wf_task('waveprep', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def aerosol_init(self): + + input_path = self._template_to_rocoto_cycstring(self._base['COM_ATMOS_INPUT_TMPL']) + restart_path = self._template_to_rocoto_cycstring(self._base['COM_ATMOS_RESTART_TMPL']) + + deps = [] + # Files from current cycle + files = ['gfs_ctrl.nc'] + [f'gfs_data.tile{tile}.nc' for tile in range(1, self.n_tiles + 1)] + for file in files: + data = f'{input_path}/{file}' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + + # Calculate offset based on CDUMP = gfs | gdas + interval = None + if self.cdump in ['gfs']: + interval = self._base['INTERVAL_GFS'] + elif self.cdump in ['gdas']: + interval = self._base['INTERVAL'] + offset = f'-{interval}' + + # Files from previous cycle + files = [f'@Y@m@d.@H0000.fv_core.res.nc'] + \ + [f'@Y@m@d.@H0000.fv_core.res.tile{tile}.nc' for tile in range(1, self.n_tiles + 1)] + \ + [f'@Y@m@d.@H0000.fv_tracer.res.tile{tile}.nc' for tile in range(1, self.n_tiles + 1)] + + for file in files: + data = [f'{restart_path}', file] + dep_dict = {'type': 'data', 'data': data, 'offset': [offset, None]} + deps.append(rocoto.add_dependency(dep_dict)) + + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + cycledef = 'gfs_seq' + resources = self.get_resource('aerosol_init') + task = create_wf_task('aerosol_init', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + def anal(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_hybvar: + dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + else: + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('anal') + task = create_wf_task('anal', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def sfcanl(self): + + deps = [] + if self.app_config.do_jediatmvar: + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlfinal'} + else: + dep_dict = {'type': 'task', 'name': f'{self.cdump}anal'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_jedilandda: + dep_dict = {'type': 'task', 'name': f'{self.cdump}landanlfinal'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + else: + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('sfcanl') + task = create_wf_task('sfcanl', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def analcalc(self): + + deps = [] + if self.app_config.do_jediatmvar: + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlfinal'} + else: + dep_dict = {'type': 'task', 'name': f'{self.cdump}anal'} + deps.append(rocoto.add_dependency(dep_dict)) + 
dep_dict = {'type': 'task', 'name': f'{self.cdump}sfcanl'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_hybvar and self.cdump in ['gdas']: + dep_dict = {'type': 'task', 'name': 'enkfgdasechgres', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('analcalc') + task = create_wf_task('analcalc', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def analdiag(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}anal'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('analdiag') + task = create_wf_task('analdiag', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def atmanlinit(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_hybvar: + dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + else: + dependencies = rocoto.create_dependency(dep=deps) + + gfs_cyc = self._base["gfs_cyc"] + gfs_enkf = True if self.app_config.do_hybvar and 'gfs' in self.app_config.eupd_cdumps else False + + cycledef = self.cdump + if self.cdump in ['gfs'] and gfs_enkf and gfs_cyc != 4: + cycledef = 'gdas' + + resources = self.get_resource('atmanlinit') + task = create_wf_task('atmanlinit', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + def atmanlrun(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlinit'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('atmanlrun') + task = create_wf_task('atmanlrun', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def atmanlfinal(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmanlrun'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('atmanlfinal') + task = create_wf_task('atmanlfinal', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def aeroanlinit(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('aeroanlinit') + task = create_wf_task('aeroanlinit', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + return task + + def aeroanlrun(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}aeroanlinit'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('aeroanlrun') + task = create_wf_task('aeroanlrun', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def aeroanlfinal(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}aeroanlrun'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('aeroanlfinal') 
+ task = create_wf_task('aeroanlfinal', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def preplandobs(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('preplandobs') + task = create_wf_task('preplandobs', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=f'{self.cdump}_land_prep') + + return task + + def landanlinit(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}prep'} + deps.append(rocoto.add_dependency(dep_dict)) + + # Either gdaspreplandobs (runs in 18z cycle) or not 18z cycle + sub_deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}preplandobs'} + sub_deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'strneq', 'left': '@H', 'right': 18} + sub_deps.append(rocoto.add_dependency(dep_dict)) + deps.append(rocoto.create_dependency(dep_condition='xor', dep=sub_deps)) + + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('landanlinit') + task = create_wf_task('landanlinit', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + return task + + def landanlrun(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}landanlinit'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('landanlrun') + task = create_wf_task('landanlrun', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def landanlfinal(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}landanlrun'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('landanlfinal') + task = create_wf_task('landanlfinal', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def ocnanalprep(self): + + dump_suffix = self._base["DUMP_SUFFIX"] + dmpdir = self._base["DMPDIR"] + ocean_hist_path = self._template_to_rocoto_cycstring(self._base["COM_OCEAN_HISTORY_TMPL"]) + + deps = [] + data = f'{ocean_hist_path}/gdas.t@Hz.ocnf009.nc' + dep_dict = {'type': 'data', 'data': data, 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('ocnanalprep') + task = create_wf_task('ocnanalprep', + resources, + cdump=self.cdump, + envar=self.envars, + dependency=dependencies) + + return task + + def ocnanalbmat(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalprep'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('ocnanalbmat') + task = create_wf_task('ocnanalbmat', + resources, + cdump=self.cdump, + envar=self.envars, + dependency=dependencies) + + return task + + def ocnanalrun(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalbmat'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('ocnanalrun') + task = create_wf_task('ocnanalrun', + resources, + cdump=self.cdump, + envar=self.envars, + dependency=dependencies) + + return task + + def ocnanalchkpt(self): + + deps = [] + dep_dict 
= {'type': 'task', 'name': f'{self.cdump}ocnanalrun'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_mergensst: + data = f'&ROTDIR;/{self.cdump}.@Y@m@d/@H/atmos/{self.cdump}.t@Hz.sfcanl.nc' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('ocnanalchkpt') + task = create_wf_task('ocnanalchkpt', + resources, + cdump=self.cdump, + envar=self.envars, + dependency=dependencies) + + return task + + def ocnanalpost(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalchkpt'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('ocnanalpost') + task = create_wf_task('ocnanalpost', + resources, + cdump=self.cdump, + envar=self.envars, + dependency=dependencies) + + return task + + def ocnanalvrfy(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalpost'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('ocnanalvrfy') + task = create_wf_task('ocnanalvrfy', + resources, + cdump=self.cdump, + envar=self.envars, + dependency=dependencies) + + return task + + def fcst(self): + + fcst_map = {'forecast-only': self._fcst_forecast_only, + 'cycled': self._fcst_cycled} + + try: + task = fcst_map[self.app_config.mode]() + except KeyError: + raise NotImplementedError(f'{self.app_config.mode} is not a valid type.\n' + + 'Currently supported forecast types are:\n' + + f'{" | ".join(fcst_map.keys())}') + + return task + + def _fcst_forecast_only(self): + dependencies = [] + + dep_dict = {'type': 'task', 'name': f'{self.cdump}coupled_ic'} + dependencies.append(rocoto.add_dependency(dep_dict)) + + if self.app_config.do_wave and self.cdump in self.app_config.wave_cdumps: + wave_job = 'waveprep' if self.app_config.model_app in ['ATMW'] else 'waveinit' + dep_dict = {'type': 'task', 'name': f'{self.cdump}{wave_job}'} + dependencies.append(rocoto.add_dependency(dep_dict)) + + if self.app_config.do_aero: + # Calculate offset based on CDUMP = gfs | gdas + interval = None + if self.cdump in ['gfs']: + interval = self._base['INTERVAL_GFS'] + elif self.cdump in ['gdas']: + interval = self._base['INTERVAL'] + offset = f'-{interval}' + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}aerosol_init'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'cycleexist', 'condition': 'not', 'offset': offset} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies.append(rocoto.create_dependency(dep_condition='or', dep=deps)) + + dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies) + + resources = self.get_resource('fcst') + task = create_wf_task('fcst', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def _fcst_cycled(self): + + dep_dict = {'type': 'task', 'name': f'{self.cdump}sfcanl'} + dep = rocoto.add_dependency(dep_dict) + dependencies = rocoto.create_dependency(dep=dep) + + if self.app_config.do_jediocnvar: + dep_dict = {'type': 'task', 'name': f'{self.cdump}ocnanalpost'} + dependencies.append(rocoto.add_dependency(dep_dict)) + + if self.app_config.do_wave and self.cdump in self.app_config.wave_cdumps: + dep_dict = {'type': 'task', 'name': f'{self.cdump}waveprep'} + 
dependencies.append(rocoto.add_dependency(dep_dict)) + + if self.app_config.do_aero: + dep_dict = {'type': 'task', 'name': f'{self.cdump}aeroanlfinal'} + dependencies.append(rocoto.add_dependency(dep_dict)) + + if self.app_config.do_jedilandda: + dep_dict = {'type': 'task', 'name': f'{self.cdump}landanlfinal'} + dependencies.append(rocoto.add_dependency(dep_dict)) + + dependencies = rocoto.create_dependency(dep_condition='and', dep=dependencies) + + if self.cdump in ['gdas']: + dep_dict = {'type': 'cycleexist', 'condition': 'not', 'offset': '-06:00:00'} + dependencies.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='or', dep=dependencies) + + cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump + + resources = self.get_resource('fcst') + task = create_wf_task('fcst', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + def post(self): + add_anl_to_post = False + if self.app_config.mode in ['cycled']: + add_anl_to_post = True + + return self._post_task('post', add_anl_to_post=add_anl_to_post) + + def ocnpost(self): + if self.app_config.mode in ['forecast-only']: # TODO: fix ocnpost in cycled mode + return self._post_task('ocnpost', add_anl_to_post=False) + + def _post_task(self, task_name, add_anl_to_post=False): + if task_name not in ['post', 'ocnpost']: + raise KeyError(f'Invalid post-processing task: {task_name}') + + if task_name in ['ocnpost']: + add_anl_to_post = False + + def _get_postgroups(cdump, config, add_anl=False): + + fhmin = config['FHMIN'] + fhmax = config['FHMAX'] + fhout = config['FHOUT'] + + # Get a list of all forecast hours + fhrs = [] + if cdump in ['gdas']: + fhrs = range(fhmin, fhmax + fhout, fhout) + elif cdump in ['gfs']: + fhmax = np.max( + [config['FHMAX_GFS_00'], config['FHMAX_GFS_06'], config['FHMAX_GFS_12'], config['FHMAX_GFS_18']]) + fhout = config['FHOUT_GFS'] + fhmax_hf = config['FHMAX_HF_GFS'] + fhout_hf = config['FHOUT_HF_GFS'] + fhrs_hf = range(fhmin, fhmax_hf + fhout_hf, fhout_hf) + fhrs = list(fhrs_hf) + list(range(fhrs_hf[-1] + fhout, fhmax + fhout, fhout)) + + npostgrp = config['NPOSTGRP'] + ngrps = npostgrp if len(fhrs) > npostgrp else len(fhrs) + + fhrs = [f'f{fhr:03d}' for fhr in fhrs] + fhrs = np.array_split(fhrs, ngrps) + fhrs = [fhr.tolist() for fhr in fhrs] + + anl = ['anl'] if add_anl else [] + + grp = ' '.join(anl + [f'_{fhr[0]}-{fhr[-1]}' for fhr in fhrs]) + dep = ' '.join(anl + [fhr[-1] for fhr in fhrs]) + lst = ' '.join(anl + ['_'.join(fhr) for fhr in fhrs]) + + return grp, dep, lst + + deps = [] + atm_hist_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_HISTORY_TMPL"]) + data = f'{atm_hist_path}/{self.cdump}.t@Hz.atm.log#dep#.txt' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'task', 'name': f'{self.cdump}fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='or', dep=deps) + + postenvars = self.envars.copy() + postenvar_dict = {'FHRGRP': '#grp#', + 'FHRLST': '#lst#', + 'ROTDIR': self._base.get('ROTDIR')} + for key, value in postenvar_dict.items(): + postenvars.append(rocoto.create_envar(name=key, value=str(value))) + + varname1, varname2, varname3 = 'grp', 'dep', 'lst' + varval1, varval2, varval3 = _get_postgroups(self.cdump, self._configs[task_name], add_anl=add_anl_to_post) + vardict = {varname2: varval2, varname3: varval3} + + cycledef = 'gdas_half,gdas' if self.cdump in 
['gdas'] else self.cdump + + resources = self.get_resource(task_name) + task = create_wf_task(task_name, resources, cdump=self.cdump, envar=postenvars, dependency=dependencies, + metatask=task_name, varname=varname1, varval=varval1, vardict=vardict, cycledef=cycledef) + + return task + + def wavepostsbs(self): + deps = [] + for wave_grid in self._configs['wavepostsbs']['waveGRD'].split(): + wave_hist_path = self._template_to_rocoto_cycstring(self._base["COM_WAVE_HISTORY_TMPL"]) + data = f'{wave_hist_path}/{self.cdump}wave.out_grd.{wave_grid}.@Y@m@d.@H0000' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('wavepostsbs') + task = create_wf_task('wavepostsbs', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def wavepostbndpnt(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('wavepostbndpnt') + task = create_wf_task('wavepostbndpnt', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def wavepostbndpntbll(self): + deps = [] + wave_hist_path = self._template_to_rocoto_cycstring(self._base["COM_WAVE_HISTORY_TMPL"]) + data = f'{wave_hist_path}/{self.cdump}.t@Hz.atm.logf180.txt' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('wavepostbndpntbll') + task = create_wf_task('wavepostbndpntbll', resources, cdump=self.cdump, envar=self.envars, + dependency=dependencies) + + return task + + def wavepostpnt(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_wave_bnd: + dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostbndpntbll'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('wavepostpnt') + task = create_wf_task('wavepostpnt', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def wavegempak(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostsbs'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('wavegempak') + task = create_wf_task('wavegempak', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def waveawipsbulls(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostsbs'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostpnt'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('waveawipsbulls') + task = create_wf_task('waveawipsbulls', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def waveawipsgridded(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostsbs'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('waveawipsgridded') + task = 
create_wf_task('waveawipsgridded', resources, cdump=self.cdump, envar=self.envars, + dependency=dependencies) + + return task + + def wafs(self): + return self._wafs_task('wafs') + + def wafsgcip(self): + return self._wafs_task('wafsgcip') + + def wafsgrib2(self): + return self._wafs_task('wafsgrib2') + + def wafsgrib20p25(self): + return self._wafs_task('wafsgrib20p25') + + def _wafs_task(self, task_name): + if task_name not in ['wafs', 'wafsgcip', 'wafsgrib2', 'wafsgrib20p25']: + raise KeyError(f'Invalid WAFS task: {task_name}') + + wafs_path = self._template_to_rocoto_cycstring(self._base["COM_ATMOS_WAFS_TMPL"]) + + deps = [] + fhrlst = [6] + [*range(12, 36 + 3, 3)] + for fhr in fhrlst: + data = f'{wafs_path}/{self.cdump}.t@Hz.wafs.grb2if{fhr:03d}' + dep_dict = {'type': 'data', 'data': data} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource(task_name) + task = create_wf_task(task_name, resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def wafsblending(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}wafsgrib2'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('wafsblending') + task = create_wf_task('wafsblending', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def wafsblending0p25(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}wafsgrib20p25'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('wafsblending0p25') + task = create_wf_task('wafsblending0p25', resources, cdump=self.cdump, envar=self.envars, + dependency=dependencies) + + return task + + def postsnd(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('postsnd') + task = create_wf_task('postsnd', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def awips(self): + + def _get_awipsgroups(cdump, config): + + fhmin = config['FHMIN'] + fhmax = config['FHMAX'] + fhout = config['FHOUT'] + + # Get a list of all forecast hours + fhrs = [] + if cdump in ['gdas']: + fhrs = range(fhmin, fhmax + fhout, fhout) + elif cdump in ['gfs']: + fhmax = np.max( + [config['FHMAX_GFS_00'], config['FHMAX_GFS_06'], config['FHMAX_GFS_12'], config['FHMAX_GFS_18']]) + fhout = config['FHOUT_GFS'] + fhmax_hf = config['FHMAX_HF_GFS'] + fhout_hf = config['FHOUT_HF_GFS'] + if fhmax > 240: + fhmax = 240 + if fhmax_hf > 240: + fhmax_hf = 240 + fhrs_hf = list(range(fhmin, fhmax_hf + fhout_hf, fhout_hf)) + fhrs = fhrs_hf + list(range(fhrs_hf[-1] + fhout, fhmax + fhout, fhout)) + + nawipsgrp = config['NAWIPSGRP'] + ngrps = nawipsgrp if len(fhrs) > nawipsgrp else len(fhrs) + + fhrs = [f'f{fhr:03d}' for fhr in fhrs] + fhrs = np.array_split(fhrs, ngrps) + fhrs = [fhr.tolist() for fhr in fhrs] + + grp = ' '.join([f'_{fhr[0]}-{fhr[-1]}' for fhr in fhrs]) + dep = ' '.join([fhr[-1] for fhr in fhrs]) + lst = ' '.join(['_'.join(fhr) for fhr in fhrs]) + + return grp, dep, lst + + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}post'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + awipsenvars = 
self.envars.copy() + awipsenvar_dict = {'FHRGRP': '#grp#', + 'FHRLST': '#lst#', + 'ROTDIR': self._base.get('ROTDIR')} + for key, value in awipsenvar_dict.items(): + awipsenvars.append(rocoto.create_envar(name=key, value=str(value))) + + varname1, varname2, varname3 = 'grp', 'dep', 'lst' + varval1, varval2, varval3 = _get_awipsgroups(self.cdump, self._configs['awips']) + vardict = {varname2: varval2, varname3: varval3} + + resources = self.get_resource('awips') + task = create_wf_task('awips', resources, cdump=self.cdump, envar=awipsenvars, dependency=dependencies, + metatask='awips', varname=varname1, varval=varval1, vardict=vardict) + + return task + + def gempak(self): + + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}post'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('gempak') + task = create_wf_task('gempak', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def vrfy(self): + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}post'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump + + resources = self.get_resource('vrfy') + task = create_wf_task('vrfy', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + def fit2obs(self): + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}post'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump + + resources = self.get_resource('fit2obs') + task = create_wf_task('fit2obs', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + def metp(self): + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}post'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + metpenvars = self.envars.copy() + metpenvar_dict = {'SDATE_GFS': self._base.get('SDATE_GFS'), + # TODO - in Forecast-only, this is `SDATE` on the RHS + 'METPCASE': '#metpcase#'} + for key, value in metpenvar_dict.items(): + metpenvars.append(rocoto.create_envar(name=key, value=str(value))) + + varname1 = 'metpcase' + varval1 = 'g2g1 g2o1 pcp1' + + resources = self.get_resource('metp') + task = create_wf_task('metp', resources, cdump=self.cdump, envar=metpenvars, dependency=dependencies, + metatask='metp', varname=varname1, varval=varval1) + + return task + + def arch(self): + deps = [] + if self.app_config.do_vrfy: + dep_dict = {'type': 'task', 'name': f'{self.cdump}vrfy'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_fit2obs and self.cdump in ['gdas']: + dep_dict = {'type': 'task', 'name': f'{self.cdump}fit2obs'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_metp and self.cdump in ['gfs']: + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}metp'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_wave: + dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostsbs'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'task', 'name': f'{self.cdump}wavepostpnt'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_wave_bnd: + dep_dict = {'type': 'task', 'name': 
f'{self.cdump}wavepostbndpnt'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_ocean: + if self.app_config.mode in ['forecast-only']: # TODO: fix ocnpost to run in cycled mode + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}ocnpost'} + deps.append(rocoto.add_dependency(dep_dict)) + # If all verification and ocean/wave coupling is off, add the gdas/gfs post metatask as a dependency + if len(deps) == 0: + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}post'} + deps.append(rocoto.add_dependency(dep_dict)) + + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + cycledef = 'gdas_half,gdas' if self.cdump in ['gdas'] else self.cdump + + resources = self.get_resource('arch') + task = create_wf_task('arch', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + # Start of ensemble tasks + def eobs(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('eobs') + task = create_wf_task('eobs', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def eomg(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}eobs'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + eomgenvars = self.envars.copy() + eomgenvars.append(rocoto.create_envar(name='ENSGRP', value='#grp#')) + + groups = self._get_hybgroups(self._base['NMEM_ENS'], self._configs['eobs']['NMEM_EOMGGRP']) + + resources = self.get_resource('eomg') + task = create_wf_task('eomg', resources, cdump=self.cdump, envar=eomgenvars, dependency=dependencies, + metatask='eomn', varname='grp', varval=groups) + + return task + + def ediag(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}eobs'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('ediag') + task = create_wf_task('ediag', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def eupd(self): + deps = [] + if self.app_config.lobsdiag_forenkf: + dep_dict = {'type': 'task', 'name': f'{self.cdump}ediag'} + else: + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}eomn'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('eupd') + task = create_wf_task('eupd', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def atmensanlinit(self): + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}prep'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + cycledef = "gdas" + resources = self.get_resource('atmensanlinit') + task = create_wf_task('atmensanlinit', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + def atmensanlrun(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlinit'} + 
deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'metatask', 'name': 'enkfgdasepmn', 'offset': '-06:00:00'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('atmensanlrun') + task = create_wf_task('atmensanlrun', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def atmensanlfinal(self): + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlrun'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + resources = self.get_resource('atmensanlfinal') + task = create_wf_task('atmensanlfinal', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def ecen(self): + + def _get_ecengroups(): + + if self._base.get('DOIAU_ENKF', False): + fhrs = list(self._base.get('IAUFHRS', '6').split(',')) + + necengrp = self._configs['ecen']['NECENGRP'] + ngrps = necengrp if len(fhrs) > necengrp else len(fhrs) + + fhrs = [f'{int(fhr):03d}' for fhr in fhrs] + fhrs = np.array_split(fhrs, ngrps) + fhrs = [fhr.tolist() for fhr in fhrs] + + grp = ' '.join([f'{x:03d}' for x in range(0, ngrps)]) + dep = ' '.join([f[-1] for f in fhrs]) + lst = ' '.join(['_'.join(f) for f in fhrs]) + + else: + grp = '000' + dep = 'f006' + lst = 'f006' + + return grp, dep, lst + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}analcalc'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_jediatmens: + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlfinal'} + else: + dep_dict = {'type': 'task', 'name': f'{self.cdump}eupd'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + ecenenvars = self.envars.copy() + ecenenvar_dict = {'FHRGRP': '#grp#', + 'FHRLST': '#lst#'} + for key, value in ecenenvar_dict.items(): + ecenenvars.append(rocoto.create_envar(name=key, value=str(value))) + + varname1, varname2, varname3 = 'grp', 'dep', 'lst' + varval1, varval2, varval3 = _get_ecengroups() + vardict = {varname2: varval2, varname3: varval3} + + resources = self.get_resource('ecen') + task = create_wf_task('ecen', resources, cdump=self.cdump, envar=ecenenvars, dependency=dependencies, + metatask='ecmn', varname=varname1, varval=varval1, vardict=vardict) + return task + + def esfc(self): + + # eupd_cdump = 'gdas' if 'gdas' in self.app_config.eupd_cdumps else 'gfs' + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}analcalc'} + deps.append(rocoto.add_dependency(dep_dict)) + if self.app_config.do_jediatmens: + dep_dict = {'type': 'task', 'name': f'{self.cdump}atmensanlfinal'} + else: + dep_dict = {'type': 'task', 'name': f'{self.cdump}eupd'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + resources = self.get_resource('esfc') + task = create_wf_task('esfc', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies) + + return task + + def efcs(self): + + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}ecmn'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'task', 'name': f'{self.cdump}esfc'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + dep_dict = {'type': 'cycleexist', 'condition': 'not', 'offset': '-06:00:00'} + 
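+ # OR'ed with the task dependencies below so efcs can still run on the first half cycle, when no cycle exists 6 h earlier.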
dependencies.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='or', dep=dependencies) + + efcsenvars = self.envars.copy() + efcsenvars.append(rocoto.create_envar(name='ENSGRP', value='#grp#')) + + groups = self._get_hybgroups(self._base['NMEM_ENS'], self._configs['efcs']['NMEM_EFCSGRP']) + + if self.cdump == "enkfgfs": + groups = self._get_hybgroups(self._base['NMEM_ENS_GFS'], self._configs['efcs']['NMEM_EFCSGRP_GFS']) + cycledef = 'gdas_half,gdas' if self.cdump in ['enkfgdas'] else self.cdump.replace('enkf', '') + resources = self.get_resource('efcs') + task = create_wf_task('efcs', resources, cdump=self.cdump, envar=efcsenvars, dependency=dependencies, + metatask='efmn', varname='grp', varval=groups, cycledef=cycledef) + + return task + + def echgres(self): + + self._is_this_a_gdas_task(self.cdump, 'echgres') + + deps = [] + dep_dict = {'type': 'task', 'name': f'{self.cdump.replace("enkf","")}fcst'} + deps.append(rocoto.add_dependency(dep_dict)) + dep_dict = {'type': 'task', 'name': f'{self.cdump}efcs01'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep_condition='and', dep=deps) + + cycledef = 'gdas_half,gdas' if self.cdump in ['enkfgdas'] else self.cdump + + resources = self.get_resource('echgres') + task = create_wf_task('echgres', resources, cdump=self.cdump, envar=self.envars, dependency=dependencies, + cycledef=cycledef) + + return task + + def epos(self): + + def _get_eposgroups(epos): + fhmin = epos['FHMIN_ENKF'] + fhmax = epos['FHMAX_ENKF'] + fhout = epos['FHOUT_ENKF'] + if self.cdump == "enkfgfs": + fhmax = epos['FHMAX_ENKF_GFS'] + fhout = epos['FHOUT_ENKF_GFS'] + fhrs = range(fhmin, fhmax + fhout, fhout) + + neposgrp = epos['NEPOSGRP'] + ngrps = neposgrp if len(fhrs) > neposgrp else len(fhrs) + + fhrs = [f'f{fhr:03d}' for fhr in fhrs] + fhrs = np.array_split(fhrs, ngrps) + fhrs = [f.tolist() for f in fhrs] + + grp = ' '.join([f'{x:03d}' for x in range(0, ngrps)]) + dep = ' '.join([f[-1] for f in fhrs]) + lst = ' '.join(['_'.join(f) for f in fhrs]) + + return grp, dep, lst + + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}efmn'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + eposenvars = self.envars.copy() + eposenvar_dict = {'FHRGRP': '#grp#', + 'FHRLST': '#lst#'} + for key, value in eposenvar_dict.items(): + eposenvars.append(rocoto.create_envar(name=key, value=str(value))) + + varname1, varname2, varname3 = 'grp', 'dep', 'lst' + varval1, varval2, varval3 = _get_eposgroups(self._configs['epos']) + vardict = {varname2: varval2, varname3: varval3} + + cycledef = 'gdas_half,gdas' if self.cdump in ['enkfgdas'] else self.cdump.replace('enkf', '') + + resources = self.get_resource('epos') + task = create_wf_task('epos', resources, cdump=self.cdump, envar=eposenvars, dependency=dependencies, + metatask='epmn', varname=varname1, varval=varval1, vardict=vardict, cycledef=cycledef) + + return task + + def earc(self): + + deps = [] + dep_dict = {'type': 'metatask', 'name': f'{self.cdump}epmn'} + deps.append(rocoto.add_dependency(dep_dict)) + dependencies = rocoto.create_dependency(dep=deps) + + earcenvars = self.envars.copy() + earcenvars.append(rocoto.create_envar(name='ENSGRP', value='#grp#')) + + groups = self._get_hybgroups(self._base['NMEM_ENS'], self._configs['earc']['NMEM_EARCGRP'], start_index=0) + + cycledef = 'gdas_half,gdas' if self.cdump in ['enkfgdas'] else self.cdump.replace('enkf', '') + + resources = 
self.get_resource('earc') + task = create_wf_task('earc', resources, cdump=self.cdump, envar=earcenvars, dependency=dependencies, + metatask='eamn', varname='grp', varval=groups, cycledef=cycledef) + + return task + + +def create_wf_task(task_name, resources, + cdump='gdas', cycledef=None, envar=None, dependency=None, + metatask=None, varname=None, varval=None, vardict=None, + final=False): + tasknamestr = f'{cdump}{task_name}' + metatask_dict = None + if metatask is not None: + tasknamestr = f'{tasknamestr}#{varname}#' + metatask_dict = {'metataskname': f'{cdump}{metatask}', + 'varname': f'{varname}', + 'varval': f'{varval}', + 'vardict': vardict} + + cycledefstr = cdump.replace('enkf', '') if cycledef is None else cycledef + + task_dict = {'taskname': f'{tasknamestr}', + 'cycledef': f'{cycledefstr}', + 'maxtries': '&MAXTRIES;', + 'command': f'&JOBS_DIR;/{task_name}.sh', + 'jobname': f'&PSLOT;_{tasknamestr}_@H', + 'resources': resources, + 'log': f'&ROTDIR;/logs/@Y@m@d@H/{tasknamestr}.log', + 'envars': envar, + 'dependency': dependency, + 'final': final} + + task = rocoto.create_task(task_dict) if metatask is None else rocoto.create_metatask(task_dict, metatask_dict) + + return ''.join(task) + + +def get_wf_tasks(app_config: AppConfig) -> List: + """ + Take application configuration to return a list of all tasks for that application + """ + + tasks = [] + # Loop over all keys of cycles (CDUMP) + for cdump, cdump_tasks in app_config.task_names.items(): + task_obj = Tasks(app_config, cdump) # create Task object based on cdump + for task_name in cdump_tasks: + tasks.append(task_obj.get_task(task_name)) + + return tasks diff --git a/workflow/rocoto/workflow_xml.py b/workflow/rocoto/workflow_xml.py new file mode 120000 index 0000000000..a6e327179c --- /dev/null +++ b/workflow/rocoto/workflow_xml.py @@ -0,0 +1 @@ +workflow_xml_gsl.py \ No newline at end of file diff --git a/workflow/rocoto/workflow_xml_emc.py b/workflow/rocoto/workflow_xml_emc.py new file mode 100644 index 0000000000..55a3a24950 --- /dev/null +++ b/workflow/rocoto/workflow_xml_emc.py @@ -0,0 +1,227 @@ +#!/usr/bin/env python3 + +import os +from distutils.spawn import find_executable +from datetime import datetime +from pygw.timetools import to_timedelta +from collections import OrderedDict +from typing import Dict +from applications import AppConfig +from rocoto.workflow_tasks import get_wf_tasks +import rocoto.rocoto as rocoto + + +class RocotoXML: + + def __init__(self, app_config: AppConfig, rocoto_config: Dict) -> None: + + self._app_config = app_config + self.rocoto_config = rocoto_config + + self._base = self._app_config.configs['base'] + + self.preamble = self._get_preamble() + self.definitions = self._get_definitions() + self.header = self._get_workflow_header() + self.cycledefs = self._get_cycledefs() + task_list = get_wf_tasks(app_config) + self.tasks = '\n'.join(task_list) + self.footer = self._get_workflow_footer() + + self.xml = self._assemble_xml() + + @staticmethod + def _get_preamble(): + """ + Generate preamble for XML + """ + + strings = ['', + '', + ''] + + return '\n'.join(strings) + + def _get_definitions(self) -> str: + """ + Create entities related to the experiment + """ + + entity = OrderedDict() + + entity['PSLOT'] = self._base['PSLOT'] + + entity['ROTDIR'] = self._base['ROTDIR'] + entity['JOBS_DIR'] = self._base['BASE_JOB'] + + entity['MAXTRIES'] = self.rocoto_config['maxtries'] + + # Put them all in an XML key-value syntax + strings = [] + for key, value in entity.items(): + strings.append('\t' + 
rocoto.create_entity(key, value)) + + return '\n'.join(strings) + + def _get_workflow_header(self): + """ + Create the workflow header block + """ + + scheduler = self._app_config.scheduler + cyclethrottle = self.rocoto_config['cyclethrottle'] + taskthrottle = self.rocoto_config['taskthrottle'] + verbosity = self.rocoto_config['verbosity'] + + expdir = self._base['EXPDIR'] + + strings = ['', + ']>', + '', + f'', + '', + f'\t{expdir}/logs/@Y@m@d@H.log', + '', + '\t', + ''] + + return '\n'.join(strings) + + def _get_cycledefs(self): + + cycledef_map = {'cycled': self._get_cycledefs_cycled, + 'forecast-only': self._get_cycledefs_forecast_only} + + try: + cycledefs = cycledef_map[self._app_config.mode]() + except KeyError: + raise KeyError(f'{self._app_config.mode} is not a valid application mode.\n' + + 'Valid application modes are:\n' + + f'{", ".join(cycledef_map.keys())}') + + return cycledefs + + def _get_cycledefs_cycled(self): + sdate = self._base['SDATE'] + edate = self._base['EDATE'] + interval = self._base.get('INTERVAL', '06:00:00') + strings = [] + strings.append(f'\t{sdate.strftime("%Y%m%d%H%M")} {sdate.strftime("%Y%m%d%H%M")} {interval}') + sdate = sdate + to_timedelta(interval) + strings.append(f'\t{sdate.strftime("%Y%m%d%H%M")} {edate.strftime("%Y%m%d%H%M")} {interval}') + + if self._app_config.do_jedilandda: + sdate_land_str = sdate.replace(hour=18, minute=0, second=0).strftime("%Y%m%d%H%M") + edate_land_str = edate.strftime("%Y%m%d%H%M") + if edate >= sdate: + strings.append(f'\t{sdate_land_str} {edate_land_str} 24:00:00') + + if self._app_config.gfs_cyc != 0: + sdate_gfs = self._base['SDATE_GFS'] + edate_gfs = self._base['EDATE_GFS'] + interval_gfs = self._base['INTERVAL_GFS'] + strings.append(f'\t{sdate_gfs.strftime("%Y%m%d%H%M")} {edate_gfs.strftime("%Y%m%d%H%M")} {interval_gfs}') + + sdate_gfs = sdate_gfs + to_timedelta(interval_gfs) + if sdate_gfs <= edate_gfs: + strings.append(f'\t{sdate_gfs.strftime("%Y%m%d%H%M")} {edate_gfs.strftime("%Y%m%d%H%M")} {interval_gfs}') + + strings.append('') + strings.append('') + + return '\n'.join(strings) + + def _get_cycledefs_forecast_only(self): + sdate = self._base['SDATE'] + edate = self._base['EDATE'] + interval = self._base.get('INTERVAL_GFS', '24:00:00') + strings = [] + strings.append(f'\t{sdate.strftime("%Y%m%d%H%M")} {edate.strftime("%Y%m%d%H%M")} {interval}') + + sdate = sdate + to_timedelta(interval) + if sdate <= edate: + strings.append(f'\t{sdate.strftime("%Y%m%d%H%M")} {edate.strftime("%Y%m%d%H%M")} {interval}') + + strings.append('') + strings.append('') + + return '\n'.join(strings) + + @staticmethod + def _get_workflow_footer(): + """ + Generate workflow footer + """ + + return '\n\n' + + def _assemble_xml(self) -> str: + + strings = [self.preamble, + self.definitions, + self.header, + self.cycledefs, + self.tasks, + self.footer] + + return ''.join(strings) + + def write(self, xml_file: str = None, crontab_file: str = None): + self._write_xml(xml_file=xml_file) + self._write_crontab(crontab_file=crontab_file) + + def _write_xml(self, xml_file: str = None) -> None: + + expdir = self._base['EXPDIR'] + pslot = self._base['PSLOT'] + + if xml_file is None: + xml_file = f"{expdir}/{pslot}.xml" + + with open(xml_file, 'w') as fh: + fh.write(self.xml) + + def _write_crontab(self, crontab_file: str = None, cronint: int = 5) -> None: + """ + Create crontab to execute rocotorun every cronint (5) minutes + """ + + # No point creating a crontab if rocotorun is not available. 
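+ # The entry assembled below invokes rocotorun every `cronint` minutes; an illustrative
+ # example of the generated crontab line (paths are placeholders, not real defaults):
+ #   */5 * * * * /path/to/rocotorun -d /path/to/EXPDIR/PSLOT.db -w /path/to/EXPDIR/PSLOT.xml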
+ rocotoruncmd = find_executable('rocotorun') + if rocotoruncmd is None: + print('Failed to find rocotorun, crontab will not be created') + return + + expdir = self._base['EXPDIR'] + pslot = self._base['PSLOT'] + + rocotorunstr = f'{rocotoruncmd} -d {expdir}/{pslot}.db -w {expdir}/{pslot}.xml' + cronintstr = f'*/{cronint} * * * *' + + try: + replyto = os.environ['REPLYTO'] + except KeyError: + replyto = '' + + strings = ['', + f'#################### {pslot} ####################', + f'MAILTO="{replyto}"', + f'{cronintstr} {rocotorunstr}', + '#################################################################', + ''] + + if crontab_file is None: + crontab_file = f"{expdir}/{pslot}.crontab" + + with open(crontab_file, 'w') as fh: + fh.write('\n'.join(strings)) + + return diff --git a/workflow/rocoto/workflow_xml_gsl.py b/workflow/rocoto/workflow_xml_gsl.py new file mode 100644 index 0000000000..01cb8d7551 --- /dev/null +++ b/workflow/rocoto/workflow_xml_gsl.py @@ -0,0 +1,230 @@ +#!/usr/bin/env python3 + +import os +from distutils.spawn import find_executable +from datetime import datetime +from pygw.timetools import to_timedelta +from collections import OrderedDict +from typing import Dict +from applications import AppConfig +from rocoto.workflow_tasks import get_wf_tasks +import rocoto.rocoto as rocoto + + +class RocotoXML: + + def __init__(self, app_config: AppConfig, rocoto_config: Dict) -> None: + + self._app_config = app_config + self.rocoto_config = rocoto_config + + self._base = self._app_config.configs['base'] + + self.preamble = self._get_preamble() + self.definitions = self._get_definitions() + self.header = self._get_workflow_header() + self.cycledefs = self._get_cycledefs() + task_list = get_wf_tasks(app_config) + self.tasks = '\n'.join(task_list) + self.footer = self._get_workflow_footer() + + self.xml = self._assemble_xml() + + @staticmethod + def _get_preamble(): + """ + Generate preamble for XML + """ + + strings = ['', + '', + ''] + + return '\n'.join(strings) + + def _get_definitions(self) -> str: + """ + Create entities related to the experiment + """ + + entity = OrderedDict() + + entity['PSLOT'] = self._base['PSLOT'] + + entity['HOMEgfs'] = self._base['HOMEgfs'] + entity['EXPDIR'] = self._base['EXPDIR'] + entity['ROTDIR'] = self._base['ROTDIR'] + entity['JOBS_DIR'] = self._base['BASE_JOB'] + + entity['NATIVE_STR'] = '--export=NONE' + entity['MAXTRIES'] = self.rocoto_config['maxtries'] + + # Put them all in an XML key-value syntax + strings = [] + for key, value in entity.items(): + strings.append('\t' + rocoto.create_entity(key, value)) + + return '\n'.join(strings) + + def _get_workflow_header(self): + """ + Create the workflow header block + """ + + scheduler = self._app_config.scheduler + cyclethrottle = self.rocoto_config['cyclethrottle'] + taskthrottle = self.rocoto_config['taskthrottle'] + verbosity = self.rocoto_config['verbosity'] + + expdir = self._base['EXPDIR'] + + strings = ['', + ']>', + '', + f'', + '', + f'\t{expdir}/logs/@Y@m@d@H.log', + '', + '\t', + ''] + + return '\n'.join(strings) + + def _get_cycledefs(self): + + cycledef_map = {'cycled': self._get_cycledefs_cycled, + 'forecast-only': self._get_cycledefs_forecast_only} + + try: + cycledefs = cycledef_map[self._app_config.mode]() + except KeyError: + raise KeyError(f'{self._app_config.mode} is not a valid application mode.\n' + + 'Valid application modes are:\n' + + f'{", ".join(cycledef_map.keys())}') + + return cycledefs + + def _get_cycledefs_cycled(self): + sdate = self._base['SDATE'] + edate = 
self._base['EDATE'] + interval = self._base.get('INTERVAL', '06:00:00') + strings = [] + strings.append(f'\t{sdate.strftime("%Y%m%d%H%M")} {sdate.strftime("%Y%m%d%H%M")} {interval}') + sdate = sdate + to_timedelta(interval) + strings.append(f'\t{sdate.strftime("%Y%m%d%H%M")} {edate.strftime("%Y%m%d%H%M")} {interval}') + + if self._app_config.do_jedilandda: + sdate_land_str = sdate.replace(hour=18, minute=0, second=0).strftime("%Y%m%d%H%M") + edate_land_str = edate.strftime("%Y%m%d%H%M") + if edate >= sdate: + strings.append(f'\t{sdate_land_str} {edate_land_str} 24:00:00') + + if self._app_config.gfs_cyc != 0: + sdate_gfs = self._base['SDATE_GFS'] + edate_gfs = self._base['EDATE_GFS'] + interval_gfs = self._base['INTERVAL_GFS'] + strings.append(f'\t{sdate_gfs.strftime("%Y%m%d%H%M")} {edate_gfs.strftime("%Y%m%d%H%M")} {interval_gfs}') + + sdate_gfs = sdate_gfs + to_timedelta(interval_gfs) + if sdate_gfs <= edate_gfs: + strings.append(f'\t{sdate_gfs.strftime("%Y%m%d%H%M")} {edate_gfs.strftime("%Y%m%d%H%M")} {interval_gfs}') + + strings.append('') + strings.append('') + + return '\n'.join(strings) + + def _get_cycledefs_forecast_only(self): + sdate = self._base['SDATE'] + edate = self._base['EDATE'] + interval = self._base.get('INTERVAL_GFS', '24:00:00') + strings = [] + strings.append(f'\t{sdate.strftime("%Y%m%d%H%M")} {edate.strftime("%Y%m%d%H%M")} {interval}') + + sdate = sdate + to_timedelta(interval) + if sdate <= edate: + strings.append(f'\t{sdate.strftime("%Y%m%d%H%M")} {edate.strftime("%Y%m%d%H%M")} {interval}') + + strings.append('') + strings.append('') + + return '\n'.join(strings) + + @staticmethod + def _get_workflow_footer(): + """ + Generate workflow footer + """ + + return '\n\n' + + def _assemble_xml(self) -> str: + + strings = [self.preamble, + self.definitions, + self.header, + self.cycledefs, + self.tasks, + self.footer] + + return ''.join(strings) + + def write(self, xml_file: str = None, crontab_file: str = None): + self._write_xml(xml_file=xml_file) + self._write_crontab(crontab_file=crontab_file) + + def _write_xml(self, xml_file: str = None) -> None: + + expdir = self._base['EXPDIR'] + pslot = self._base['PSLOT'] + + if xml_file is None: + xml_file = f"{expdir}/{pslot}.xml" + + with open(xml_file, 'w') as fh: + fh.write(self.xml) + + def _write_crontab(self, crontab_file: str = None, cronint: int = 5) -> None: + """ + Create crontab to execute rocotorun every cronint (5) minutes + """ + + # No point creating a crontab if rocotorun is not available. 
+ rocotoruncmd = find_executable('rocotorun') + if rocotoruncmd is None: + print('Failed to find rocotorun, crontab will not be created') + return + + expdir = self._base['EXPDIR'] + pslot = self._base['PSLOT'] + + rocotorunstr = f'{rocotoruncmd} -d {expdir}/{pslot}.db -w {expdir}/{pslot}.xml' + cronintstr = f'*/{cronint} * * * *' + + try: + replyto = os.environ['REPLYTO'] + except KeyError: + replyto = '' + + strings = ['', + f'#################### {pslot} ####################', + f'MAILTO="{replyto}"', + f'{cronintstr} {rocotorunstr}', + '#################################################################', + ''] + + if crontab_file is None: + crontab_file = f"{expdir}/{pslot}.crontab" + + with open(crontab_file, 'w') as fh: + fh.write('\n'.join(strings)) + + return diff --git a/ush/rocoto/rocoto_viewer.py b/workflow/rocoto_viewer.py similarity index 97% rename from ush/rocoto/rocoto_viewer.py rename to workflow/rocoto_viewer.py index 63db6f2538..95dd9e76dd 100755 --- a/ush/rocoto/rocoto_viewer.py +++ b/workflow/rocoto_viewer.py @@ -12,9 +12,14 @@ # rocoto_viewer.py -w my_gfs-workflow.xml -d my_database.db # # The script is located in the directory para/exp/rocoto/rocotoviewers/rocotoviewer_curses/rocoto_viewer.py -# The view will continuously update every four minutes and reflect the current status of your workflow. You may use your mouse or arrow keys to select a particular task and view its status details by pressing the key \p c as indicated as \b \ (which runs \b rocotocheck) or perform a \b rocotorewind by pressing \b \ to restart the workflow at that point. Running \b rocotorewind causes the state information of that task to be cleared from the database and resubmits the job to the scheduler. +# The view will continuously update every four minutes and reflect the current status of your workflow. +# You may use your mouse or arrow keys to select a particular task and view its status details by pressing the key \p c as indicated as \b \ +# (which runs \b rocotocheck) or perform a \b rocotorewind by pressing \b \ to restart the workflow at that point. +# Running \b rocotorewind causes the state information of that task to be cleared from the database and resubmits the job to the scheduler. # -# Tasks marked with the \b \< symbol are \b metatasks and can be expanded by highlight that task with the mouse, and then clicking on the \b \< symbol which then changes to \b \> . You can then click on the \b \> symbol to collapse it again. Alternatively, you can select the 'x' to expand and collapse metatasks when selected. +# Tasks marked with the \b \< symbol are \b metatasks and can be expanded by highlight that task with the mouse, +# and then clicking on the \b \< symbol which then changes to \b \>. +# You can then click on the \b \> symbol to collapse it again. Alternatively, you can select the 'x' to expand and collapse metatasks when selected. 
# # @cond ROCOTO_VIEWER_CURSES @@ -125,6 +130,7 @@ mlines = 0 mcols = 0 + def eprint(message: str) -> None: """ Print to stderr instead of stdout @@ -208,7 +214,7 @@ def string_to_timedelta(td_string: str) -> timedelta: and mdict['negative'] == '-': return -dt return dt - except(TypeError, ValueError, AttributeError): + except (TypeError, ValueError, AttributeError): raise @@ -941,13 +947,13 @@ def get_tasklist(workflow_file): task_cycledefs = cycle_noname if list_tasks: print(f"{task_name}, {task_cycledefs}") - # dependancies = child.getiterator('dependency') + # dependancies = child.iter('dependency') # for dependency in dependancies: # for them in dependency.getchildren(): # print(them.attrib) tasks_ordered.append((task_name, task_cycledefs, log_file)) elif child.tag == 'metatask': - all_metatasks_iterator = child.getiterator('metatask') + all_metatasks_iterator = child.iter('metatask') all_vars = dict() all_tasks = [] for i, metatasks in enumerate(all_metatasks_iterator): @@ -1113,9 +1119,14 @@ def get_rocoto_stat(params, queue_stat): (theid, jobid, task_order, taskname, cycle, state, exit_status, duration, tries) = row if jobid != '-': if use_performance_metrics: - line = f"{datetime.fromtimestamp(cycle).strftime('%Y%m%d%H%M')} {taskname} {str(jobid)} {str(state)} {str(exit_status)} {str(tries)} {str(duration).split('.')[0]} {str(slots)} {str(qtime)} {str(cputime).split('.')[0]} {str(runtime)}" + line = (f"{datetime.fromtimestamp(cycle).strftime('%Y%m%d%H%M')} " + f"{taskname} {str(jobid)} {str(state)} {str(exit_status)} " + f"{str(tries)} {str(duration).split('.')[0]} {str(slots)} " + f"{str(qtime)} {str(cputime).split('.')[0]} {str(runtime)}") else: - line = f"{datetime.fromtimestamp(cycle).strftime('%Y%m%d%H%M')} {taskname} {str(jobid)} {str(state)} {str(exit_status)} {str(tries)} {str(duration).split('.')[0]}" + line = (f"{datetime.fromtimestamp(cycle).strftime('%Y%m%d%H%M')} " + f"{taskname} {str(jobid)} {str(state)} {str(exit_status)} " + f"{str(tries)} {str(duration).split('.')[0]}") info[cycle].append(line) for every_cycle in cycles: @@ -1279,7 +1290,8 @@ def main(screen): use_multiprocessing = False # header_string = ' '*18+'CYCLE'+' '*17+'TASK'+' '*39+'JOBID'+' '*6+'STATE'+' '*9+'EXIT'+' '*2+'TRIES'+' '*2+'DURATION' - header_string = ' ' * 7 + 'CYCLE' + ' ' * (int(job_name_length_max / 2) + 3) + 'TASK' + ' ' * (int(job_name_length_max / 2) + 3) + 'JOBID' + ' ' * 6 + 'STATE' + ' ' * 9 + 'EXIT' + ' ' * 1 + 'TRIES' + ' ' * 1 + 'DURATION' + header_string = ' ' * 7 + 'CYCLE' + ' ' * (int(job_name_length_max / 2) + 3) + 'TASK' + ' ' * (int(job_name_length_max / 2) + 3) + \ + 'JOBID' + ' ' * 6 + 'STATE' + ' ' * 9 + 'EXIT' + ' ' * 1 + 'TRIES' + ' ' * 1 + 'DURATION' header_string_under = '=== (updated:tttttttttttttttt) =================== PSLOT: pslot ' + '=' * 44 global use_performance_metrics @@ -1336,8 +1348,10 @@ def main(screen): html_ptr = open(html_output_file, 'w') html_ptr.write(ccs_html) stat_update_time = str(datetime.now()).rsplit(':', 1)[0] - html_discribe_line = f'\n\n\n\n' - html_discribe_line += f'\n\n
ExpandRefreshed: {stat_update_time}PSLOT: {PSLOT}
ROTDIR: {workflow_name}Turn Around Times
\n
\n' + html_discribe_line = f'\n\n\n\n' + html_discribe_line += f'' + html_discribe_line += f'\n\n
' + html_discribe_line += f'ExpandRefreshed: {stat_update_time}PSLOT: {PSLOT}
ROTDIR: {workflow_name}Turn Around Times
\n
\n' html_discribe_line += html_header_line html_ptr.write(html_discribe_line) else: @@ -1701,7 +1715,9 @@ def main(screen): column = column[:7] html_line += f'{column}' elif i == 3: - if meta_tasks[cycle][line_num][1] and len(metatasks_state_string_cycle[cycle][columns[1]].split()) != 1 and metatasks_state_cycle[cycle][columns[1]]: + if meta_tasks[cycle][line_num][1] \ + and len(metatasks_state_string_cycle[cycle][columns[1]].split()) != 1 \ + and metatasks_state_cycle[cycle][columns[1]]: column = metatasks_state_string_cycle[cycle][columns[1]] if len(column) > 15: if column.split()[1] == 'SUCCEEDED': @@ -1780,8 +1796,10 @@ def main(screen): html_ptr = open(html_output_file, 'w') html_ptr.write(ccs_html) stat_update_time = str(datetime.now()).rsplit(':', 1)[0] - html_discribe_line = f'\n\n\n\n' - html_discribe_line += f'\n\n
CollapseRefreshed: {stat_update_time}PSLOT: {PSLOT}
ROTDIR: {workflow_name}Turn Around Times
\n
\n' + html_discribe_line = f'\n\n\n\n' + html_discribe_line += f'\n\n
' + html_discribe_line += f'CollapseRefreshed: {stat_update_time}PSLOT: {PSLOT}
ROTDIR: {workflow_name}' + html_discribe_line += f'Turn Around Times
\n
\n' html_discribe_line += html_header_line html_ptr.write(html_discribe_line) html_output_firstpass = False @@ -1947,7 +1965,9 @@ def main(screen): else: pad.addstr(job_id + ' ' * (11 - len(job_id))) elif i == 3: - if meta_tasks[cycle][line_num][1] and len(metatasks_state_string_cycle[cycle][columns[1]].split()) != 1 and metatasks_state_cycle[cycle][columns[1]]: + if meta_tasks[cycle][line_num][1] \ + and len(metatasks_state_string_cycle[cycle][columns[1]].split()) != 1 \ + and metatasks_state_cycle[cycle][columns[1]]: column = metatasks_state_string_cycle[cycle][columns[1]] if red_override: the_text_color = 2 diff --git a/workflow/setup_ecf.py b/workflow/setup_ecf.py new file mode 100755 index 0000000000..f4361a6969 --- /dev/null +++ b/workflow/setup_ecf.py @@ -0,0 +1,73 @@ +#!/usr/bin/env python3 + +""" + PROGRAM: + Create a workflow file for use by a supercomputer. + AUTHOR: + Kyle Nevins + kyle.nevins@noaa.gov + FILE DEPENDENCIES: + 1. The configuration file that defines what jobs to run. It should be a + YAML file following the syntax defined in the README. + 2. config files for the experiment; e.g. config.base, config.fcst[.gfs] + etc. + Without this dependency, the script will fail + 3. The workflow utils package from the existing Rocoto generator. That + is used to read in the configuration files in the expdir. + 4. Any scripts defined in the YAML file must be present within the + script repository. + OUTPUT: + 1. Either an ecFlow definition file or a Rocoto XML file + 2. The folders and scripts needed to run either the ecflow suite or + Rocoto suite. +""" + +import os +from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter +from configuration import Configuration +from ecFlow.ecflow_setup import Ecflowsetup + + +def input_args(): + """ + Method to collect user arguments for `setup_workflow.py` + """ + parser = ArgumentParser(description=""" Create the workflow files for + ecFlow by deploying scripts and definition + files or Rocoto""", + formatter_class=ArgumentDefaultsHelpFormatter) + parser.add_argument('--ecflow-config', type=str, + default='ecflow_build.yml', required=False, + help='ecFlow Generator configuration file') + parser.add_argument('--expdir', type=str, + required=False, default=os.environ['PWD'], + help="""This is to be the full path to experiment' + 'directory containing config files""") + parser.add_argument('--savedir', type=str, + default=os.environ['PWD'], required=False, + help='Location to save the definition files') + arguments = parser.parse_args() + + return arguments + + +if __name__ == "__main__": + """ + This is the main function that will read in the command line arguments + using the parse_command_line function and create an array for the + environment configurations to be used throughout the application. + + For the ecFlow setup, it sets up a new workflow and then uses the generic + functions which are available for the Rocoto setup as well of + generate_workflow and save. 
+ """ + + args = input_args() + + cfg = Configuration(args.expdir) + envconfigs = dict() + envconfigs['base'] = cfg.parse_config('config.base') + + workflow = Ecflowsetup(args, envconfigs) + workflow.generate_workflow() + workflow.save() diff --git a/workflow/setup_expt.py b/workflow/setup_expt.py new file mode 100755 index 0000000000..9e91c860da --- /dev/null +++ b/workflow/setup_expt.py @@ -0,0 +1,490 @@ +#!/usr/bin/env python3 + +""" +Entry point for setting up an experiment in the global-workflow +""" + +import os +import glob +import shutil +import warnings +from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter, SUPPRESS + +from hosts import Host + +from pygw.yaml_file import parse_j2yaml +from pygw.attrdict import AttrDict +from pygw.timetools import to_datetime, to_timedelta, datetime_to_YMDH + + +_here = os.path.dirname(__file__) +_top = os.path.abspath(os.path.join(os.path.abspath(_here), '..')) + + +def makedirs_if_missing(dirname): + """ + Creates a directory if not already present + """ + if not os.path.exists(dirname): + os.makedirs(dirname) + + +def fill_COMROT(host, inputs): + """ + Method to populate the COMROT for supported modes. + INPUTS: + host: host object from class Host + inputs: user inputs to setup_expt.py + """ + + fill_modes = { + 'cycled': fill_COMROT_cycled, + 'forecast-only': fill_COMROT_forecasts + } + + try: + fill_modes[inputs.mode](host, inputs) + except KeyError: + raise NotImplementedError(f'{inputs.mode} is not a supported mode.\n' + + 'Currently supported modes are:\n' + + f'{" | ".join(fill_modes.keys())}') + + return + + +def fill_COMROT_cycled(host, inputs): + """ + Implementation of 'fill_COMROT' for cycled mode + """ + + comrot = os.path.join(inputs.comrot, inputs.pslot) + + do_ocean = do_ice = do_med = False + + if inputs.app in ['S2S', 'S2SW']: + do_ocean = do_ice = do_med = True + + if inputs.icsdir is None: + warnings.warn("User did not provide '--icsdir' to stage initial conditions") + return + + rdatestr = datetime_to_YMDH(inputs.idate - to_timedelta('T06H')) + idatestr = datetime_to_YMDH(inputs.idate) + + if os.path.isdir(os.path.join(inputs.icsdir, f'{inputs.cdump}.{rdatestr[:8]}', rdatestr[8:], 'model_data', 'atmos')): + flat_structure = False + else: + flat_structure = True + + # Destination always uses the new COM structure + # These should match the templates defined in config.com + if inputs.start in ['warm']: + dst_atm_dir = os.path.join('model_data', 'atmos', 'restart') + dst_med_dir = os.path.join('model_data', 'med', 'restart') + else: + dst_atm_dir = os.path.join('model_data', 'atmos', 'input') + dst_med_dir = '' # no mediator files for a "cold start" + do_med = False + dst_ocn_rst_dir = os.path.join('model_data', 'ocean', 'restart') + dst_ocn_anl_dir = os.path.join('analysis', 'ocean') + dst_ice_dir = os.path.join('model_data', 'ice', 'restart') + dst_atm_anl_dir = os.path.join('analysis', 'atmos') + + if flat_structure: + # ICs are in the old flat COM structure + if inputs.start in ['warm']: # This is warm start experiment + src_atm_dir = os.path.join('atmos', 'RESTART') + src_med_dir = os.path.join('med', 'RESTART') + elif inputs.start in ['cold']: # This is a cold start experiment + src_atm_dir = os.path.join('atmos', 'INPUT') + src_med_dir = '' # no mediator files for a "cold start" + do_med = False + # ocean and ice have the same filenames for warm and cold + src_ocn_rst_dir = os.path.join('ocean', 'RESTART') + src_ocn_anl_dir = 'ocean' + src_ice_dir = os.path.join('ice', 'RESTART') + src_atm_anl_dir = 'atmos' 
+ else: + src_atm_dir = dst_atm_dir + src_med_dir = dst_med_dir + src_ocn_rst_dir = dst_ocn_rst_dir + src_ocn_anl_dir = dst_ocn_anl_dir + src_ice_dir = dst_ice_dir + src_atm_anl_dir = dst_atm_anl_dir + + def link_files_from_src_to_dst(src_dir, dst_dir): + files = os.listdir(src_dir) + for fname in files: + os.symlink(os.path.join(src_dir, fname), + os.path.join(dst_dir, fname)) + return + + # Link ensemble member initial conditions + if inputs.nens > 0: + if inputs.start in ['warm']: + enkfdir = f'enkf{inputs.cdump}.{rdatestr[:8]}/{rdatestr[8:]}' + elif inputs.start in ['cold']: + enkfdir = f'enkf{inputs.cdump}.{idatestr[:8]}/{idatestr[8:]}' + + for ii in range(1, inputs.nens + 1): + memdir = f'mem{ii:03d}' + # Link atmospheric files + dst_dir = os.path.join(comrot, enkfdir, memdir, dst_atm_dir) + src_dir = os.path.join(inputs.icsdir, enkfdir, memdir, src_atm_dir) + makedirs_if_missing(dst_dir) + link_files_from_src_to_dst(src_dir, dst_dir) + # ocean, ice, etc. TBD ... + + # Link deterministic initial conditions + + # Link atmospheric files + if inputs.start in ['warm']: + detdir = f'{inputs.cdump}.{rdatestr[:8]}/{rdatestr[8:]}' + elif inputs.start in ['cold']: + detdir = f'{inputs.cdump}.{idatestr[:8]}/{idatestr[8:]}' + + dst_dir = os.path.join(comrot, detdir, dst_atm_dir) + src_dir = os.path.join(inputs.icsdir, detdir, src_atm_dir) + makedirs_if_missing(dst_dir) + link_files_from_src_to_dst(src_dir, dst_dir) + + # Link ocean files + if do_ocean: + detdir = f'{inputs.cdump}.{rdatestr[:8]}/{rdatestr[8:]}' + dst_dir = os.path.join(comrot, detdir, dst_ocn_rst_dir) + src_dir = os.path.join(inputs.icsdir, detdir, src_ocn_rst_dir) + makedirs_if_missing(dst_dir) + link_files_from_src_to_dst(src_dir, dst_dir) + + # First 1/2 cycle needs a MOM6 increment + incdir = f'{inputs.cdump}.{idatestr[:8]}/{idatestr[8:]}' + incfile = f'{inputs.cdump}.t{idatestr[8:]}z.ocninc.nc' + src_file = os.path.join(inputs.icsdir, incdir, src_ocn_anl_dir, incfile) + dst_file = os.path.join(comrot, incdir, dst_ocn_anl_dir, incfile) + makedirs_if_missing(os.path.join(comrot, incdir, dst_ocn_anl_dir)) + os.symlink(src_file, dst_file) + + # Link ice files + if do_ice: + detdir = f'{inputs.cdump}.{rdatestr[:8]}/{rdatestr[8:]}' + dst_dir = os.path.join(comrot, detdir, dst_ice_dir) + src_dir = os.path.join(inputs.icsdir, detdir, src_ice_dir) + makedirs_if_missing(dst_dir) + link_files_from_src_to_dst(src_dir, dst_dir) + + # Link mediator files + if do_med: + detdir = f'{inputs.cdump}.{rdatestr[:8]}/{rdatestr[8:]}' + dst_dir = os.path.join(comrot, detdir, dst_med_dir) + src_dir = os.path.join(inputs.icsdir, detdir, src_med_dir) + makedirs_if_missing(dst_dir) + link_files_from_src_to_dst(src_dir, dst_dir) + + # Link bias correction and radiance diagnostics files + detdir = f'{inputs.cdump}.{idatestr[:8]}/{idatestr[8:]}' + src_dir = os.path.join(inputs.icsdir, detdir, src_atm_anl_dir) + dst_dir = os.path.join(comrot, detdir, dst_atm_anl_dir) + makedirs_if_missing(dst_dir) + for ftype in ['abias', 'abias_pc', 'abias_air', 'radstat']: + fname = f'{inputs.cdump}.t{idatestr[8:]}z.{ftype}' + src_file = os.path.join(src_dir, fname) + if os.path.exists(src_file): + os.symlink(src_file, os.path.join(dst_dir, fname)) + + return + + +def fill_COMROT_forecasts(host, inputs): + """ + Implementation of 'fill_COMROT' for forecast-only mode + """ + print('forecast-only mode treats ICs differently and cannot be staged here') + return + + +def fill_EXPDIR(inputs): + """ + Method to copy config files from workflow to experiment directory + INPUTS: 
+ inputs: user inputs to `setup_expt.py` + """ + configdir = inputs.configdir + expdir = os.path.join(inputs.expdir, inputs.pslot) + + configs = glob.glob(f'{configdir}/config.*') + exclude_configs = ['base', 'base.emc.dyn', 'base.nco.static', 'fv3.nco.static'] + for exclude in exclude_configs: + try: + configs.remove(f'{configdir}/config.{exclude}') + except ValueError: + pass + if len(configs) == 0: + raise IOError(f'no config files found in {configdir}') + for config in configs: + shutil.copy(config, expdir) + + return + + +def update_configs(host, inputs): + + def _update_defaults(dict_in: dict) -> dict: + defaults = dict_in.pop('defaults', AttrDict()) + defaults.update(dict_in) + return defaults + + # Read in the YAML file to fill out templates and override host defaults + data = AttrDict(host.info, **inputs.__dict__) + data.HOMEgfs = _top + yaml_path = inputs.yaml + yaml_dict = _update_defaults(AttrDict(parse_j2yaml(yaml_path, data))) + + # First update config.base + edit_baseconfig(host, inputs, yaml_dict) + + # loop over other configs and update them + for cfg in yaml_dict.keys(): + if cfg == 'base': + continue + cfg_file = f'{inputs.expdir}/{inputs.pslot}/config.{cfg}' + cfg_dict = get_template_dict(yaml_dict[cfg]) + edit_config(cfg_file, cfg_file, cfg_dict) + + return + + +def edit_baseconfig(host, inputs, yaml_dict): + """ + Parses and populates the templated `config.base.emc.dyn` to `config.base` + """ + + tmpl_dict = { + "@HOMEgfs@": _top, + "@MACHINE@": host.machine.upper()} + + # Replace host related items + extend_dict = get_template_dict(host.info) + tmpl_dict = dict(tmpl_dict, **extend_dict) + + extend_dict = dict() + extend_dict = { + "@PSLOT@": inputs.pslot, + "@SDATE@": datetime_to_YMDH(inputs.idate), + "@EDATE@": datetime_to_YMDH(inputs.edate), + "@CASECTL@": f'C{inputs.resdet}', + "@EXPDIR@": inputs.expdir, + "@ROTDIR@": inputs.comrot, + "@EXP_WARM_START@": inputs.warm_start, + "@MODE@": inputs.mode, + "@gfs_cyc@": inputs.gfs_cyc, + "@APP@": inputs.app + } + tmpl_dict = dict(tmpl_dict, **extend_dict) + + extend_dict = dict() + if getattr(inputs, 'nens', 0) > 0: + extend_dict = { + "@CASEENS@": f'C{inputs.resens}', + "@NMEM_ENS@": inputs.nens, + } + tmpl_dict = dict(tmpl_dict, **extend_dict) + + extend_dict = dict() + if inputs.mode in ['cycled']: + extend_dict = { + "@DOHYBVAR@": "YES" if inputs.nens > 0 else "NO", + } + tmpl_dict = dict(tmpl_dict, **extend_dict) + + # All apps and modes now use the same physics and CCPP suite by default + extend_dict = {"@CCPP_SUITE@": "FV3_GFS_v17_p8", "@IMP_PHYSICS@": 8} + tmpl_dict = dict(tmpl_dict, **extend_dict) + + try: + tmpl_dict = dict(tmpl_dict, **get_template_dict(yaml_dict['base'])) + except KeyError: + pass + + base_input = f'{inputs.configdir}/config.base.emc.dyn' + base_output = f'{inputs.expdir}/{inputs.pslot}/config.base' + edit_config(base_input, base_output, tmpl_dict) + + return + + +def edit_config(input_config, output_config, config_dict): + + # Read input config + with open(input_config, 'rt') as fi: + config_str = fi.read() + + # Substitute from config_dict + for key, val in config_dict.items(): + config_str = config_str.replace(key, str(val)) + + # Ensure no output_config file exists + if os.path.exists(output_config): + os.unlink(output_config) + + # Write output config + with open(output_config, 'wt') as fo: + fo.write(config_str) + + print(f'EDITED: {output_config} as per user input.') + + return + + +def get_template_dict(input_dict): + output_dict = dict() + for key, value in input_dict.items(): + 
output_dict[f'@{key}@'] = value + + return output_dict + + +def input_args(): + """ + Method to collect user arguments for `setup_expt.py` + """ + + description = """ + Setup files and directories to start a GFS parallel.\n + Create EXPDIR, copy config files.\n + Create COMROT experiment directory structure, + """ + + parser = ArgumentParser(description=description, + formatter_class=ArgumentDefaultsHelpFormatter) + + # Set up sub-parsers for various modes of experimentation + sysparser = parser.add_subparsers(dest='system') + gfs = sysparser.add_parser('gfs', help='arguments for GFS') + gefs = sysparser.add_parser('gefs', help='arguments for GEFS') + + modeparser = gfs.add_subparsers(dest='mode') + cycled = modeparser.add_parser('cycled', help='arguments for cycled mode') + forecasts = modeparser.add_parser('forecast-only', help='arguments for forecast-only mode') + + # Common arguments across all modes + for subp in [cycled, forecasts, gefs]: + subp.add_argument('--pslot', help='parallel experiment name', + type=str, required=False, default='test') + subp.add_argument('--resdet', help='resolution of the deterministic model forecast', + type=int, required=False, default=384) + subp.add_argument('--comrot', help='full path to COMROT', + type=str, required=False, default=os.getenv('HOME')) + subp.add_argument('--expdir', help='full path to EXPDIR', + type=str, required=False, default=os.getenv('HOME')) + subp.add_argument('--idate', help='starting date of experiment, initial conditions must exist!', + required=True, type=lambda dd: to_datetime(dd)) + subp.add_argument('--edate', help='end date experiment', required=True, type=lambda dd: to_datetime(dd)) + + ufs_apps = ['ATM', 'ATMA', 'ATMW', 'S2S', 'S2SA', 'S2SW'] + + # GFS-only arguments + for subp in [cycled, forecasts]: + subp.add_argument('--start', help='restart mode: warm or cold', type=str, + choices=['warm', 'cold'], required=False, default='cold') + subp.add_argument('--cdump', help='CDUMP to start the experiment', + type=str, required=False, default='gdas') + # --configdir is hidden from help + subp.add_argument('--configdir', help=SUPPRESS, type=str, required=False, default=os.path.join(_top, 'parm/config/gfs')) + subp.add_argument('--yaml', help='Defaults to substitute from', type=str, + required=False, default=os.path.join(_top, 'parm/config/gfs/yaml/defaults.yaml')) + + # ensemble-only arguments + for subp in [cycled, gefs]: + subp.add_argument('--resens', help='resolution of the ensemble model forecast', + type=int, required=False, default=192) + subp.add_argument('--nens', help='number of ensemble members', + type=int, required=False, default=20) + + # GFS/GEFS forecast-only additional arguments + for subp in [forecasts, gefs]: + subp.add_argument('--app', help='UFS application', type=str, + choices=ufs_apps + ['S2SWA'], required=False, default='ATM') + subp.add_argument('--gfs_cyc', help='Number of forecasts per day', type=int, + choices=[1, 2, 4], default=1, required=False) + + # cycled mode additional arguments + cycled.add_argument('--icsdir', help='full path to initial condition directory', type=str, required=False, default=None) + cycled.add_argument('--app', help='UFS application', type=str, + choices=ufs_apps, required=False, default='ATM') + cycled.add_argument('--gfs_cyc', help='cycles to run forecast', type=int, + choices=[0, 1, 2, 4], default=1, required=False) + + # GEFS-only arguments + # Create hidden mode argument since there is real option for GEFS + gefs.add_argument('--mode', help=SUPPRESS, type=str, 
required=False, default='forecast-only') + # Create hidden start argument since GEFS is always cold start + gefs.add_argument('--start', help=SUPPRESS, type=str, required=False, default='cold') + # Create hidden arguments for configdir and yaml + gefs.add_argument('--configdir', help=SUPPRESS, type=str, required=False, + default=os.path.join(_top, 'parm/config/gefs')) + gefs.add_argument('--yaml', help='Defaults to substitute from', type=str, required=False, + default=os.path.join(_top, 'parm/config/gefs/yaml/defaults.yaml')) + + args = parser.parse_args() + + # Add an entry for warm_start = .true. or .false. + if args.start in ['warm']: + args.warm_start = ".true." + elif args.start in ['cold']: + args.warm_start = ".false." + + return args + + +def query_and_clean(dirname): + """ + Method to query if a directory exists and gather user input for further action + """ + + create_dir = True + if os.path.exists(dirname): + print() + print(f'directory already exists in {dirname}') + print() + overwrite = input('Do you wish to over-write [y/N]: ') + create_dir = True if overwrite in [ + 'y', 'yes', 'Y', 'YES'] else False + if create_dir: + shutil.rmtree(dirname) + + return create_dir + + +def validate_user_request(host, inputs): + supp_res = host.info['SUPPORTED_RESOLUTIONS'] + machine = host.machine + for attr in ['resdet', 'resens']: + try: + expt_res = f'C{getattr(inputs, attr)}' + except AttributeError: + continue + if expt_res not in supp_res: + raise NotImplementedError(f"Supported resolutions on {machine} are:\n{', '.join(supp_res)}") + + +if __name__ == '__main__': + + user_inputs = input_args() + host = Host() + + validate_user_request(host, user_inputs) + + comrot = os.path.join(user_inputs.comrot, user_inputs.pslot) + expdir = os.path.join(user_inputs.expdir, user_inputs.pslot) + + create_comrot = query_and_clean(comrot) + create_expdir = query_and_clean(expdir) + + if create_comrot: + makedirs_if_missing(comrot) + fill_COMROT(host, user_inputs) + + if create_expdir: + makedirs_if_missing(expdir) + fill_EXPDIR(user_inputs) + update_configs(host, user_inputs) diff --git a/workflow/setup_xml.py b/workflow/setup_xml.py new file mode 100755 index 0000000000..d43efe21e1 --- /dev/null +++ b/workflow/setup_xml.py @@ -0,0 +1,71 @@ +#!/usr/bin/env python3 +""" +Entry point for setting up Rocoto XML for all applications in global-workflow +""" + +import os +from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter + +from applications import AppConfig +from rocoto.workflow_xml import RocotoXML +from pygw.configuration import Configuration + + +def input_args(): + """ + Method to collect user arguments for `setup_xml.py` + """ + + description = """ + Sources configuration files based on application and + creates "$PSLOT.xml" for use with Rocoto. 
+ """ + + parser = ArgumentParser(description=description, + formatter_class=ArgumentDefaultsHelpFormatter) + + # Common arguments across all modes + parser.add_argument('expdir', help='full path to experiment directory containing config files', + type=str, default=os.environ['PWD']) + + parser.add_argument('--maxtries', help='maximum number of retries', type=int, + default=2, required=False) + parser.add_argument('--cyclethrottle', help='maximum number of concurrent cycles', type=int, + default=3, required=False) + parser.add_argument('--taskthrottle', help='maximum number of concurrent tasks', type=int, + default=25, required=False) + parser.add_argument('--verbosity', help='verbosity level of Rocoto', type=int, + default=10, required=False) + + args = parser.parse_args() + + return args + + +def check_expdir(cmd_expdir, cfg_expdir): + + if not os.path.samefile(cmd_expdir, cfg_expdir): + print('MISMATCH in experiment directories!') + print(f'config.base: EXPDIR = {cfg_expdir}') + print(f' input arg: --expdir = {cmd_expdir}') + raise ValueError('Abort!') + + +if __name__ == '__main__': + + user_inputs = input_args() + rocoto_param_dict = {'maxtries': user_inputs.maxtries, + 'cyclethrottle': user_inputs.cyclethrottle, + 'taskthrottle': user_inputs.taskthrottle, + 'verbosity': user_inputs.verbosity} + + cfg = Configuration(user_inputs.expdir) + + check_expdir(user_inputs.expdir, cfg.parse_config('config.base')['EXPDIR']) + + # Configure the application + app_config = AppConfig(cfg) + + # Create Rocoto Tasks and Assemble them into an XML + xml = RocotoXML(app_config, rocoto_param_dict) + xml.write() diff --git a/workflow/test-mynn.sh b/workflow/test-mynn.sh new file mode 100755 index 0000000000..401e21d562 --- /dev/null +++ b/workflow/test-mynn.sh @@ -0,0 +1,17 @@ +USER=Judy.K.Henderson +GITDIR=/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa ## where your git checkout is located +COMROT=$GITDIR/FV3GFSrun ## default COMROT directory +EXPDIR=$GITDIR/FV3GFSwfm ## default EXPDIR directory +ICSDIR=/scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127 + +PSLOT=test-mynn +IDATE=2022110900 +EDATE=2022110900 +RESDET=768 ## 96 192 384 768 + +### gfs_cyc 1 00Z only; gfs_cyc 2 00Z and 12Z + +./setup_expt.py gfs forecast-only --pslot $PSLOT --gfs_cyc 1 \ + --idate $IDATE --edate $EDATE --resdet $RESDET \ + --comrot $COMROT --expdir $EXPDIR + diff --git a/workflow/test_configuration.py b/workflow/test_configuration.py new file mode 100644 index 0000000000..5c59fd35bf --- /dev/null +++ b/workflow/test_configuration.py @@ -0,0 +1,32 @@ +import sys +from pygw.configuration import Configuration + + +expdir = sys.argv[1] + +cfg = Configuration(expdir) + +print(f'experiment dir: {cfg.config_dir}') + +print('configuration files ...') +line_separator = '\n' # \escapes are not allowed inside f-strings +print(f'{line_separator.join(cfg.config_files)}') + +print(f'config.base: {cfg.find_config("config.base")}') + +print('*' * 80) +print('config.base ...') +base = cfg.parse_config('config.base') +cfg.print_config('config.base') +print(type(base)) +print(base.HOMEgfs) + +print('*' * 80) +print('config.anal...') +cfg.print_config(['config.base', 'config.anal']) + + +print('*' * 80) +print('config.efcs ...') +configs = ['config.base', 'config.fcst', 'config.efcs'] +cfg.print_config(configs) diff --git a/workflow/test_hera.sh b/workflow/test_hera.sh new file mode 100755 index 0000000000..a0ef4aba5e --- /dev/null +++ b/workflow/test_hera.sh @@ -0,0 +1,17 @@ +USER=Judy.K.Henderson 
+GITDIR=/scratch1/BMC/gsd-fv3-dev/Judy.K.Henderson/test/dev_19may23_0fae23fa ## where your git checkout is located +COMROT=$GITDIR/FV3GFSrun ## default COMROT directory +EXPDIR=$GITDIR/FV3GFSwfm ## default EXPDIR directory +ICSDIR=/scratch1/BMC/gsd-fv3/rtruns/FV3ICS_L127 + +PSLOT=test_hera +IDATE=2022110900 +EDATE=2022110900 +RESDET=768 ## 96 192 384 768 + +### gfs_cyc 1 00Z only; gfs_cyc 2 00Z and 12Z + +./setup_expt.py gfs forecast-only --pslot $PSLOT --gfs_cyc 1 \ + --idate $IDATE --edate $EDATE --resdet $RESDET \ + --comrot $COMROT --expdir $EXPDIR + diff --git a/workflow/test_hosts.py b/workflow/test_hosts.py new file mode 100644 index 0000000000..b9fa969e14 --- /dev/null +++ b/workflow/test_hosts.py @@ -0,0 +1,17 @@ +from hosts import Host + +print(f'supported hosts are: {", ".join(Host.SUPPORTED_HOSTS)}') + +print(f'host detected as: {Host.detect}') +print(f'scheduler on host: {Host().scheduler}') + +print('initializing host ...') +host = Host() + +print(f'hostname: {host.machine}') + +print(f'scheduler on host: {host.scheduler}') + +print('host information ...') +line_separator = '\n' # \escapes are not allowed inside f-strings +print(f'{line_separator.join(f"{key}: {host.info[key]}" for key in host.info.keys())}') diff --git a/workflow/test_jet.sh b/workflow/test_jet.sh new file mode 100755 index 0000000000..b61f483b06 --- /dev/null +++ b/workflow/test_jet.sh @@ -0,0 +1,17 @@ +USER=Judy.K.Henderson +GITDIR=/lfs1/BMC/gsd-fv3-test/jhender/scratch/emc_gw ## where your git checkout is located +COMROT=$GITDIR/FV3GFSrun ## default COMROT directory +EXPDIR=$GITDIR/FV3GFSwfm ## default EXPDIR directory +ICSDIR=/lfs1/BMC/gsd-fv3-test/rtfim/FV3ICS_L127 + +PSLOT=test +IDATE=2022110900 +EDATE=2022110900 +RESDET=768 ## 96 192 384 768 + +### gfs_cyc 1 00Z only; gfs_cyc 2 00Z and 12Z + +./setup_expt.py gfs forecast-only --pslot $PSLOT --gfs_cyc 1 \ + --idate $IDATE --edate $EDATE --resdet $RESDET \ + --comrot $COMROT --expdir $EXPDIR +
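For reference, the two new entry points are intended to be run back to back, as the test scripts above suggest. A minimal sketch (PSLOT, dates, and directories below are illustrative placeholders, not values taken from this change):

    # 1. Create and populate EXPDIR and COMROT with config files
    ./setup_expt.py gfs forecast-only --pslot mytest \
        --idate 2022110900 --edate 2022110900 --resdet 384 \
        --comrot /path/to/comrot --expdir /path/to/expdir

    # 2. Generate the Rocoto suite (mytest.xml) from the populated experiment directory
    ./setup_xml.py /path/to/expdir/mytest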